From 2d7baaf50f3277e65cf71071f61ea34823d14c30 Mon Sep 17 00:00:00 2001
From: AustinMroz
Date: Tue, 8 Aug 2023 06:44:48 -0500
Subject: [PATCH 01/87] vim : streaming and more (#2495)

* Update Vim plugin

* Remove getbufoneline usage, Add input bind example.

getbufoneline() appears to be a recently added function and has been
replaced with getbufline for compatibility.

An additional example that explains how to add a keybind that works in
insert mode was added.
---
 examples/llama.vim | 132 +++++++++++++++++++++++++++++++++++++++++++++
 examples/llm.vim   |  23 --------
 2 files changed, 132 insertions(+), 23 deletions(-)
 create mode 100644 examples/llama.vim
 delete mode 100644 examples/llm.vim

diff --git a/examples/llama.vim b/examples/llama.vim
new file mode 100644
index 000000000..f03fadfb7
--- /dev/null
+++ b/examples/llama.vim
@@ -0,0 +1,132 @@
+" Requires an already running llama.cpp server
+" To install, either copy or symlink to ~/.vim/autoload/llama.vim
+" Then start with either :call llama#doLlamaGen(),
+" or add a keybind to your vimrc such as
+" nnoremap Z :call llama#doLlamaGen()
+" Similarly, you could add an insert mode keybind with
+" inoremap call llama#doLlamaGen()
+"
+" g:llama_api_url and g:llama_overrides can be configured in your .vimrc
+" let g:llama_api_url = "192.168.1.10:8080"
+" llama_overrides can also be set through buffer/window scopes. For instance
+" autocmd filetype python let b:llama_overrides = {"temp": 0.2}
+" Could be added to your .vimrc to automatically set a lower temperature when
+" editing a python script
+" Additionally, an override dict can be stored at the top of a file
+" !*{"stop": ["User:"]}
+" Could be added to the start of your chatlog.txt to set the stopping token
+" These parameter dicts are merged together from lowest to highest priority:
+" server default -> g:llama_overrides -> w:llama_overrides ->
+" b:llama_overrides -> in file (!*) overrides
+"
+" Sublists (like logit_bias and stop) are overridden, not merged
+" Example override:
+" !*{"logit_bias": [[13, -5], [2, false]], "temperature": 1, "top_k": 5, "top_p": 0.5, "n_predict": 256, "repeat_last_n": 256, "repeat_penalty": 1.17647}
+if !exists("g:llama_api_url")
+  let g:llama_api_url= "127.0.0.1:8080"
+endif
+if !exists("g:llama_overrides")
+  let g:llama_overrides = {}
+endif
+const s:querydata = {"n_predict": 256, "stop": [ "\n" ], "stream": v:true }
+const s:curlcommand = ['curl','--data-raw', "{\"prompt\":\"### System:\"}", '--silent', '--no-buffer', '--request', 'POST', '--url', g:llama_api_url .. '/completion', '--header', "Content-Type: application/json"]
+let s:linedict = {}
+
+func s:callbackHandler(bufn, channel, msg)
+  if len(a:msg) < 3
+    return
+  elseif a:msg[0] == "d"
+    let l:msg = a:msg[6:-1]
+  else
+    let l:msg = a:msg
+  endif
+  let l:decoded_msg = json_decode(l:msg)
+  let l:newtext = split(l:decoded_msg['content'], "\n", 1)
+  if len(l:newtext) > 0
+    call setbufline(a:bufn, s:linedict[a:bufn], getbufline(a:bufn, s:linedict[a:bufn])[0] .. newtext[0])
+  else
+    echo "nothing genned"
+  endif
+  if len(newtext) > 1
+    let l:failed = appendbufline(a:bufn, s:linedict[a:bufn], newtext[1:-1])
+    let s:linedict[a:bufn] = s:linedict[a:bufn] + len(newtext)-1
+  endif
+  if has_key(l:decoded_msg, "stop") && l:decoded_msg.stop
+    echo "Finished generation"
+  endif
+endfunction
+
+func llama#doLlamaGen()
+  if exists("b:job")
+    if job_status(b:job) == "run"
+      call job_stop(b:job)
+      return
+    endif
+  endif
+
+  let l:cbuffer = bufnr("%")
+  let s:linedict[l:cbuffer] = line('$')
+  let l:buflines = getbufline(l:cbuffer, 1, 1000)
+  let l:querydata = copy(s:querydata)
+  call extend(l:querydata, g:llama_overrides)
+  if exists("w:llama_overrides")
+    call extend(l:querydata, w:llama_overrides)
+  endif
+  if exists("b:llama_overrides")
+    call extend(l:querydata, b:llama_overrides)
+  endif
+  if l:buflines[0][0:1] == '!*'
+    let l:userdata = json_decode(l:buflines[0][2:-1])
+    call extend(l:querydata, l:userdata)
+    let l:buflines = l:buflines[1:-1]
+  endif
+  let l:querydata.prompt = join(l:buflines, "\n")
+  let l:curlcommand = copy(s:curlcommand)
+  let l:curlcommand[2] = json_encode(l:querydata)
+  let b:job = job_start(l:curlcommand, {"callback": function("s:callbackHandler", [l:cbuffer])})
+endfunction
+
+" Echoes the tokenization of the provided string, or cursor to end of word
+" Onus is placed on the user to include the preceding space
+func llama#tokenizeWord(...)
+  if (a:0 > 0)
+    let l:input = a:1
+  else
+    exe "normal \"*ye"
+    let l:input = @*
+  endif
+  let l:querydata = {"content": l:input}
+  let l:curlcommand = copy(s:curlcommand)
+  let l:curlcommand[2] = json_encode(l:querydata)
+  let l:curlcommand[8] = g:llama_api_url .. "/tokenize"
+  let s:token_job = job_start(l:curlcommand, {"callback": function("s:tokenizeWordCallback", [l:input])})
+endfunction
+
+func s:tokenizeWordCallback(plaintext, channel, msg)
+  echo '"' .. a:plaintext ..'" - ' .. string(json_decode(a:msg).tokens)
+endfunction
+
+
+" Echoes the token count of the entire buffer (or provided string)
+" Example usage :echo llama#tokenCount()
+func llama#tokenCount(...)
+  if (a:0 > 0)
+    let l:buflines = a:1
+  else
+    let l:buflines = getline(1,1000)
+    if l:buflines[0][0:1] == '!*'
+      let l:buflines = l:buflines[1:-1]
+    endif
+    let l:buflines = join(l:buflines, "\n")
+  endif
+  let l:querydata = {"content": l:buflines}
+  let l:curlcommand = copy(s:curlcommand)
+  let l:curlcommand[2] = json_encode(l:querydata)
+  let l:curlcommand[8] = g:llama_api_url .. "/tokenize"
+  let s:token_job = job_start(l:curlcommand, {"callback": "s:tokenCountCallback"})
+endfunction
+
+func s:tokenCountCallback(channel, msg)
+  let resp = json_decode(a:msg)
+  echo len(resp.tokens)
+endfunction
diff --git a/examples/llm.vim b/examples/llm.vim
deleted file mode 100644
index efecad0cd..000000000
--- a/examples/llm.vim
+++ /dev/null
@@ -1,23 +0,0 @@
-function! Llm()
-
-    let url = "http://127.0.0.1:8080/completion"
-
-    " Get the content of the current buffer
-    let buffer_content = join(getline(1, '$'), "\n")
-
-    " Create the JSON payload
-    let json_payload = {"temp":0.72,"top_k":100,"top_p":0.73,"repeat_penalty":1.100000023841858,"n_predict":10,"stream": v:false}
-    let json_payload.prompt = buffer_content
-
-    " Define the curl command
-    let curl_command = 'curl -k -s -X POST -H "Content-Type: application/json" -d @- ' . url
-    let response = system(curl_command, json_encode(json_payload))
-
-    " Extract the content field from the response
-    let content = json_decode(response).content
-
-    " Insert the content at the cursor position
-    call setline(line('.'), getline('.') . content)
-endfunction
-
-command! Llm call Llm()

From e7f94d6fdc83b41ba449b4b8c80821673dd12ffc Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Tue, 8 Aug 2023 15:05:30 +0300
Subject: [PATCH 02/87] vim : bring back simple llm.vim example

---
 examples/llm.vim | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)
 create mode 100644 examples/llm.vim

diff --git a/examples/llm.vim b/examples/llm.vim
new file mode 100644
index 000000000..473e0077a
--- /dev/null
+++ b/examples/llm.vim
@@ -0,0 +1,25 @@
+" Basic plugin example
+
+function! Llm()
+
+    let url = "http://127.0.0.1:8080/completion"
+
+    " Get the content of the current buffer
+    let buffer_content = join(getline(1, '$'), "\n")
+
+    " Create the JSON payload
+    let json_payload = {"temp":0.72,"top_k":100,"top_p":0.73,"repeat_penalty":1.100000023841858,"n_predict":10,"stream": v:false}
+    let json_payload.prompt = buffer_content
+
+    " Define the curl command
+    let curl_command = 'curl -k -s -X POST -H "Content-Type: application/json" -d @- ' . url
+    let response = system(curl_command, json_encode(json_payload))
+
+    " Extract the content field from the response
+    let content = json_decode(response).content
+
+    " Insert the content at the cursor position
+    call setline(line('.'), getline('.') . content)
+endfunction
+
+command! Llm call Llm()

From 7ed8d1fe7f8cbe6a6763e6b46759795ac8d21e12 Mon Sep 17 00:00:00 2001
From: chaihahaha
Date: Tue, 8 Aug 2023 20:07:02 +0800
Subject: [PATCH 03/87] llm.vim : multiline autocompletion, get rid of "^@"
 (#2543)

---
 examples/llm.vim | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/examples/llm.vim b/examples/llm.vim
index 473e0077a..594a28549 100644
--- a/examples/llm.vim
+++ b/examples/llm.vim
@@ -18,8 +18,10 @@ function! Llm()
     " Extract the content field from the response
     let content = json_decode(response).content
 
+    let split_newlines = split(content, '\n', 1)
+
     " Insert the content at the cursor position
-    call setline(line('.'), getline('.') . content)
+    call setline(line('.'), [ getline('.') . split_newlines[0] ] + split_newlines[1:])
 endfunction
 
 command! Llm call Llm()
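[Editor's note: both plugins above talk to the server's /completion endpoint; llama.vim POSTs a JSON body with the prompt and "stream": v:true, then consumes "data: "-prefixed JSON lines, exactly as s:callbackHandler does. A minimal Python sketch of that exchange, assuming the third-party `requests` package and the plugin's default address; this is illustrative and not part of the patches.]

    # Rough equivalent of the llama.vim request/callback loop above.
    import json
    import requests

    payload = {"prompt": "### System:", "n_predict": 256, "stream": True}
    resp = requests.post("http://127.0.0.1:8080/completion", json=payload, stream=True)

    for line in resp.iter_lines():
        if not line.startswith(b"data: "):  # streamed chunks arrive as "data: {...}"
            continue
        chunk = json.loads(line[6:])        # same 6-character strip as the Vim callback
        print(chunk["content"], end="", flush=True)
        if chunk.get("stop"):               # the server marks the final chunk with "stop"
            break
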
From acfc5478ff3446ca3b54553967a3dea09b7c771a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?=
Date: Tue, 8 Aug 2023 14:38:16 +0200
Subject: [PATCH 04/87] CUDA: tighter VRAM scratch size for 65b/70b (#2551)

---
 llama.cpp | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/llama.cpp b/llama.cpp
index 39aefd499..71061aab9 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -149,7 +149,7 @@ static const std::map<e_model, size_t> & MEM_REQ_EVAL()
 }
 
 // amount of VRAM needed per batch size to hold temporary results
-// the values for 3b and 65b are not derived from testing but instead chosen conservatively
+// the values for 3b are not derived from testing but instead chosen conservatively
 static const std::map<e_model, size_t> & VRAM_REQ_SCRATCH_BASE()
 {
     static std::map<e_model, size_t> k_sizes = {
@@ -157,14 +157,14 @@ static const std::map<e_model, size_t> & VRAM_REQ_SCRATCH_BASE()
         { MODEL_7B,   512ull * kB },
         { MODEL_13B,  640ull * kB },
         { MODEL_30B,  768ull * kB },
-        { MODEL_65B, 1536ull * kB },
-        { MODEL_70B, 1536ull * kB }, // TODO (likely can be reduced)
+        { MODEL_65B, 1280ull * kB },
+        { MODEL_70B, 1280ull * kB },
     };
     return k_sizes;
 }
 
 // amount of VRAM needed per batch size and context to hold temporary results
-// the values for 3b and 65b are not derived from testing but instead chosen conservatively
+// the values for 3b are not derived from testing but instead chosen conservatively
 static const std::map<e_model, size_t> & VRAM_REQ_SCRATCH_PER_CONTEXT()
 {
     static std::map<e_model, size_t> k_sizes = {
@@ -172,8 +172,8 @@ static const std::map<e_model, size_t> & VRAM_REQ_SCRATCH_PER_CONTEXT()
         { MODEL_7B,  128ull },
         { MODEL_13B, 160ull },
        { MODEL_30B, 208ull },
-        { MODEL_65B, 416ull },
-        { MODEL_70B, 416ull }, // TODO (likely can be reduced)
+        { MODEL_65B, 256ull },
+        { MODEL_70B, 256ull },
    };
    return k_sizes;
 }

From f5bfea0580e417f99850d5456ca541d871a3e48c Mon Sep 17 00:00:00 2001
From: Martin Krasser
Date: Tue, 8 Aug 2023 15:29:19 +0200
Subject: [PATCH 05/87] Allow passing grammar to completion endpoint (#2532)

* Allow passing grammar to completion endpoint
---
 Makefile                   |  2 +-
 examples/server/README.md  |  2 ++
 examples/server/server.cpp | 60 ++++++++++++++++++++++++++++++++++++--
 3 files changed, 61 insertions(+), 3 deletions(-)

diff --git a/Makefile b/Makefile
index 897c5cb9a..32598edfe 100644
--- a/Makefile
+++ b/Makefile
@@ -380,7 +380,7 @@ embedding: examples/embedding/embedding.cpp build-info.h ggml.
 save-load-state: examples/save-load-state/save-load-state.cpp build-info.h ggml.o llama.o common.o $(OBJS)
 	$(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS)
 
-server: examples/server/server.cpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp build-info.h ggml.o llama.o common.o $(OBJS)
+server: examples/server/server.cpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp build-info.h ggml.o llama.o common.o grammar-parser.o $(OBJS)
 	$(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h,$(filter-out %.hpp,$^)) -o $@ $(LDFLAGS) $(LWINSOCK2)
 
 $(LIB_PRE)embdinput$(DSO_EXT): examples/embd-input/embd-input.h examples/embd-input/embd-input-lib.cpp build-info.h ggml.o llama.o common.o $(OBJS)
diff --git a/examples/server/README.md b/examples/server/README.md
index aee31ae42..e56ca063a 100644
--- a/examples/server/README.md
+++ b/examples/server/README.md
@@ -151,6 +151,8 @@ node .
 
     `mirostat_eta`: Set the Mirostat learning rate, parameter eta (default: 0.1).
 
+    `grammar`: Set grammar for grammar-based sampling (default: no grammar)
+
     `seed`: Set the random number generator (RNG) seed (default: -1, -1 = random seed).
 
     `ignore_eos`: Ignore end of stream token and continue generating (default: false).
diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 6f7a66da1..10ae264f5 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -1,6 +1,7 @@
 #include "common.h"
 #include "llama.h"
 #include "build-info.h"
+#include "grammar-parser.h"
 
 #ifndef NDEBUG
 // crash the server in debug mode, otherwise send an http 500 error
@@ -195,6 +196,8 @@ struct llama_server_context
     llama_context *ctx = nullptr;
     gpt_params params;
 
+    llama_grammar *grammar = nullptr;
+
     bool truncated = false;
     bool stopped_eos = false;
     bool stopped_word = false;
@@ -226,6 +229,7 @@ struct llama_server_context
     void rewind()
     {
         params.antiprompt.clear();
+        params.grammar.clear();
         num_prompt_tokens = 0;
         num_tokens_predicted = 0;
         generated_text = "";
@@ -237,6 +241,7 @@ struct llama_server_context
         stopped_limit = false;
         stopping_word = "";
         multibyte_pending = 0;
+        grammar = nullptr;
 
         n_remain = 0;
         n_past = 0;
@@ -257,6 +262,33 @@ struct llama_server_context
         return true;
     }
 
+    bool loadGrammar()
+    {
+        if (!params.grammar.empty()) {
+            grammar_parser::parse_state parsed_grammar;
+
+            parsed_grammar = grammar_parser::parse(params.grammar.c_str());
+            // will be empty (default) if there are parse errors
+            if (parsed_grammar.rules.empty()) {
+                LOG_ERROR("grammar parse error", {{"grammar", params.grammar}});
+                return false;
+            }
+            grammar_parser::print_grammar(stderr, parsed_grammar);
+
+            {
+                auto it = params.logit_bias.find(llama_token_eos());
+                if (it != params.logit_bias.end() && it->second == -INFINITY) {
+                    LOG_WARNING("EOS token is disabled, which will cause most grammars to fail", {});
+                }
+            }
+
+            std::vector<const llama_grammar_element *> grammar_rules(parsed_grammar.c_rules());
+            grammar = llama_grammar_init(
+                grammar_rules.data(), grammar_rules.size(), parsed_grammar.symbol_ids.at("root"));
+        }
+        return true;
+    }
+
     void loadPrompt()
     {
         params.prompt.insert(0, 1, ' '); // always add a first space
@@ -420,6 +452,10 @@ struct llama_server_context
             logits[llama_token_nl()] = nl_logit;
         }
 
+        if (grammar != nullptr) {
+            llama_sample_grammar(ctx, &candidates_p, grammar);
+        }
+
         if (temp <= 0)
         {
             // Greedy sampling
@@ -457,10 +493,15 @@ struct llama_server_context
             }
         }
 
+        if (grammar != nullptr) {
+            llama_grammar_accept_token(ctx, grammar, result.tok);
+        }
+
         for (size_t i = 0; i < std::min(candidates_p.size, (size_t)n_probs); ++i)
         {
             result.probs.push_back({candidates_p.data[i].id, candidates_p.data[i].p});
         }
+
         last_n_tokens.erase(last_n_tokens.begin());
         last_n_tokens.push_back(result.tok);
         num_tokens_predicted++;
@@ -947,6 +988,7 @@ static json format_generation_settings(llama_server_context &llama)
         {"stream", llama.stream},
         {"logit_bias", llama.params.logit_bias},
         {"n_probs", llama.params.n_probs},
+        {"grammar", llama.params.grammar},
     };
 }
 
@@ -1048,6 +1090,7 @@ static void parse_options_completion(const json &body, llama_server_context &lla
     llama.params.n_keep = body.value("n_keep", default_params.n_keep);
     llama.params.seed = body.value("seed", default_params.seed);
     llama.params.prompt = body.value("prompt", default_params.prompt);
+    llama.params.grammar = body.value("grammar", default_params.grammar);
     llama.params.n_probs = body.value("n_probs", default_params.n_probs);
 
     llama.params.logit_bias.clear();
@@ -1179,6 +1222,12 @@ int main(int argc, char **argv)
             parse_options_completion(json::parse(req.body), llama);
 
+            if (!llama.loadGrammar())
+            {
+                res.status = 400;
+                return;
+            }
+
             llama.loadPrompt();
             llama.beginCompletion();
 
@@ -1334,8 +1383,12 @@ int main(int argc, char **argv)
 
     svr.set_error_handler([](const Request &, Response &res)
                           {
-        res.set_content("File Not Found", "text/plain");
-        res.status = 404; });
+        if (res.status == 400) {
+            res.set_content("Invalid request", "text/plain");
+        } else {
+            res.set_content("File Not Found", "text/plain");
+            res.status = 404;
+        } });
 
     // set timeouts and change hostname and port
     svr.set_read_timeout(sparams.read_timeout);
@@ -1363,6 +1416,9 @@ int main(int argc, char **argv)
         return 1;
     }
 
+    if (llama.grammar != nullptr) {
+        llama_grammar_free(llama.grammar);
+    }
     llama_backend_free();
 
     return 0;

From ece4fc185edf8677fe158a52b52002258b32833b Mon Sep 17 00:00:00 2001
From: klosax <131523366+klosax@users.noreply.github.com>
Date: Wed, 9 Aug 2023 00:48:33 +0200
Subject: [PATCH 06/87] map tensor names

---
 gguf_tensor_map.py | 96 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 96 insertions(+)
 create mode 100644 gguf_tensor_map.py

diff --git a/gguf_tensor_map.py b/gguf_tensor_map.py
new file mode 100644
index 000000000..4fba633b2
--- /dev/null
+++ b/gguf_tensor_map.py
@@ -0,0 +1,96 @@
+# Recommended mapping of model tensor names for storage in gguf
+
+def get_tensor_map( n_blocks : int):
+    tensor_map = {}
+    # Token embeddings
+    mapped_to = "transformer.token_embd"
+    tensor_map["gpt_neox.embed_in"] = mapped_to # gptneox
+    tensor_map["transformer.wte"] = mapped_to # gpt2 mpt
+    tensor_map["transformer.word_embeddings"] = mapped_to # falcon
+    tensor_map["model.embed_tokens"] = mapped_to # llama-hf
+    tensor_map["tok_embeddings"] = mapped_to # llama-pth
+    # Position embeddings
+    mapped_to = "transformer.pos_embd"
+    tensor_map["transformer.wpe"] = mapped_to # gpt2
+    # Output norm
+    mapped_to = "transformer.output_norm"
+    tensor_map["gpt_neox.final_layer_norm"] = mapped_to # gptneox
+    tensor_map["transformer.ln_f"] = mapped_to # gpt2 falcon
+    tensor_map["transformer.norm_f"] = mapped_to # mpt
+    tensor_map["model.norm"] = mapped_to # llama-hf
+    tensor_map["norm"] = mapped_to # llama-pth
+    # Output
+    mapped_to = "transformer.output"
+    tensor_map["embed_out"] = mapped_to # gptneox
+    tensor_map["lm_head"] = mapped_to # gpt2 mpt falcon llama-hf
+    tensor_map["output"] = mapped_to # llama-pth
+    # Attention and feed-forward layer blocks
+    for i in range(0,n_blocks):
+        # Attention norm 1
+        mapped_to = "transformer.blocks."+str(i)+".attn_norm_1"
+        tensor_map["gpt_neox.layers."+str(i)+".input_layernorm"] = mapped_to # gptneox
+        tensor_map["transformer.h."+str(i)+".ln_1"] = mapped_to # gpt2
+        tensor_map["transformer.blocks."+str(i)+".norm_1"] = mapped_to # mpt
+        tensor_map["transformer.h."+str(i)+".input_layernorm"] = mapped_to # falcon7b
+        tensor_map["transformer.h."+str(i)+".ln_attn"] = mapped_to # falcon40b
+        tensor_map["model.layers."+str(i)+".input_layernorm"] = mapped_to # llama-hf
+        tensor_map["layers."+str(i)+".attention_norm"] = mapped_to # llama-pth
+        # Attention norm 2
+        mapped_to = "transformer.blocks."+str(i)+".attn_norm_2"
+        tensor_map["transformer.h."+str(i)+".ln_mlp"] = mapped_to # falcon40b
+        # Attention query-key-value
+        mapped_to = "transformer.blocks."+str(i)+".attn_qkv"
+        tensor_map["gpt_neox.layers."+str(i)+".attention.query_key_value"] = mapped_to # gptneox
+        tensor_map["transformer.h."+str(i)+".attn.c_attn"] = mapped_to # gpt2
+        tensor_map["transformer.blocks."+str(i)+".attn.Wqkv"] = mapped_to # mpt
+        tensor_map["transformer.h."+str(i)+".self_attention.query_key_value"] = mapped_to # falcon
+        # Attention query
+        mapped_to = "transformer.blocks."+str(i)+".attn_q"
+        tensor_map["model.layers."+str(i)+".self_attn.q_proj"] = mapped_to # llama-hf
+        tensor_map["layers."+str(i)+".attention.wq"] = mapped_to # llama-pth
+        # Attention key
+        mapped_to = "transformer.blocks."+str(i)+".attn_k"
+        tensor_map["model.layers."+str(i)+".self_attn.k_proj"] = mapped_to # llama-hf
+        tensor_map["layers."+str(i)+".attention.wk"] = mapped_to # llama-pth
+        # Attention value
+        mapped_to = "transformer.blocks."+str(i)+".attn_v"
+        tensor_map["model.layers."+str(i)+".self_attn.v_proj"] = mapped_to # llama-hf
+        tensor_map["layers."+str(i)+".attention.wv"] = mapped_to # llama-pth
+        # Attention output
+        mapped_to = "transformer.blocks."+str(i)+".attn_output"
+        tensor_map["gpt_neox.layers."+str(i)+".attention.dense"] = mapped_to # gptneox
+        tensor_map["transformer.h."+str(i)+".attn.c_proj"] = mapped_to # gpt2
+        tensor_map["transformer.blocks."+str(i)+".attn.out_proj"] = mapped_to # mpt
+        tensor_map["transformer.h."+str(i)+".self_attention.dense"] = mapped_to # falcon
+        tensor_map["model.layers."+str(i)+".self_attn.o_proj"] = mapped_to # llama-hf
+        tensor_map["layers."+str(i)+".attention.wo"] = mapped_to # llama-pth
+        # Feed-forward norm
+        mapped_to = "transformer.blocks."+str(i)+".ffn_norm"
+        tensor_map["gpt_neox.layers."+str(i)+".post_attention_layernorm"] = mapped_to # gptneox
+        tensor_map["transformer.h."+str(i)+".ln_2"] = mapped_to # gpt2
+        tensor_map["transformer.blocks."+str(i)+".norm_2"] = mapped_to # mpt
+        tensor_map["model.layers."+str(i)+".post_attention_layernorm"] = mapped_to # llama-hf
+        tensor_map["layers."+str(i)+".ffn_norm"] = mapped_to # llama-pth
+        # Feed-forward up
+        mapped_to = "transformer.blocks."+str(i)+".ffn_up"
+        tensor_map["gpt_neox.layers."+str(i)+".mlp.dense_h_to_4h"] = mapped_to # gptneox
+        tensor_map["transformer.h."+str(i)+".mlp.c_fc"] = mapped_to # gpt2
+        tensor_map["transformer.blocks."+str(i)+".ffn.up_proj"] = mapped_to # mpt
+        tensor_map["transformer.h."+str(i)+".mlp.dense_h_to_4h"] = mapped_to # falcon
+        tensor_map["model.layers."+str(i)+".mlp.up_proj"] = mapped_to # llama-hf
+        tensor_map["layers."+str(i)+".feed_forward.w3"] = mapped_to # llama-pth
+        # Feed-forward gate
+        mapped_to = "transformer.blocks."+str(i)+".ffn_gate"
+        tensor_map["model.layers."+str(i)+".mlp.gate_proj"] = mapped_to # llama-hf
+        tensor_map["layers."+str(i)+".feed_forward.w1"] = mapped_to # llama-pth
+        # Feed-forward down
+        mapped_to = "transformer.blocks."+str(i)+".ffn_down"
+        tensor_map["gpt_neox.layers."+str(i)+".mlp.dense_4h_to_h"] = mapped_to # gptneox
+        tensor_map["transformer.h."+str(i)+".mlp.c_proj"] = mapped_to # gpt2
+        tensor_map["transformer.blocks."+str(i)+".ffn.down_proj"] = mapped_to # mpt
+        tensor_map["transformer.h."+str(i)+".mlp.dense_4h_to_h"] = mapped_to # falcon
+        tensor_map["model.layers."+str(i)+".mlp.down_proj"] = mapped_to # llama-hf
+        tensor_map["layers."+str(i)+".feed_forward.w2"] = mapped_to # llama-pth
+
+    return tensor_map
+
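[Editor's note: the table above is keyed by the tensor name minus its ".weight"/".bias" suffix; the converters in the next two patches strip the suffix, look up the base name, and re-attach it. A small usage sketch under that assumption — the input names are just examples, not part of the patch.]

    import gguf_tensor_map as tmap

    tensor_map = tmap.get_tensor_map(2)  # two blocks are enough for a demo

    for name in ["gpt_neox.embed_in.weight", "model.layers.0.self_attn.q_proj.weight"]:
        base, _, suffix = name.rpartition(".")
        if base in tensor_map:
            print(name, "->", tensor_map[base] + "." + suffix)
        else:
            raise SystemExit("can not map tensor '" + name + "'")
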
From f4d137d98cf770d205cd788947cb990012593bd8 Mon Sep 17 00:00:00 2001
From: klosax <131523366+klosax@users.noreply.github.com>
Date: Wed, 9 Aug 2023 00:50:11 +0200
Subject: [PATCH 07/87] convert-gptneox-h5-to-gguf.py : map tensor names

---
 convert-gptneox-h5-to-gguf.py | 36 ++++++++++++++++++++++++-----------
 1 file changed, 25 insertions(+), 11 deletions(-)

diff --git a/convert-gptneox-h5-to-gguf.py b/convert-gptneox-h5-to-gguf.py
index 066ed0da0..22508bd3d 100644
--- a/convert-gptneox-h5-to-gguf.py
+++ b/convert-gptneox-h5-to-gguf.py
@@ -1,6 +1,7 @@
 # Quick and dirty HF gptneox--> gguf conversion
 
 import gguf
+import gguf_tensor_map as tmap
 import os
 import sys
 import struct
@@ -32,6 +33,7 @@ def bytes_to_unicode():
     cs = [chr(n) for n in cs]
     return dict(zip(bs, cs))
 
+
 if len(sys.argv) < 3:
     print("Usage: convert-h5-to-ggml.py dir-model ftype\n")
     print("  ftype == 0 -> float32")
@@ -74,16 +76,17 @@ list_vars = model.state_dict()
 
 gguf_writer = gguf.GGUFWriter.open(fname_out)
 
-print("gguf: add metadata")
+print("gguf: get model metadata")
 
-llm_arch = "gptneox"
+llm_arch    = "gptneox"
+block_count = hparams["num_hidden_layers"]
 
 gguf_writer.add_name(last_dir)
 gguf_writer.add_description("gguf test model")
 gguf_writer.add_architecture(llm_arch)
 gguf_writer.add_context_length(llm_arch, hparams["max_position_embeddings"])
 gguf_writer.add_embedding_length(llm_arch, hparams["hidden_size"])
-gguf_writer.add_layer_count(llm_arch, hparams["num_hidden_layers"])
+gguf_writer.add_layer_count(llm_arch, block_count)
 gguf_writer.add_feed_forward_length(llm_arch, hparams["intermediate_size"])
 gguf_writer.add_rope_dimension_count(llm_arch, int( hparams["rotary_pct"]*(hparams["hidden_size"]//hparams["num_attention_heads"])) )
 gguf_writer.add_head_count(llm_arch, hparams["num_attention_heads"])
@@ -92,7 +95,7 @@ gguf_writer.add_layer_norm_eps(llm_arch, hparams["layer_norm_eps"])
 
 # TOKENIZATION
 
-print("gguf: add tokenizer")
+print("gguf: get tokenizer metadata")
 
 tokens: List[str] = []
 merges: List[str] = []
@@ -102,7 +105,7 @@ if Path(dir_model + "/tokenizer.json").is_file():
     # gpt2 tokenizer
     gguf_writer.add_tokenizer_model("gpt2")
 
-    print("gguf: adding gpt2 tokenizer merges")
+    print("gguf: get gpt2 tokenizer merges")
 
     with open(dir_model + "/tokenizer.json", "r", encoding="utf-8") as f:
         tokenizer_json = json.load(f)
@@ -110,7 +113,7 @@ if Path(dir_model + "/tokenizer.json").is_file():
 
     gguf_writer.add_token_merges(merges)
 
-    print("gguf: adding gpt2 tokenizer vocab")
+    print("gguf: get gpt2 tokenizer vocab")
 
     vocab_size = len( tokenizer_json["model"]["vocab"] )
 
@@ -141,7 +144,7 @@ if Path(dir_model + "/tokenizer.json").is_file():
     gguf_writer.add_token_list(tokens)
 
     if "added_tokens" in tokenizer_json and Path(dir_model + "/tokenizer_config.json").is_file():
-        print("gguf: adding special token ids")
+        print("gguf: get special token ids")
 
         with open(dir_model + "/tokenizer_config.json", "r", encoding="utf-8") as f:
             tokenizer_config = json.load(f)
@@ -176,8 +179,10 @@ if Path(dir_model + "/tokenizer.json").is_file():
 
 # TENSORS
 
+tensor_map = tmap.get_tensor_map(block_count)
+
 # tensor info
-print("gguf: add gguf tensor info")
+print("gguf: get tensor metadata")
 
 for name in list_vars.keys():
     data = list_vars[name].squeeze().numpy()
@@ -186,6 +191,15 @@ for name in list_vars.keys():
     if name.endswith(".attention.masked_bias") or name.endswith(".attention.bias") or name.endswith(".attention.rotary_emb.inv_freq"):
         continue
 
+    # map tensor names
+    if name.endswith(".weight") and name[:-7] in tensor_map:
+        name = tensor_map[name[:-7]] + ".weight"
+    elif name.endswith(".bias") and name[:-5] in tensor_map:
+        name = tensor_map[name[:-5]] + ".bias"
+    else:
+        print( "Can not map tensor '" + name + "'" )
+        sys.exit()
+
     n_dims = len(data.shape)
 
     # ftype == 0 -> float32, ftype == 1 -> float16
@@ -206,9 +220,9 @@ for name in list_vars.keys():
 
 print("gguf: write header")
 gguf_writer.write_header_to_file()
-print("gguf: write key-values")
+print("gguf: write metadata")
 gguf_writer.write_kv_data_to_file()
-print("gguf: write tensor info")
+print("gguf: write tensor metadata")
 gguf_writer.write_ti_data_to_file()
 
 # tensor data
@@ -242,5 +256,5 @@ for name in list_vars.keys():
 
 gguf_writer.close()
 
-print("gguf: conversion done, output file: " + fname_out)
+print("gguf: model successfully exported to '" + fname_out + "'" )
 print("")
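[Editor's note: both converters share the ftype rule visible in the hunks above and in the next patch — with ftype == 1, only two-dimensional ".weight" tensors are written as float16 and everything else falls back to float32. Restated as a standalone helper; illustrative only, not part of the patch.]

    import numpy as np

    def convert_dtype(name: str, data: np.ndarray, ftype: int) -> np.ndarray:
        # ftype == 0 -> float32, ftype == 1 -> float16 (2-D ".weight" tensors only)
        if ftype != 0 and name.endswith(".weight") and data.ndim == 2:
            return data.astype(np.float16)  # ftype_cur = 1 in the scripts
        return data.astype(np.float32)      # ftype_cur = 0
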
From 7d5f4522ddc2717e878966bf357dee790c33e860 Mon Sep 17 00:00:00 2001
From: klosax <131523366+klosax@users.noreply.github.com>
Date: Wed, 9 Aug 2023 00:52:16 +0200
Subject: [PATCH 08/87] convert-llama-h5-to-gguf.py : map tensor names

---
 convert-llama-h5-to-gguf.py | 85 +++++++++++++------------------------
 1 file changed, 29 insertions(+), 56 deletions(-)

diff --git a/convert-llama-h5-to-gguf.py b/convert-llama-h5-to-gguf.py
index 67b3c55d4..0b477a133 100644
--- a/convert-llama-h5-to-gguf.py
+++ b/convert-llama-h5-to-gguf.py
@@ -1,6 +1,8 @@
 # Quick and dirty HF llama --> gguf conversion, GQA/70b wont work
 
 import gguf
+import gguf_tensor_map as tmap
+import os
 import sys
 import struct
 import json
@@ -12,7 +14,6 @@ from sentencepiece import SentencePieceProcessor
 
 #NDArray = np.ndarray[Any, Any]
-
 # compatible with python < 3.9
 NDArray: 'TypeAlias' = 'np.ndarray[Any, Any]'
 
@@ -32,6 +33,7 @@ if len(sys.argv) < 3:
 # output in the same directory as the model
 dir_model = sys.argv[1]
 fname_out = sys.argv[1] + "/ggml-model.bin"
+last_dir = os.path.basename(os.path.normpath(dir_model))
 
 
 # possible tensor data types
@@ -48,6 +50,8 @@ if len(sys.argv) > 2:
         print("Invalid ftype: " + str(ftype))
         sys.exit(1)
     fname_out = sys.argv[1] + "/ggml-model-" + ftype_str[ftype] + ".gguf"
+
+print("gguf: loading model "+last_dir)
 
 with open(dir_model + "/config.json", "r", encoding="utf-8") as f:
     hparams = json.load(f)
@@ -62,32 +66,34 @@ list_vars = model.state_dict()
 
 gguf_writer = gguf.GGUFWriter.open(fname_out)
 
-print("gguf: add key-values, metadata")
+print("gguf: get model metadata")
 
-llm_arch = "llama"
+llm_arch    = "llama"
+head_count  = hparams["num_attention_heads"]
+block_count = hparams["num_hidden_layers"]
 
 gguf_writer.add_name("llama2-7b")
 gguf_writer.add_description("gguf test model")
 gguf_writer.add_architecture(llm_arch)
 gguf_writer.add_context_length(llm_arch, hparams["max_position_embeddings"])
 gguf_writer.add_embedding_length(llm_arch, hparams["hidden_size"])
-gguf_writer.add_layer_count(llm_arch, hparams["num_hidden_layers"])
+gguf_writer.add_layer_count(llm_arch, block_count)
 gguf_writer.add_feed_forward_length(llm_arch, hparams["intermediate_size"])
 gguf_writer.add_rope_dimension_count(llm_arch, hparams["hidden_size"] // hparams["num_attention_heads"])
-gguf_writer.add_head_count(llm_arch, hparams["num_attention_heads"])
+gguf_writer.add_head_count(llm_arch, head_count)
 gguf_writer.add_layer_norm_rms_eps(llm_arch, hparams["rms_norm_eps"])
 
 
 # TOKENIZATION
 
-print("gguf: add key-values, tokenizer")
+print("gguf: get tokenizer metadata")
 
 tokens: List[str] = []
 scores: List[float] = []
 
 if Path(dir_model + "/tokenizer.model").is_file():
     # vocab type sentencepiece
-    print("gguf: adding sentencepiece tokenizer vocab")
+    print("gguf: get sentencepiece tokenizer vocab and scores")
 
     tokenizer = SentencePieceProcessor(dir_model + "/tokenizer.model")
 
@@ -119,7 +125,7 @@ if Path(dir_model + "/tokenizer.json").is_file():
         tokenizer = json.load(f)
 
     if "added_tokens" in tokenizer and Path(dir_model + "/tokenizer_config.json").is_file():
-        print("gguf: adding special token ids")
+        print("gguf: get special token ids")
 
         with open(dir_model + "/tokenizer_config.json", "r", encoding="utf-8") as f:
             tokenizer_config = json.load(f)
@@ -154,8 +160,10 @@ if Path(dir_model + "/tokenizer.json").is_file():
 
 # TENSORS
 
+tensor_map = tmap.get_tensor_map(block_count)
+
 # tensor info
-print("gguf: add gguf tensor info")
+print("gguf: get tensor metadata")
 
 for name in list_vars.keys():
     data = list_vars[name].squeeze().numpy()
@@ -166,45 +174,16 @@ for name in list_vars.keys():
 
     # permute these
     if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"):
-        data = permute(data, hparams["num_attention_heads"])
+        data = permute(data,head_count)
 
-    # chnage tensor name
-
-    if name == "model.embed_tokens.weight":
-        name = "tok_embeddings.weight"
-    elif name == "model.norm.weight":
-        name = "norm.weight"
-    elif name == "lm_head.weight":
-        name = "output.weight"
+    # map tensor names
+    if name.endswith(".weight") and name[:-7] in tensor_map:
+        name = tensor_map[name[:-7]] + ".weight"
+    elif name.endswith(".bias") and name[:-5] in tensor_map:
+        name = tensor_map[name[:-5]] + ".bias"
     else:
-        for i in range(80): # maximum number of layers
-            if name == "model.layers." + str(i) + ".input_layernorm.weight":
-                name = "layers." + str(i) + ".attention_norm.weight"
-                break
-            if name == "model.layers." + str(i) + ".self_attn.q_proj.weight":
-                name = "layers." + str(i) + ".attention.wq.weight"
-                break
-            if name == "model.layers." + str(i) + ".self_attn.k_proj.weight":
-                name = "layers." + str(i) + ".attention.wk.weight"
-                break
-            if name == "model.layers." + str(i) + ".self_attn.v_proj.weight":
-                name = "layers." + str(i) + ".attention.wv.weight"
-                break
-            if name == "model.layers." + str(i) + ".self_attn.o_proj.weight":
-                name = "layers." + str(i) + ".attention.wo.weight"
-                break
-            if name == "model.layers." + str(i) + ".post_attention_layernorm.weight":
-                name = "layers." + str(i) + ".ffn_norm.weight"
-                break
-            if name == "model.layers." + str(i) + ".mlp.gate_proj.weight":
-                name = "layers." + str(i) + ".feed_forward.w1.weight"
-                break
-            if name == "model.layers." + str(i) + ".mlp.down_proj.weight":
-                name = "layers." + str(i) + ".feed_forward.w2.weight"
-                break
-            if name == "model.layers." + str(i) + ".mlp.up_proj.weight":
-                name = "layers." + str(i) + ".feed_forward.w3.weight"
+ str(i) + ".feed_forward.w3.weight" - break + print( "Can not map tensor '" + name + "'" ) + sys.exit() n_dims = len(data.shape) @@ -227,9 +206,9 @@ for name in list_vars.keys(): print("gguf: write header") gguf_writer.write_header_to_file() -print("gguf: write key-values") +print("gguf: write metadata") gguf_writer.write_kv_data_to_file() -print("gguf: write tensor info") +print("gguf: write tensor metadata") gguf_writer.write_ti_data_to_file() # tensor data @@ -237,17 +216,14 @@ print("gguf: write tensor data") for name in list_vars.keys(): data = list_vars[name].squeeze().numpy() -# print("Process tensor: " + name + " with shape: ", data.shape) # we don't need these if name.endswith(".rotary_emb.inv_freq"): -# print(" Skip tensor: " + name) continue # permute these if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): -# print(" Permute tensor: " + name) - data = permute(data, hparams["num_attention_heads"]) + data = permute(data, head_count) n_dims = len(data.shape) @@ -255,16 +231,13 @@ for name in list_vars.keys(): ftype_cur = 0 if ftype != 0: if name.endswith(".weight") and n_dims == 2: -# print(" Converting to float16") data = data.astype(np.float16) ftype_cur = 1 else: -# print(" Converting to float32") data = data.astype(np.float32) ftype_cur = 0 else: if data.dtype != np.float32: -# print(" Converting to float32") data = data.astype(np.float32) ftype_cur = 0 @@ -273,5 +246,5 @@ for name in list_vars.keys(): gguf_writer.close() -print("gguf: conversion done, output file: " + fname_out) +print("gguf: model successfully exported to '" + fname_out + "'" ) print("") From 0246d0dd6f8f57b64b696a33362ecf287d33ef63 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Wed, 9 Aug 2023 00:54:21 +0200 Subject: [PATCH 09/87] gptneox-main.cpp : map tensor names --- gptneox-main.cpp | 66 +++++++++++++++++++++++++----------------------- 1 file changed, 34 insertions(+), 32 deletions(-) diff --git a/gptneox-main.cpp b/gptneox-main.cpp index eecd59678..1667c4d54 100644 --- a/gptneox-main.cpp +++ b/gptneox-main.cpp @@ -549,56 +549,58 @@ bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2 model.layers.resize(n_layer); - model.wte = ggml_get_tensor(ctx, "gpt_neox.embed_in.weight"); - model.ln_f_g = ggml_get_tensor(ctx, "gpt_neox.final_layer_norm.weight"); - model.ln_f_b = ggml_get_tensor(ctx, "gpt_neox.final_layer_norm.bias"); - model.lmh_g = ggml_get_tensor(ctx, "embed_out.weight"); + model.wte = ggml_get_tensor(ctx, "transformer.token_embd.weight"); + model.ln_f_g = ggml_get_tensor(ctx, "transformer.output_norm.weight"); + model.ln_f_b = ggml_get_tensor(ctx, "transformer.output_norm.bias"); + model.lmh_g = ggml_get_tensor(ctx, "transformer.output.weight"); // map by name - model.tensors["gpt_neox.embed_in.weight"] = model.wte; - model.tensors["gpt_neox.final_layer_norm.weight"] = model.ln_f_g; - model.tensors["gpt_neox.final_layer_norm.bias"] = model.ln_f_b; - model.tensors["embed_out.weight"] = model.lmh_g; + model.tensors["transformer.token_embd.weight"] = model.wte; + model.tensors["transformer.output_norm.weight"] = model.ln_f_g; + model.tensors["transformer.output_norm.bias"] = model.ln_f_b; + model.tensors["transformer.output.weight"] = model.lmh_g; for (int i = 0; i < n_layer; ++i) { auto & layer = model.layers[i]; - layer.ln_1_g = get_tensor_ex(ctx, "gpt_neox.layers." + std::to_string(i) + ".input_layernorm.weight" ); - layer.ln_1_b = get_tensor_ex(ctx, "gpt_neox.layers." 
+ std::to_string(i) + ".input_layernorm.bias" ); + std::string blocknamestart = "transformer.blocks." + std::to_string(i) + "."; - layer.c_attn_attn_w = get_tensor_ex(ctx, "gpt_neox.layers." + std::to_string(i) + ".attention.query_key_value.weight" ); - layer.c_attn_attn_b = get_tensor_ex(ctx, "gpt_neox.layers." + std::to_string(i) + ".attention.query_key_value.bias" ); + layer.ln_1_g = get_tensor_ex(ctx, blocknamestart + "attn_norm_1.weight" ); + layer.ln_1_b = get_tensor_ex(ctx, blocknamestart + "attn_norm_1.bias" ); - layer.c_attn_proj_w = get_tensor_ex(ctx, "gpt_neox.layers." + std::to_string(i) + ".attention.dense.weight" ); - layer.c_attn_proj_b = get_tensor_ex(ctx, "gpt_neox.layers." + std::to_string(i) + ".attention.dense.bias" ); + layer.c_attn_attn_w = get_tensor_ex(ctx, blocknamestart + "attn_qkv.weight" ); + layer.c_attn_attn_b = get_tensor_ex(ctx ,blocknamestart + "attn_qkv.bias" ); - layer.ln_2_g = get_tensor_ex(ctx, "gpt_neox.layers." + std::to_string(i) + ".post_attention_layernorm.weight" ); - layer.ln_2_b = get_tensor_ex(ctx, "gpt_neox.layers." + std::to_string(i) + ".post_attention_layernorm.bias"); + layer.c_attn_proj_w = get_tensor_ex(ctx, blocknamestart + "attn_output.weight" ); + layer.c_attn_proj_b = get_tensor_ex(ctx, blocknamestart + "attn_output.bias" ); - layer.c_mlp_fc_w = get_tensor_ex(ctx, "gpt_neox.layers." + std::to_string(i) + ".mlp.dense_h_to_4h.weight" ); - layer.c_mlp_fc_b = get_tensor_ex(ctx, "gpt_neox.layers." + std::to_string(i) + ".mlp.dense_h_to_4h.bias" ); + layer.ln_2_g = get_tensor_ex(ctx, blocknamestart + "ffn_norm.weight" ); + layer.ln_2_b = get_tensor_ex(ctx, blocknamestart + "ffn_norm.bias"); - layer.c_mlp_proj_w = get_tensor_ex(ctx, "gpt_neox.layers." + std::to_string(i) + ".mlp.dense_4h_to_h.weight" ); - layer.c_mlp_proj_b = get_tensor_ex(ctx, "gpt_neox.layers." + std::to_string(i) + ".mlp.dense_4h_to_h.bias" ); + layer.c_mlp_fc_w = get_tensor_ex(ctx, blocknamestart + "ffn_up.weight" ); + layer.c_mlp_fc_b = get_tensor_ex(ctx, blocknamestart + "ffn_up.bias" ); + + layer.c_mlp_proj_w = get_tensor_ex(ctx, blocknamestart + "ffn_down.weight" ); + layer.c_mlp_proj_b = get_tensor_ex(ctx, blocknamestart + "ffn_down.bias" ); // map by name - model.tensors["gpt_neox.layers." + std::to_string(i) + ".input_layernorm.weight"] = layer.ln_1_g; - model.tensors["gpt_neox.layers." + std::to_string(i) + ".input_layernorm.bias"] = layer.ln_1_b; + model.tensors[blocknamestart + "attn_norm_1.weight"] = layer.ln_1_g; + model.tensors[blocknamestart + "attn_norm_1.bias"] = layer.ln_1_b; - model.tensors["gpt_neox.layers." + std::to_string(i) + ".attention.query_key_value.weight"] = layer.c_attn_attn_w; - model.tensors["gpt_neox.layers." + std::to_string(i) + ".attention.query_key_value.bias"] = layer.c_attn_attn_b; + model.tensors[blocknamestart + "attn_qkv.weight"] = layer.c_attn_attn_w; + model.tensors[blocknamestart + "attn_qkv.bias"] = layer.c_attn_attn_b; - model.tensors["gpt_neox.layers." + std::to_string(i) + ".attention.dense.weight"] = layer.c_attn_proj_w; - model.tensors["gpt_neox.layers." + std::to_string(i) + ".attention.dense.bias"] = layer.c_attn_proj_b; + model.tensors[blocknamestart + "attn_output.weight"] = layer.c_attn_proj_w; + model.tensors[blocknamestart + "attn_output.bias"] = layer.c_attn_proj_b; - model.tensors["gpt_neox.layers." + std::to_string(i) + ".post_attention_layernorm.weight"] = layer.ln_2_g; - model.tensors["gpt_neox.layers." 
+ std::to_string(i) + ".post_attention_layernorm.bias"] = layer.ln_2_b; + model.tensors[blocknamestart + "ffn_norm.weight"] = layer.ln_2_g; + model.tensors[blocknamestart + "ffn_norm.bias"] = layer.ln_2_b; - model.tensors["gpt_neox.layers." + std::to_string(i) + ".mlp.dense_h_to_4h.weight"] = layer.c_mlp_fc_w; - model.tensors["gpt_neox.layers." + std::to_string(i) + ".mlp.dense_h_to_4h.bias"] = layer.c_mlp_fc_b; + model.tensors[blocknamestart + "ffn_up.weight"] = layer.c_mlp_fc_w; + model.tensors[blocknamestart + "ffn_up.bias"] = layer.c_mlp_fc_b; - model.tensors["gpt_neox.layers." + std::to_string(i) + ".mlp.dense_4h_to_h.weight"] = layer.c_mlp_proj_w; - model.tensors["gpt_neox.layers." + std::to_string(i) + ".mlp.dense_4h_to_h.bias"] = layer.c_mlp_proj_b; + model.tensors[blocknamestart + "ffn_down.weight"] = layer.c_mlp_proj_w; + model.tensors[blocknamestart + "ffn_down.bias"] = layer.c_mlp_proj_b; } } From 25d43e0eb578b6e73046d9d6644a3a14d460600d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Wed, 9 Aug 2023 09:42:34 +0200 Subject: [PATCH 10/87] CUDA: tuned mul_mat_q kernels (#2546) --- Makefile | 5 - README.md | 1 - ggml-cuda.cu | 1056 ++++++++++++++++++++++++++++++++------------------ 3 files changed, 676 insertions(+), 386 deletions(-) diff --git a/Makefile b/Makefile index 32598edfe..f01bf0c83 100644 --- a/Makefile +++ b/Makefile @@ -253,11 +253,6 @@ ifdef LLAMA_CUDA_KQUANTS_ITER else NVCCFLAGS += -DK_QUANTS_PER_ITERATION=2 endif -ifdef LLAMA_CUDA_MMQ_Y - NVCCFLAGS += -DGGML_CUDA_MMQ_Y=$(LLAMA_CUDA_MMQ_Y) -else - NVCCFLAGS += -DGGML_CUDA_MMQ_Y=64 -endif # LLAMA_CUDA_MMQ_Y #ifdef LLAMA_CUDA_CUBLAS # NVCCFLAGS += -DGGML_CUDA_CUBLAS #endif # LLAMA_CUDA_CUBLAS diff --git a/README.md b/README.md index 2ece294b7..6900b1152 100644 --- a/README.md +++ b/README.md @@ -406,7 +406,6 @@ Building the program with BLAS support may lead to some performance improvements ---> | Option | Legal values | Default | Description | |-------------------------|------------------------|---------|-------------| - | LLAMA_CUDA_MMQ_Y | Positive integer >= 32 | 64 | Tile size in y direction when using the custom CUDA kernels for prompt processing. Higher values can be faster depending on the amount of shared memory available. Power of 2 heavily recommended. | | LLAMA_CUDA_FORCE_DMMV | Boolean | false | Force the use of dequantization + matrix vector multiplication kernels instead of using kernels that do matrix vector multiplication on quantized data. By default the decision is made based on compute capability (MMVQ for 6.1/Pascal/GTX 1000 or higher). Does not affect k-quants. | | LLAMA_CUDA_DMMV_X | Positive integer >= 32 | 32 | Number of values in x direction processed by the CUDA dequantization + matrix vector multiplication kernel per iteration. Increasing this value can improve performance on fast GPUs. Power of 2 heavily recommended. Does not affect k-quants. | | LLAMA_CUDA_MMV_Y | Positive integer | 1 | Block size in y direction for the CUDA mul mat vec kernels. Increasing this value can improve performance on fast GPUs. Power of 2 recommended. Does not affect k-quants. 
diff --git a/ggml-cuda.cu b/ggml-cuda.cu
index 9d42efb0d..6390b1158 100644
--- a/ggml-cuda.cu
+++ b/ggml-cuda.cu
@@ -14,6 +14,7 @@
 #include "ggml.h"
 
 #define MIN_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products
+#define CC_TURING   700
 
 #if defined(_MSC_VER)
 #pragma warning(disable: 4244 4267) // possible loss of data
@@ -262,10 +263,6 @@ static_assert(sizeof(block_q6_K) == sizeof(ggml_fp16_t) + 13*QK_K/16, "wrong q6_
 #define CUDA_QUANTIZE_BLOCK_SIZE 256
 #define CUDA_DEQUANTIZE_BLOCK_SIZE 256
 
-#ifndef GGML_CUDA_MMQ_Y
-#define GGML_CUDA_MMQ_Y 64
-#endif // GGML_CUDA_MMQ_Y
-
 // dmmv = dequantize_mul_mat_vec
 #ifndef GGML_CUDA_DMMV_X
 #define GGML_CUDA_DMMV_X 32
@@ -285,6 +282,20 @@ struct ggml_tensor_extra_gpu {
     cudaEvent_t events[GGML_CUDA_MAX_DEVICES]; // events for synchronizing multiple GPUs
 };
 
+static int g_device_count = -1;
+static int g_main_device = 0;
+static int g_compute_capabilities[GGML_CUDA_MAX_DEVICES];
+static float g_tensor_split[GGML_CUDA_MAX_DEVICES] = {0};
+static bool g_mul_mat_q = false;
+
+static void * g_scratch_buffer = nullptr;
+static size_t g_scratch_size = 1024*1024*1024; // 1 GB by default
+static size_t g_scratch_offset = 0;
+
+static cublasHandle_t g_cublas_handles[GGML_CUDA_MAX_DEVICES] = {nullptr};
+
+static cudaStream_t g_cudaStreams_main[GGML_CUDA_MAX_DEVICES] = { nullptr };
+
 static __global__ void add_f32(const float * x, const float * y, float * dst, const int kx, const int ky) {
     const int i = blockDim.x*blockIdx.x + threadIdx.x;
 
@@ -1549,8 +1560,8 @@ template static __device__ __forceinline__ float vec_dot_q8_1_q8_1_imp
 #else
     const float2 dm8f = __half22float2(dm8);
     const float2 ds8f = __half22float2(ds8);
-    const float d8d8 = dm8.x * ds8.x;
-    const float m8s8 = dm8.y * ds8.y;
+    const float d8d8 = dm8f.x * ds8f.x;
+    const float m8s8 = dm8f.y * ds8f.y;
 #endif // GGML_CUDA_F16
 
     // scale second part of sum by QI8_1/ vdr to compensate for multiple threads adding it
@@ -1884,21 +1895,21 @@ static __device__ __forceinline__ float vec_dot_q4_0_q8_1(
     return vec_dot_q4_0_q8_1_impl<VDR_Q4_0_Q8_1_MMVQ>(v, u, bq4_0->d, bq8_1->ds);
 }
 
-static __device__ __forceinline__ void allocate_tiles_q4_0(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
+template <int mmq_y> static __device__ __forceinline__ void allocate_tiles_q4_0(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
 
-    __shared__ int  tile_x_qs[GGML_CUDA_MMQ_Y * (WARP_SIZE) + GGML_CUDA_MMQ_Y];
-    __shared__ float tile_x_d[GGML_CUDA_MMQ_Y * (WARP_SIZE/QI4_0) + GGML_CUDA_MMQ_Y/QI4_0];
+    __shared__ int  tile_x_qs[mmq_y * (WARP_SIZE) + mmq_y];
+    __shared__ float tile_x_d[mmq_y * (WARP_SIZE/QI4_0) + mmq_y/QI4_0];
 
     *x_ql = tile_x_qs;
     *x_dm = (half2 *) tile_x_d;
 }
 
-template <bool need_check> static __device__ __forceinline__ void load_tiles_q4_0(
+template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q4_0(
     const void * __restrict__ vx, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh,
     int * __restrict__ x_sc, const int & i_offset, const int & i_max, const int & k, const int & blocks_per_row) {
 
     __builtin_assume(i_offset >= 0);
-    __builtin_assume(i_offset <  8);
+    __builtin_assume(i_offset <  nwarps);
     __builtin_assume(k >= 0);
     __builtin_assume(k <  WARP_SIZE);
 
@@ -1910,7 +1921,7 @@ template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q4_
     float * x_dmf = (float *) x_dm;
 
 #pragma unroll
-    for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8) {
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps) {
         int i = i0 + i_offset;
 
         if (need_check) {
            i = min(i, i_max);
        }
 
        const block_q4_0 * bxi = bx0 + i*blocks_per_row + kbx;
 
        x_ql[i * (WARP_SIZE + 1) + k] = get_int_from_uint8(bxi->qs, kqsx);
-        x_dmf[i * (WARP_SIZE/QI4_0) + i / QI4_0 + kbx] = bxi->d;
+        // x_dmf[i * (WARP_SIZE/QI4_0) + i / QI4_0 + kbx] = bxi->d;
     }
 
-//     const int blocks_per_tile_x_row = WARP_SIZE / QI4_0;
-//     const int kbxd = k % blocks_per_tile_x_row;
+    const int blocks_per_tile_x_row = WARP_SIZE / QI4_0;
+    const int kbxd = k % blocks_per_tile_x_row;
 
-// #pragma unroll
-//     for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * QI4_0) {
-//         FIXME out-of-bounds
-//         const int i = i0 + i_offset * QI4_0 + k / blocks_per_tile_x_row;
+#pragma unroll
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps * QI4_0) {
+        int i = i0 + i_offset * QI4_0 + k / blocks_per_tile_x_row;
 
-//         if (i >= GGML_CUDA_MMQ_Y) {
-//             return;
-//         }
+        if (need_check) {
+            i = min(i, i_max);
+        }
 
-//         const block_q4_0 * bxi = bx0 + i*blocks_per_row + kbxd;
+        const block_q4_0 * bxi = bx0 + i*blocks_per_row + kbxd;
 
-//         x_dm[i * (WARP_SIZE/QI4_0) + i / QI4_0 + kbxd].x = bxi->d;
-//     }
+        x_dmf[i * (WARP_SIZE/QI4_0) + i / QI4_0 + kbxd] = bxi->d;
+    }
 }
 
 static __device__ __forceinline__ float vec_dot_q4_0_q8_1_mul_mat(
     const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc,
     const int * __restrict__ y_qs, const half2 * __restrict__ y_ds, const int & i, const int & j, const int & k) {
 
-    __builtin_assume(i >= 0);
-    __builtin_assume(i <  GGML_CUDA_MMQ_Y);
-    __builtin_assume(j >= 0);
-    __builtin_assume(j <  WARP_SIZE);
-    __builtin_assume(k >= 0);
-    __builtin_assume(k <  WARP_SIZE);
-    __builtin_assume(k % VDR_Q4_0_Q8_1_MMQ == 0);
-
     const int kyqs = k % (QI8_1/2) + QI8_1 * (k / (QI8_1/2));
     const float * x_dmf = (const float *) x_dm;
 
@@ -1960,13 +1962,13 @@ static __device__ __forceinline__ float vec_dot_q4_0_q8_1_mul_mat(
 
 #pragma unroll
     for (int l = 0; l < VDR_Q4_0_Q8_1_MMQ; ++l) {
-        u[2*l+0] = y_qs[j * (2*WARP_SIZE) + kyqs + l];
-        u[2*l+1] = y_qs[j * (2*WARP_SIZE) + kyqs + l + QI4_0];
+        u[2*l+0] = y_qs[j * WARP_SIZE + (kyqs + l)         % WARP_SIZE];
+        u[2*l+1] = y_qs[j * WARP_SIZE + (kyqs + l + QI4_0) % WARP_SIZE];
     }
 
     return vec_dot_q4_0_q8_1_impl<VDR_Q4_0_Q8_1_MMQ>
         (&x_ql[i * (WARP_SIZE + 1) + k], u, x_dmf[i * (WARP_SIZE/QI4_0) + i/QI4_0 + k/QI4_0],
-         y_ds[j * (2*WARP_SIZE/QI8_1) + 2*k/QI8_1]);
+         y_ds[j * (WARP_SIZE/QI8_1) + (2*k/QI8_1) % (WARP_SIZE/QI8_1)]);
 }
 
 static __device__ __forceinline__ float vec_dot_q4_1_q8_1(
@@ -1987,21 +1989,21 @@ static __device__ __forceinline__ float vec_dot_q4_1_q8_1(
     return vec_dot_q4_1_q8_1_impl<VDR_Q4_1_Q8_1_MMVQ>(v, u, bq4_1->dm, bq8_1->ds);
 }
 
-static __device__ __forceinline__ void allocate_tiles_q4_1(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
+template <int mmq_y> static __device__ __forceinline__ void allocate_tiles_q4_1(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
 
-    __shared__ int   tile_x_qs[GGML_CUDA_MMQ_Y * (WARP_SIZE) +     + GGML_CUDA_MMQ_Y];
-    __shared__ half2 tile_x_dm[GGML_CUDA_MMQ_Y * (WARP_SIZE/QI4_1) + GGML_CUDA_MMQ_Y/QI4_1];
+    __shared__ int   tile_x_qs[mmq_y * (WARP_SIZE) +     + mmq_y];
+    __shared__ half2 tile_x_dm[mmq_y * (WARP_SIZE/QI4_1) + mmq_y/QI4_1];
 
     *x_ql = tile_x_qs;
     *x_dm = tile_x_dm;
 }
 
-template <bool need_check> static __device__ __forceinline__ void load_tiles_q4_1(
+template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q4_1(
     const void * __restrict__ vx, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh,
     int * __restrict__ x_sc, const int & i_offset, const int & i_max, const int & k, const int & blocks_per_row) {
 
     __builtin_assume(i_offset >= 0);
-    __builtin_assume(i_offset <  8);
+    __builtin_assume(i_offset <  nwarps);
     __builtin_assume(k >= 0);
     __builtin_assume(k <  WARP_SIZE);
 
@@ -2011,7 +2013,7 @@ template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q4_
     const block_q4_1 * bx0 = (block_q4_1 *) vx;
 
#pragma unroll
-    for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8) {
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps) {
         int i = i0 + i_offset;
 
         if (need_check) {
@@ -2027,7 +2029,7 @@ template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q4_
     const int kbxd = k % blocks_per_tile_x_row;
 
 #pragma unroll
-    for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * QI4_1) {
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps * QI4_1) {
         int i = i0 + i_offset * QI4_1 + k / blocks_per_tile_x_row;
 
         if (need_check) {
@@ -2044,27 +2046,19 @@ static __device__ __forceinline__ float vec_dot_q4_1_q8_1_mul_mat(
     const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc,
     const int * __restrict__ y_qs, const half2 * __restrict__ y_ds, const int & i, const int & j, const int & k) {
 
-    __builtin_assume(i >= 0);
-    __builtin_assume(i <  GGML_CUDA_MMQ_Y);
-    __builtin_assume(j >= 0);
-    __builtin_assume(j <  WARP_SIZE);
-    __builtin_assume(k >= 0);
-    __builtin_assume(k <  WARP_SIZE);
-    __builtin_assume(k % VDR_Q4_1_Q8_1_MMQ == 0);
-
     const int kyqs = k % (QI8_1/2) + QI8_1 * (k / (QI8_1/2));
 
     int u[2*VDR_Q4_1_Q8_1_MMQ];
 
 #pragma unroll
     for (int l = 0; l < VDR_Q4_1_Q8_1_MMQ; ++l) {
-        u[2*l+0] = y_qs[j * (2*WARP_SIZE) + kyqs + l];
-        u[2*l+1] = y_qs[j * (2*WARP_SIZE) + kyqs + l + QI4_1];
+        u[2*l+0] = y_qs[j * WARP_SIZE + (kyqs + l)         % WARP_SIZE];
+        u[2*l+1] = y_qs[j * WARP_SIZE + (kyqs + l + QI4_1) % WARP_SIZE];
     }
 
     return vec_dot_q4_1_q8_1_impl<VDR_Q4_1_Q8_1_MMQ>
         (&x_ql[i * (WARP_SIZE + 1) + k], u, x_dm[i * (WARP_SIZE/QI4_1) + i/QI4_1 + k/QI4_1],
-         y_ds[j * (2*WARP_SIZE/QI8_1) + 2*k/QI8_1]);
+         y_ds[j * (WARP_SIZE/QI8_1) + (2*k/QI8_1) % (WARP_SIZE/QI8_1)]);
 }
 
 static __device__ __forceinline__ float vec_dot_q5_0_q8_1(
@@ -2087,21 +2081,21 @@ static __device__ __forceinline__ float vec_dot_q5_0_q8_1(
     return vec_dot_q5_0_q8_1_impl<VDR_Q5_0_Q8_1_MMVQ>(vl, vh, u, bq5_0->d, bq8_1->ds);
 }
 
-static __device__ __forceinline__ void allocate_tiles_q5_0(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
+template <int mmq_y> static __device__ __forceinline__ void allocate_tiles_q5_0(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
 
-    __shared__ int  tile_x_ql[GGML_CUDA_MMQ_Y * (2*WARP_SIZE) + GGML_CUDA_MMQ_Y];
-    __shared__ float tile_x_d[GGML_CUDA_MMQ_Y * (WARP_SIZE/QI5_0) + GGML_CUDA_MMQ_Y/QI5_0];
+    __shared__ int  tile_x_ql[mmq_y * (2*WARP_SIZE) + mmq_y];
+    __shared__ float tile_x_d[mmq_y * (WARP_SIZE/QI5_0) + mmq_y/QI5_0];
 
     *x_ql = tile_x_ql;
     *x_dm = (half2 *) tile_x_d;
 }
 
-template <bool need_check> static __device__ __forceinline__ void load_tiles_q5_0(
+template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q5_0(
     const void * __restrict__ vx, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh,
     int * __restrict__ x_sc, const int & i_offset, const int & i_max, const int & k, const int & blocks_per_row) {
 
     __builtin_assume(i_offset >= 0);
-    __builtin_assume(i_offset <  8);
+    __builtin_assume(i_offset <  nwarps);
     __builtin_assume(k >= 0);
     __builtin_assume(k <  WARP_SIZE);
 
@@ -2111,7 +2105,7 @@ template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q5_
     const block_q5_0 * bx0 = (block_q5_0 *) vx;
 
 #pragma unroll
-    for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8) {
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps) {
         int i = i0 + i_offset;
 
         if (need_check) {
            i = min(i, i_max);
        }
 
        const block_q5_0 * bxi = bx0 + i*blocks_per_row + kbx;
@@ -2147,7 +2141,7 @@ template static __device__ __forceinline__ void load_tiles_q5_
     float * x_dmf = (float *) x_dm;
 
 #pragma unroll
-    for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * QI5_0) {
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps * QI5_0) {
         int i = i0 + i_offset * QI5_0 + k / blocks_per_tile_x_row;
 
         if (need_check) {
@@ -2164,14 +2158,6 @@ static __device__ __forceinline__ float vec_dot_q5_0_q8_1_mul_mat(
     const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc,
     const int * __restrict__ y_qs, const half2 * __restrict__ y_ds, const int & i, const int & j, const int & k) {
 
-    __builtin_assume(i >= 0);
-    __builtin_assume(i <  GGML_CUDA_MMQ_Y);
-    __builtin_assume(j >= 0);
-    __builtin_assume(j <  WARP_SIZE);
-    __builtin_assume(k >= 0);
-    __builtin_assume(k <  WARP_SIZE);
-    __builtin_assume(k % VDR_Q5_0_Q8_1_MMQ == 0);
-
     const int kyqs = k % (QI8_1/2) + QI8_1 * (k / (QI8_1/2));
     const int index_bx = i * (WARP_SIZE/QI5_0) + i/QI5_0 + k/QI5_0;
     const float * x_dmf = (const float *) x_dm;
@@ -2181,12 +2167,12 @@ static __device__ __forceinline__ float vec_dot_q5_0_q8_1_mul_mat(
 
 #pragma unroll
     for (int l = 0; l < VDR_Q5_0_Q8_1_MMQ; ++l) {
-        u[2*l+0] = y_qs[j * (2*WARP_SIZE) + kyqs + l];
-        u[2*l+1] = y_qs[j * (2*WARP_SIZE) + kyqs + l + QI5_0];
+        u[2*l+0] = y_qs[j * WARP_SIZE + (kyqs + l)         % WARP_SIZE];
+        u[2*l+1] = y_qs[j * WARP_SIZE + (kyqs + l + QI5_0) % WARP_SIZE];
     }
 
     return vec_dot_q8_0_q8_1_impl<QR5_0*VDR_Q5_0_Q8_1_MMQ>
-        (&x_ql[i * (2*WARP_SIZE + 1) + 2 * k], u, x_dmf[index_bx], y_df[j * (2*WARP_SIZE/QI8_1) + 2*k/QI8_1]);
+        (&x_ql[i * (2*WARP_SIZE + 1) + 2 * k], u, x_dmf[index_bx], y_df[j * (WARP_SIZE/QI8_1) + (2*k/QI8_1) % (WARP_SIZE/QI8_1)]);
 }
 
 static __device__ __forceinline__ float vec_dot_q5_1_q8_1(
@@ -2209,21 +2195,21 @@ static __device__ __forceinline__ float vec_dot_q5_1_q8_1(
     return vec_dot_q5_1_q8_1_impl<VDR_Q5_1_Q8_1_MMVQ>(vl, vh, u, bq5_1->dm, bq8_1->ds);
 }
 
-static __device__ __forceinline__ void allocate_tiles_q5_1(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
+template <int mmq_y> static __device__ __forceinline__ void allocate_tiles_q5_1(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
 
-    __shared__ int   tile_x_ql[GGML_CUDA_MMQ_Y * (2*WARP_SIZE) + GGML_CUDA_MMQ_Y];
-    __shared__ half2 tile_x_dm[GGML_CUDA_MMQ_Y * (WARP_SIZE/QI5_1) + GGML_CUDA_MMQ_Y/QI5_1];
+    __shared__ int   tile_x_ql[mmq_y * (2*WARP_SIZE) + mmq_y];
+    __shared__ half2 tile_x_dm[mmq_y * (WARP_SIZE/QI5_1) + mmq_y/QI5_1];
 
     *x_ql = tile_x_ql;
     *x_dm = tile_x_dm;
 }
 
-template <bool need_check> static __device__ __forceinline__ void load_tiles_q5_1(
+template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q5_1(
     const void * __restrict__ vx, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh,
     int * __restrict__ x_sc, const int & i_offset, const int & i_max, const int & k, const int & blocks_per_row) {
 
     __builtin_assume(i_offset >= 0);
-    __builtin_assume(i_offset <  8);
+    __builtin_assume(i_offset <  nwarps);
     __builtin_assume(k >= 0);
     __builtin_assume(k <  WARP_SIZE);
 
@@ -2233,7 +2219,7 @@ template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q5_
     const block_q5_1 * bx0 = (block_q5_1 *) vx;
 
 #pragma unroll
-    for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8) {
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps) {
         int i = i0 + i_offset;
 
         if (need_check) {
            i = min(i, i_max);
        }
 
        const block_q5_1 * bxi = bx0 + i*blocks_per_row + kbx;
@@ -2266,7 +2252,7 @@ template static __device__ __forceinline__ void load_tiles_q5_
     const int kbxd = k % blocks_per_tile_x_row;
 
 #pragma unroll
-    for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * QI5_1) {
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps * QI5_1) {
         int i = i0 + i_offset * QI5_1 + k / blocks_per_tile_x_row;
 
         if (need_check) {
@@ -2283,14 +2269,6 @@ static __device__ __forceinline__ float vec_dot_q5_1_q8_1_mul_mat(
     const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc,
     const int * __restrict__ y_qs, const half2 * __restrict__ y_ds, const int & i, const int & j, const int & k) {
 
-    __builtin_assume(i >= 0);
-    __builtin_assume(i <  GGML_CUDA_MMQ_Y);
-    __builtin_assume(j >= 0);
-    __builtin_assume(j <  WARP_SIZE);
-    __builtin_assume(k >= 0);
-    __builtin_assume(k <  WARP_SIZE);
-    __builtin_assume(k % VDR_Q5_1_Q8_1_MMQ == 0);
-
     const int kyqs = k % (QI8_1/2) + QI8_1 * (k / (QI8_1/2));
     const int index_bx = i * (WARP_SIZE/QI5_1) + + i/QI5_1 + k/QI5_1;
 
@@ -2298,12 +2276,12 @@ static __device__ __forceinline__ float vec_dot_q5_1_q8_1_mul_mat(
 
 #pragma unroll
     for (int l = 0; l < VDR_Q5_1_Q8_1_MMQ; ++l) {
-        u[2*l+0] = y_qs[j * (2*WARP_SIZE) + kyqs + l];
-        u[2*l+1] = y_qs[j * (2*WARP_SIZE) + kyqs + l + QI5_1];
+        u[2*l+0] = y_qs[j * WARP_SIZE + (kyqs + l)         % WARP_SIZE];
+        u[2*l+1] = y_qs[j * WARP_SIZE + (kyqs + l + QI5_1) % WARP_SIZE];
    }
 
     return vec_dot_q8_1_q8_1_impl<QR5_1*VDR_Q5_1_Q8_1_MMQ>
-        (&x_ql[i * (2*WARP_SIZE + 1) + 2 * k], u, x_dm[index_bx], y_ds[j * (2*WARP_SIZE/QI8_1) + 2*k/QI8_1]);
+        (&x_ql[i * (2*WARP_SIZE + 1) + 2 * k], u, x_dm[index_bx], y_ds[j * (WARP_SIZE/QI8_1) + (2*k/QI8_1) % (WARP_SIZE/QI8_1)]);
 }
 
 static __device__ __forceinline__ float vec_dot_q8_0_q8_1(
@@ -2323,21 +2301,21 @@ static __device__ __forceinline__ float vec_dot_q8_0_q8_1(
     return vec_dot_q8_0_q8_1_impl<VDR_Q8_0_Q8_1_MMVQ>(v, u, bq8_0->d, bq8_1->ds.x);
 }
 
-static __device__ __forceinline__ void allocate_tiles_q8_0(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
+template <int mmq_y> static __device__ __forceinline__ void allocate_tiles_q8_0(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
 
-    __shared__ int  tile_x_qs[GGML_CUDA_MMQ_Y * (WARP_SIZE) + GGML_CUDA_MMQ_Y];
-    __shared__ float tile_x_d[GGML_CUDA_MMQ_Y * (WARP_SIZE/QI8_0) + GGML_CUDA_MMQ_Y/QI8_0];
+    __shared__ int  tile_x_qs[mmq_y * (WARP_SIZE) + mmq_y];
+    __shared__ float tile_x_d[mmq_y * (WARP_SIZE/QI8_0) + mmq_y/QI8_0];
 
     *x_ql = tile_x_qs;
     *x_dm = (half2 *) tile_x_d;
 }
 
-template <bool need_check> static __device__ __forceinline__ void load_tiles_q8_0(
+template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q8_0(
     const void * __restrict__ vx, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh,
     int * __restrict__ x_sc, const int & i_offset, const int & i_max, const int & k, const int & blocks_per_row) {
 
     __builtin_assume(i_offset >= 0);
-    __builtin_assume(i_offset <  8);
+    __builtin_assume(i_offset <  nwarps);
     __builtin_assume(k >= 0);
     __builtin_assume(k <  WARP_SIZE);
 
@@ -2348,7 +2326,7 @@ template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q8_
     const block_q8_0 * bx0 = (block_q8_0 *) vx;
 
 #pragma unroll
-    for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8) {
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps) {
         int i = i0 + i_offset;
 
         if (need_check) {
@@ -2358,41 +2336,29 @@ template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q8_
         const block_q8_0 * bxi = bx0 + i*blocks_per_row + kbx;
 
         x_ql[i * (WARP_SIZE + 1) + k] = get_int_from_int8(bxi->qs, kqsx);
-        x_dmf[i * (WARP_SIZE/QI8_0) + i / QI8_0 + kbx] = bxi->d;
     }
 
-//     const int blocks_per_tile_x_row = WARP_SIZE / QI8_0;
-//     const int kbxd = k % blocks_per_tile_x_row;
+    const int blocks_per_tile_x_row = WARP_SIZE / QI8_0;
+    const int kbxd = k % blocks_per_tile_x_row;
 
-// #pragma unroll
-//     for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * QI8_0) {
-//         FIXME out-of-bounds
-//         const int i = i0 + i_offset * QI8_0 + k / blocks_per_tile_x_row;
+#pragma unroll
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps * QI8_0) {
+        int i = i0 + i_offset * QI8_0 + k / blocks_per_tile_x_row;
 
-// #if GGML_CUDA_MMQ_Y < 64
-//         if (i >= GGML_CUDA_MMQ_Y) {
-//             return;
-//         }
-// #endif // GGML_CUDA_MMQ_Y < 64
+        if (need_check) {
+            i = min(i, i_max);
+        }
 
-//         const block_q8_0 * bxi = bx0 + i*blocks_per_row + kbxd;
+        const block_q8_0 * bxi = bx0 + i*blocks_per_row + kbxd;
 
-//         x_dm[i * (WARP_SIZE/QI8_0) + i / QI8_0 + kbxd].x = bxi->d;
-//     }
+        x_dmf[i * (WARP_SIZE/QI8_0) + i / QI8_0 + kbxd] = bxi->d;
+    }
 }
 
 static __device__ __forceinline__ float vec_dot_q8_0_q8_1_mul_mat(
     const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc,
     const int * __restrict__ y_qs, const half2 * __restrict__ y_ds, const int & i, const int & j, const int & k) {
 
-    __builtin_assume(i >= 0);
-    __builtin_assume(i <  GGML_CUDA_MMQ_Y);
-    __builtin_assume(j >= 0);
-    __builtin_assume(j <  WARP_SIZE);
-    __builtin_assume(k >= 0);
-    __builtin_assume(k <  WARP_SIZE);
-    __builtin_assume(k % VDR_Q8_0_Q8_1_MMQ == 0);
-
     const float * x_dmf = (const float *) x_dm;
     const float * y_df  = (const float *) y_ds;
 
@@ -2424,23 +2390,23 @@ static __device__ __forceinline__ float vec_dot_q2_K_q8_1(
     return vec_dot_q2_K_q8_1_impl_mmvq(v, u, scales, bq2_K->dm, d8);
 }
 
-static __device__ __forceinline__ void allocate_tiles_q2_K(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
+template <int mmq_y> static __device__ __forceinline__ void allocate_tiles_q2_K(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) {
 
-    __shared__ int   tile_x_ql[GGML_CUDA_MMQ_Y * (WARP_SIZE) + GGML_CUDA_MMQ_Y];
-    __shared__ half2 tile_x_dm[GGML_CUDA_MMQ_Y * (WARP_SIZE/QI2_K) + GGML_CUDA_MMQ_Y/QI2_K];
-    __shared__ int   tile_x_sc[GGML_CUDA_MMQ_Y * (WARP_SIZE/4) + GGML_CUDA_MMQ_Y/4];
+    __shared__ int   tile_x_ql[mmq_y * (WARP_SIZE) + mmq_y];
+    __shared__ half2 tile_x_dm[mmq_y * (WARP_SIZE/QI2_K) + mmq_y/QI2_K];
+    __shared__ int   tile_x_sc[mmq_y * (WARP_SIZE/4) + mmq_y/4];
 
     *x_ql = tile_x_ql;
     *x_dm = tile_x_dm;
     *x_sc = tile_x_sc;
 }
 
-template <bool need_check> static __device__ __forceinline__ void load_tiles_q2_K(
+template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q2_K(
     const void * __restrict__ vx, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh,
     int * __restrict__ x_sc, const int & i_offset, const int & i_max, const int & k, const int & blocks_per_row) {
 
     __builtin_assume(i_offset >= 0);
-    __builtin_assume(i_offset <  8);
+    __builtin_assume(i_offset <  nwarps);
     __builtin_assume(k >= 0);
     __builtin_assume(k <  WARP_SIZE);
 
@@ -2450,7 +2416,7 @@ template <int mmq_y, int nwarps, bool need_check> static __device__ __forceinline__ void load_tiles_q2_
     const block_q2_K * bx0 = (block_q2_K *) vx;
 
 #pragma unroll
-    for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8) {
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps) {
         int i = i0 + i_offset;
 
         if (need_check) {
            i = min(i, i_max);
        }
 
        const block_q2_K * bxi = bx0 + i*blocks_per_row + kbx;
 
        x_ql[i * (WARP_SIZE + 1) + k] = get_int_from_uint8_aligned(bxi->qs, kqsx);
    }
 
    const int blocks_per_tile_x_row = WARP_SIZE / QI2_K;
    const int kbxd = k % blocks_per_tile_x_row;
 
 #pragma unroll
-    for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * QI2_K) {
-        int i = (i0 + i_offset * QI2_K + k / blocks_per_tile_x_row) % GGML_CUDA_MMQ_Y;
+    for (int i0 = 0; i0 < mmq_y; i0 += nwarps * QI2_K) {
+        int i = (i0 + i_offset * QI2_K + k / blocks_per_tile_x_row) % mmq_y;
 
         if (need_check) {
            i = min(i, i_max);
        }
 
        const block_q2_K * bxi = bx0 + i*blocks_per_row + kbxd;
 
        x_dm[i * (WARP_SIZE/QI2_K) + i / QI2_K + kbxd] = bxi->dm;
    }
 
 #pragma unroll
-    for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8
* 4) { + for (int i0 = 0; i0 < mmq_y; i0 += nwarps * 4) { int i = i0 + i_offset * 4 + k / (WARP_SIZE/4); if (need_check) { @@ -2496,14 +2462,6 @@ static __device__ __forceinline__ float vec_dot_q2_K_q8_1_mul_mat( const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc, const int * __restrict__ y_qs, const half2 * __restrict__ y_ds, const int & i, const int & j, const int & k) { - __builtin_assume(i >= 0); - __builtin_assume(i < GGML_CUDA_MMQ_Y); - __builtin_assume(j >= 0); - __builtin_assume(j < WARP_SIZE); - __builtin_assume(k >= 0); - __builtin_assume(k < WARP_SIZE); - __builtin_assume(k % VDR_Q2_K_Q8_1_MMQ == 0); - const int kbx = k / QI2_K; const int ky = (k % QI2_K) * QR2_K; const float * y_df = (const float *) y_ds; @@ -2520,7 +2478,7 @@ static __device__ __forceinline__ float vec_dot_q2_K_q8_1_mul_mat( const uint8_t * scales = ((const uint8_t *) &x_sc[i * (WARP_SIZE/4) + i/4 + kbx*4]) + ky/4; - const int index_y = j * (QR2_K*WARP_SIZE) + QR2_K*k; + const int index_y = j * WARP_SIZE + (QR2_K*k) % WARP_SIZE; return vec_dot_q2_K_q8_1_impl_mmq(v, &y_qs[index_y], scales, x_dm[i * (WARP_SIZE/QI2_K) + i/QI2_K + kbx], y_df[index_y/QI8_1]); } @@ -2551,12 +2509,12 @@ static __device__ __forceinline__ float vec_dot_q3_K_q8_1( return vec_dot_q3_K_q8_1_impl_mmvq(vl, vh, u, bq3_K->scales, scale_offset, d, d8); } -static __device__ __forceinline__ void allocate_tiles_q3_K(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) { +template static __device__ __forceinline__ void allocate_tiles_q3_K(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) { - __shared__ int tile_x_ql[GGML_CUDA_MMQ_Y * (WARP_SIZE) + GGML_CUDA_MMQ_Y]; - __shared__ half2 tile_x_dm[GGML_CUDA_MMQ_Y * (WARP_SIZE/QI3_K) + GGML_CUDA_MMQ_Y/QI3_K]; - __shared__ int tile_x_qh[GGML_CUDA_MMQ_Y * (WARP_SIZE/2) + GGML_CUDA_MMQ_Y/2]; - __shared__ int tile_x_sc[GGML_CUDA_MMQ_Y * (WARP_SIZE/4) + GGML_CUDA_MMQ_Y/4]; + __shared__ int tile_x_ql[mmq_y * (WARP_SIZE) + mmq_y]; + __shared__ half2 tile_x_dm[mmq_y * (WARP_SIZE/QI3_K) + mmq_y/QI3_K]; + __shared__ int tile_x_qh[mmq_y * (WARP_SIZE/2) + mmq_y/2]; + __shared__ int tile_x_sc[mmq_y * (WARP_SIZE/4) + mmq_y/4]; *x_ql = tile_x_ql; *x_dm = tile_x_dm; @@ -2564,12 +2522,12 @@ static __device__ __forceinline__ void allocate_tiles_q3_K(int ** x_ql, half2 ** *x_sc = tile_x_sc; } -template static __device__ __forceinline__ void load_tiles_q3_K( +template static __device__ __forceinline__ void load_tiles_q3_K( const void * __restrict__ vx, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh, int * __restrict__ x_sc, const int & i_offset, const int & i_max, const int & k, const int & blocks_per_row) { __builtin_assume(i_offset >= 0); - __builtin_assume(i_offset < 8); + __builtin_assume(i_offset < nwarps); __builtin_assume(k >= 0); __builtin_assume(k < WARP_SIZE); @@ -2579,7 +2537,7 @@ template static __device__ __forceinline__ void load_tiles_q3_ const block_q3_K * bx0 = (block_q3_K *) vx; #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8) { + for (int i0 = 0; i0 < mmq_y; i0 += nwarps) { int i = i0 + i_offset; if (need_check) { @@ -2596,8 +2554,8 @@ template static __device__ __forceinline__ void load_tiles_q3_ float * x_dmf = (float *) x_dm; #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * QI3_K) { - int i = (i0 + i_offset * QI3_K + k / blocks_per_tile_x_row) % GGML_CUDA_MMQ_Y; + for (int i0 = 0; i0 < mmq_y; i0 += nwarps * QI3_K) { + int i = (i0 + i_offset * QI3_K + k / 
blocks_per_tile_x_row) % mmq_y; if (need_check) { i = min(i, i_max); @@ -2609,7 +2567,7 @@ template static __device__ __forceinline__ void load_tiles_q3_ } #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * 2) { + for (int i0 = 0; i0 < mmq_y; i0 += nwarps * 2) { int i = i0 + i_offset * 2 + k / (WARP_SIZE/2); if (need_check) { @@ -2623,7 +2581,7 @@ template static __device__ __forceinline__ void load_tiles_q3_ } #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * 4) { + for (int i0 = 0; i0 < mmq_y; i0 += nwarps * 4) { int i = i0 + i_offset * 4 + k / (WARP_SIZE/4); if (need_check) { @@ -2652,14 +2610,6 @@ static __device__ __forceinline__ float vec_dot_q3_K_q8_1_mul_mat( const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc, const int * __restrict__ y_qs, const half2 * __restrict__ y_ds, const int & i, const int & j, const int & k) { - __builtin_assume(i >= 0); - __builtin_assume(i < GGML_CUDA_MMQ_Y); - __builtin_assume(j >= 0); - __builtin_assume(j < WARP_SIZE); - __builtin_assume(k >= 0); - __builtin_assume(k < WARP_SIZE); - __builtin_assume(k % VDR_Q3_K_Q8_1_MMQ == 0); - const int kbx = k / QI3_K; const int ky = (k % QI3_K) * QR3_K; const float * x_dmf = (const float *) x_dm; @@ -2681,7 +2631,7 @@ static __device__ __forceinline__ float vec_dot_q3_K_q8_1_mul_mat( v[l] = __vsubss4(vll, vlh); } - const int index_y = j * (QR3_K*WARP_SIZE) + k*QR3_K; + const int index_y = j * WARP_SIZE + (k*QR3_K) % WARP_SIZE; return vec_dot_q3_K_q8_1_impl_mmq(v, &y_qs[index_y], scales, x_dmf[i * (WARP_SIZE/QI3_K) + i/QI3_K + kbx], y_df[index_y/QI8_1]); } @@ -2778,23 +2728,23 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1( #endif } -static __device__ __forceinline__ void allocate_tiles_q4_K(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) { +template static __device__ __forceinline__ void allocate_tiles_q4_K(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) { - __shared__ int tile_x_ql[GGML_CUDA_MMQ_Y * (WARP_SIZE) + GGML_CUDA_MMQ_Y]; - __shared__ half2 tile_x_dm[GGML_CUDA_MMQ_Y * (WARP_SIZE/QI4_K) + GGML_CUDA_MMQ_Y/QI4_K]; - __shared__ int tile_x_sc[GGML_CUDA_MMQ_Y * (WARP_SIZE/8) + GGML_CUDA_MMQ_Y/8]; + __shared__ int tile_x_ql[mmq_y * (WARP_SIZE) + mmq_y]; + __shared__ half2 tile_x_dm[mmq_y * (WARP_SIZE/QI4_K) + mmq_y/QI4_K]; + __shared__ int tile_x_sc[mmq_y * (WARP_SIZE/8) + mmq_y/8]; *x_ql = tile_x_ql; *x_dm = tile_x_dm; *x_sc = tile_x_sc; } -template static __device__ __forceinline__ void load_tiles_q4_K( +template static __device__ __forceinline__ void load_tiles_q4_K( const void * __restrict__ vx, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh, int * __restrict__ x_sc, const int & i_offset, const int & i_max, const int & k, const int & blocks_per_row) { __builtin_assume(i_offset >= 0); - __builtin_assume(i_offset < 8); + __builtin_assume(i_offset < nwarps); __builtin_assume(k >= 0); __builtin_assume(k < WARP_SIZE); @@ -2804,7 +2754,7 @@ template static __device__ __forceinline__ void load_tiles_q4_ const block_q4_K * bx0 = (block_q4_K *) vx; #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8) { + for (int i0 = 0; i0 < mmq_y; i0 += nwarps) { int i = i0 + i_offset; if (need_check) { @@ -2820,8 +2770,8 @@ template static __device__ __forceinline__ void load_tiles_q4_ const int kbxd = k % blocks_per_tile_x_row; // == 0 if QK_K == 256 #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * QI4_K) { - int i = (i0 + i_offset * QI4_K + k 
/ blocks_per_tile_x_row) % GGML_CUDA_MMQ_Y; + for (int i0 = 0; i0 < mmq_y; i0 += nwarps * QI4_K) { + int i = (i0 + i_offset * QI4_K + k / blocks_per_tile_x_row) % mmq_y; if (need_check) { i = min(i, i_max); @@ -2833,8 +2783,8 @@ template static __device__ __forceinline__ void load_tiles_q4_ } #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * 8) { - int i = (i0 + i_offset * 8 + k / (WARP_SIZE/8)) % GGML_CUDA_MMQ_Y; + for (int i0 = 0; i0 < mmq_y; i0 += nwarps * 8) { + int i = (i0 + i_offset * 8 + k / (WARP_SIZE/8)) % mmq_y; if (need_check) { i = min(i, i_max); @@ -2858,14 +2808,6 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1_mul_mat( const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc, const int * __restrict__ y_qs, const half2 * __restrict__ y_ds, const int & i, const int & j, const int & k) { - __builtin_assume(i >= 0); - __builtin_assume(i < GGML_CUDA_MMQ_Y); - __builtin_assume(j >= 0); - __builtin_assume(j < WARP_SIZE); - __builtin_assume(k >= 0); - __builtin_assume(k < WARP_SIZE); - __builtin_assume(k % VDR_Q4_K_Q8_1_MMQ == 0); - int v[QR4_K*VDR_Q4_K_Q8_1_MMQ]; #pragma unroll @@ -2876,7 +2818,7 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1_mul_mat( const uint8_t * sc = ((const uint8_t *) &x_sc[i * (WARP_SIZE/8) + i/8 + k/16]) + 2*((k % 16) / 8); - const int index_y = j * (QR4_K*WARP_SIZE) + QR4_K*k; + const int index_y = j * WARP_SIZE + (QR4_K*k) % WARP_SIZE; return vec_dot_q4_K_q8_1_impl_mmq(v, &y_qs[index_y], sc, sc+8, x_dm[i * (WARP_SIZE/QI4_K) + i/QI4_K], &y_ds[index_y/QI8_1]); } @@ -2969,23 +2911,23 @@ static __device__ __forceinline__ float vec_dot_q5_K_q8_1( #endif } -static __device__ __forceinline__ void allocate_tiles_q5_K(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) { +template static __device__ __forceinline__ void allocate_tiles_q5_K(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) { - __shared__ int tile_x_ql[GGML_CUDA_MMQ_Y * (2*WARP_SIZE) + GGML_CUDA_MMQ_Y]; - __shared__ half2 tile_x_dm[GGML_CUDA_MMQ_Y * (WARP_SIZE/QI5_K) + GGML_CUDA_MMQ_Y/QI5_K]; - __shared__ int tile_x_sc[GGML_CUDA_MMQ_Y * (WARP_SIZE/8) + GGML_CUDA_MMQ_Y/8]; + __shared__ int tile_x_ql[mmq_y * (2*WARP_SIZE) + mmq_y]; + __shared__ half2 tile_x_dm[mmq_y * (WARP_SIZE/QI5_K) + mmq_y/QI5_K]; + __shared__ int tile_x_sc[mmq_y * (WARP_SIZE/8) + mmq_y/8]; *x_ql = tile_x_ql; *x_dm = tile_x_dm; *x_sc = tile_x_sc; } -template static __device__ __forceinline__ void load_tiles_q5_K( +template static __device__ __forceinline__ void load_tiles_q5_K( const void * __restrict__ vx, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh, int * __restrict__ x_sc, const int & i_offset, const int & i_max, const int & k, const int & blocks_per_row) { __builtin_assume(i_offset >= 0); - __builtin_assume(i_offset < 8); + __builtin_assume(i_offset < nwarps); __builtin_assume(k >= 0); __builtin_assume(k < WARP_SIZE); @@ -2995,7 +2937,7 @@ template static __device__ __forceinline__ void load_tiles_q5_ const block_q5_K * bx0 = (block_q5_K *) vx; #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8) { + for (int i0 = 0; i0 < mmq_y; i0 += nwarps) { int i = i0 + i_offset; if (need_check) { @@ -3024,8 +2966,8 @@ template static __device__ __forceinline__ void load_tiles_q5_ const int kbxd = k % blocks_per_tile_x_row; // == 0 if QK_K == 256 #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * QI5_K) { - int i = (i0 + i_offset * QI5_K + k / blocks_per_tile_x_row) % 
GGML_CUDA_MMQ_Y; + for (int i0 = 0; i0 < mmq_y; i0 += nwarps * QI5_K) { + int i = (i0 + i_offset * QI5_K + k / blocks_per_tile_x_row) % mmq_y; if (need_check) { i = min(i, i_max); @@ -3037,8 +2979,8 @@ template static __device__ __forceinline__ void load_tiles_q5_ } #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * 8) { - int i = (i0 + i_offset * 8 + k / (WARP_SIZE/8)) % GGML_CUDA_MMQ_Y; + for (int i0 = 0; i0 < mmq_y; i0 += nwarps * 8) { + int i = (i0 + i_offset * 8 + k / (WARP_SIZE/8)) % mmq_y; if (need_check) { i = min(i, i_max); @@ -3062,18 +3004,10 @@ static __device__ __forceinline__ float vec_dot_q5_K_q8_1_mul_mat( const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc, const int * __restrict__ y_qs, const half2 * __restrict__ y_ds, const int & i, const int & j, const int & k) { - __builtin_assume(i >= 0); - __builtin_assume(i < GGML_CUDA_MMQ_Y); - __builtin_assume(j >= 0); - __builtin_assume(j < WARP_SIZE); - __builtin_assume(k >= 0); - __builtin_assume(k < WARP_SIZE); - __builtin_assume(k % VDR_Q5_K_Q8_1_MMQ == 0); - const uint8_t * sc = ((const uint8_t *) &x_sc[i * (WARP_SIZE/8) + i/8 + k/16]) + 2 * ((k % 16) / 8); - const int index_x = i * (QR5_K*WARP_SIZE + 1) + QR5_K*k; - const int index_y = j * (QR5_K*WARP_SIZE) + QR5_K*k; + const int index_x = i * (QR5_K*WARP_SIZE + 1) + QR5_K*k; + const int index_y = j * WARP_SIZE + (QR5_K*k) % WARP_SIZE; return vec_dot_q4_K_q8_1_impl_mmq(&x_ql[index_x], &y_qs[index_y], sc, sc+8, x_dm[i * (WARP_SIZE/QI5_K) + i/QI5_K], &y_ds[index_y/QI8_1]); } @@ -3103,23 +3037,23 @@ static __device__ __forceinline__ float vec_dot_q6_K_q8_1( return vec_dot_q6_K_q8_1_impl_mmvq(vl, vh, u, scales, bq6_K->d, d8); } -static __device__ __forceinline__ void allocate_tiles_q6_K(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) { +template static __device__ __forceinline__ void allocate_tiles_q6_K(int ** x_ql, half2 ** x_dm, int ** x_qh, int ** x_sc) { - __shared__ int tile_x_ql[GGML_CUDA_MMQ_Y * (2*WARP_SIZE) + GGML_CUDA_MMQ_Y]; - __shared__ half2 tile_x_dm[GGML_CUDA_MMQ_Y * (WARP_SIZE/QI6_K) + GGML_CUDA_MMQ_Y/QI6_K]; - __shared__ int tile_x_sc[GGML_CUDA_MMQ_Y * (WARP_SIZE/8) + GGML_CUDA_MMQ_Y/8]; + __shared__ int tile_x_ql[mmq_y * (2*WARP_SIZE) + mmq_y]; + __shared__ half2 tile_x_dm[mmq_y * (WARP_SIZE/QI6_K) + mmq_y/QI6_K]; + __shared__ int tile_x_sc[mmq_y * (WARP_SIZE/8) + mmq_y/8]; *x_ql = tile_x_ql; *x_dm = tile_x_dm; *x_sc = tile_x_sc; } -template static __device__ __forceinline__ void load_tiles_q6_K( +template static __device__ __forceinline__ void load_tiles_q6_K( const void * __restrict__ vx, int * __restrict__ x_ql, half2 * __restrict__ x_dm, int * __restrict__ x_qh, int * __restrict__ x_sc, const int & i_offset, const int & i_max, const int & k, const int & blocks_per_row) { __builtin_assume(i_offset >= 0); - __builtin_assume(i_offset < 8); + __builtin_assume(i_offset < nwarps); __builtin_assume(k >= 0); __builtin_assume(k < WARP_SIZE); @@ -3129,7 +3063,7 @@ template static __device__ __forceinline__ void load_tiles_q6_ const block_q6_K * bx0 = (block_q6_K *) vx; #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8) { + for (int i0 = 0; i0 < mmq_y; i0 += nwarps) { int i = i0 + i_offset; if (need_check) { @@ -3159,8 +3093,8 @@ template static __device__ __forceinline__ void load_tiles_q6_ float * x_dmf = (float *) x_dm; #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * QI6_K) { - int i = (i0 + i_offset * QI6_K + k / blocks_per_tile_x_row) % 
GGML_CUDA_MMQ_Y; + for (int i0 = 0; i0 < mmq_y; i0 += nwarps * QI6_K) { + int i = (i0 + i_offset * QI6_K + k / blocks_per_tile_x_row) % mmq_y; if (need_check) { i = min(i, i_max); @@ -3172,8 +3106,8 @@ template static __device__ __forceinline__ void load_tiles_q6_ } #pragma unroll - for (int i0 = 0; i0 < GGML_CUDA_MMQ_Y; i0 += 8 * 8) { - int i = (i0 + i_offset * 8 + k / (WARP_SIZE/8)) % GGML_CUDA_MMQ_Y; + for (int i0 = 0; i0 < mmq_y; i0 += nwarps * 8) { + int i = (i0 + i_offset * 8 + k / (WARP_SIZE/8)) % mmq_y; if (need_check) { i = min(i, i_max); @@ -3189,25 +3123,17 @@ static __device__ __forceinline__ float vec_dot_q6_K_q8_1_mul_mat( const int * __restrict__ x_ql, const half2 * __restrict__ x_dm, const int * __restrict__ x_qh, const int * __restrict__ x_sc, const int * __restrict__ y_qs, const half2 * __restrict__ y_ds, const int & i, const int & j, const int & k) { - __builtin_assume(i >= 0); - __builtin_assume(i < GGML_CUDA_MMQ_Y); - __builtin_assume(j >= 0); - __builtin_assume(j < WARP_SIZE); - __builtin_assume(k >= 0); - __builtin_assume(k < WARP_SIZE); - __builtin_assume(k % VDR_Q6_K_Q8_1_MMQ == 0); - const float * x_dmf = (const float *) x_dm; const float * y_df = (const float *) y_ds; const int8_t * sc = ((const int8_t *) &x_sc[i * (WARP_SIZE/8) + i/8 + k/8]); - const int index_x = i * (QR6_K*WARP_SIZE + 1) + QR6_K*k; - const int index_y = j * (QR6_K*WARP_SIZE) + QR6_K*k; + const int index_x = i * (QR6_K*WARP_SIZE + 1) + QR6_K*k; + const int index_y = j * WARP_SIZE + (QR6_K*k) % WARP_SIZE; return vec_dot_q6_K_q8_1_impl_mmq(&x_ql[index_x], &y_qs[index_y], sc, x_dmf[i * (WARP_SIZE/QI6_K) + i/QI6_K], &y_df[index_y/QI8_1]); } -template static __global__ void mul_mat_q( const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, @@ -3222,14 +3148,11 @@ static __global__ void mul_mat_q( const int & ncols_dst = ncols_y; - const int tid_x = threadIdx.x; - const int tid_y = threadIdx.y; - - const int row_dst_0 = blockIdx.x*GGML_CUDA_MMQ_Y; + const int row_dst_0 = blockIdx.x*mmq_y; const int & row_x_0 = row_dst_0; - const int row_dst = row_dst_0 + tid_x; + const int row_dst = row_dst_0 + threadIdx.x; - const int col_dst_0 = blockIdx.y*WARP_SIZE; + const int col_dst_0 = blockIdx.y*mmq_x; const int & col_y_0 = col_dst_0; int * tile_x_ql = nullptr; @@ -3239,64 +3162,65 @@ static __global__ void mul_mat_q( allocate_tiles(&tile_x_ql, &tile_x_dm, &tile_x_qh, &tile_x_sc); - const int blocks_per_tile_y_col = qr*WARP_SIZE/QI8_1; + __shared__ int tile_y_qs[mmq_x * WARP_SIZE]; + __shared__ half2 tile_y_ds[mmq_x * WARP_SIZE/QI8_1]; - __shared__ int tile_y_qs[(WARP_SIZE) * (qr*WARP_SIZE)]; - __shared__ half2 tile_y_ds[(WARP_SIZE) * blocks_per_tile_y_col]; - - float sum[GGML_CUDA_MMQ_Y/WARP_SIZE][4] = {0.0f}; + float sum[mmq_y/WARP_SIZE][mmq_x/nwarps] = {0.0f}; for (int ib0 = 0; ib0 < blocks_per_row_x; ib0 += blocks_per_warp) { load_tiles(x + row_x_0*blocks_per_row_x + ib0, tile_x_ql, tile_x_dm, tile_x_qh, tile_x_sc, - tid_y, nrows_x-row_x_0-1, tid_x, blocks_per_row_x); + threadIdx.y, nrows_x-row_x_0-1, threadIdx.x, blocks_per_row_x); +#pragma unroll for (int ir = 0; ir < qr; ++ir) { - const int kqs = ir*WARP_SIZE + tid_x; + const int kqs = ir*WARP_SIZE + threadIdx.x; const int kbxd = kqs / QI8_1; - for (int i = 0; i < WARP_SIZE; i += 8) { - const int col_y_eff = min(col_y_0 + tid_y + i, ncols_y-1); // to prevent out-of-bounds memory accesses +#pragma unroll + for (int i = 0; i < mmq_x; i += nwarps) { + const int col_y_eff = min(col_y_0 + threadIdx.y + i, ncols_y-1); // to 
prevent out-of-bounds memory accesses const block_q8_1 * by0 = &y[col_y_eff*blocks_per_col_y + ib0 * (qk/QK8_1) + kbxd]; - tile_y_qs[(tid_y + i) * (qr*WARP_SIZE) + kqs] = get_int_from_int8_aligned(by0->qs, tid_x % QI8_1); + const int index_y = (threadIdx.y + i) * WARP_SIZE + kqs % WARP_SIZE; + tile_y_qs[index_y] = get_int_from_int8_aligned(by0->qs, threadIdx.x % QI8_1); } - } - for (int ids0 = 0; ids0 < WARP_SIZE; ids0 += 8 * (WARP_SIZE/blocks_per_tile_y_col)) { - const int ids = (ids0 + tid_y * (WARP_SIZE/blocks_per_tile_y_col) + tid_x / blocks_per_tile_y_col) % WARP_SIZE; - const int kby = tid_x % blocks_per_tile_y_col; - const int col_y_eff = min(col_y_0 + ids, ncols_y-1); - - // if the sum is not needed it's faster to transform the scale to f32 ahead of time - const half2 * dsi_src = &y[col_y_eff*blocks_per_col_y + ib0 * (qk/QK8_1) + kby].ds; - half2 * dsi_dst = &tile_y_ds[ids * (qr*WARP_SIZE/QI8_1) + kby]; - if (need_sum) { - *dsi_dst = *dsi_src; - } else { - float * dfi_dst = (float *) dsi_dst; - *dfi_dst = (*dsi_src).x; - } - } - - __syncthreads(); - -#if __CUDA_ARCH__ >= 700 // Unrolling the loop is slower on Pascal #pragma unroll -#endif // __CUDA_ARCH__ >= 700 - for (int k = 0; k < WARP_SIZE; k += vdr) { -#pragma unroll - for (int j = 0; j < WARP_SIZE; j += 8) { -#pragma unroll - for (int i = 0; i < GGML_CUDA_MMQ_Y; i += WARP_SIZE) { - sum[i/WARP_SIZE][j/8] += vec_dot(tile_x_ql, tile_x_dm, tile_x_qh, tile_x_sc, tile_y_qs, tile_y_ds, - tid_x + i, tid_y + j, k); + for (int ids0 = 0; ids0 < mmq_x; ids0 += nwarps * QI8_1) { + const int ids = (ids0 + threadIdx.y * QI8_1 + threadIdx.x / (WARP_SIZE/QI8_1)) % mmq_x; + const int kby = threadIdx.x % (WARP_SIZE/QI8_1); + const int col_y_eff = min(col_y_0 + ids, ncols_y-1); + + // if the sum is not needed it's faster to transform the scale to f32 ahead of time + const half2 * dsi_src = &y[col_y_eff*blocks_per_col_y + ib0 * (qk/QK8_1) + ir*(WARP_SIZE/QI8_1) + kby].ds; + half2 * dsi_dst = &tile_y_ds[ids * (WARP_SIZE/QI8_1) + kby]; + if (need_sum) { + *dsi_dst = *dsi_src; + } else { + float * dfi_dst = (float *) dsi_dst; + *dfi_dst = (*dsi_src).x; } } - } - __syncthreads(); + __syncthreads(); + +// #pragma unroll // unrolling this loop causes too much register pressure + for (int k = ir*WARP_SIZE/qr; k < (ir+1)*WARP_SIZE/qr; k += vdr) { +#pragma unroll + for (int j = 0; j < mmq_x; j += nwarps) { +#pragma unroll + for (int i = 0; i < mmq_y; i += WARP_SIZE) { + sum[i/WARP_SIZE][j/nwarps] += vec_dot( + tile_x_ql, tile_x_dm, tile_x_qh, tile_x_sc, tile_y_qs, tile_y_ds, + threadIdx.x + i, threadIdx.y + j, k); + } + } + } + + __syncthreads(); + } } @@ -3304,15 +3228,15 @@ static __global__ void mul_mat_q( return; } - for (int j = 0; j < WARP_SIZE; j += 8) { - const int col_dst = col_dst_0 + j + tid_y; + for (int j = 0; j < mmq_x; j += nwarps) { + const int col_dst = col_dst_0 + j + threadIdx.y; if (col_dst >= ncols_dst) { return; } - for (int i = 0; i < GGML_CUDA_MMQ_Y; i += WARP_SIZE) { - dst[col_dst*nrows_dst + row_dst + i] = sum[i/WARP_SIZE][j/8]; + for (int i = 0; i < mmq_y; i += WARP_SIZE) { + dst[col_dst*nrows_dst + row_dst + i] = sum[i/WARP_SIZE][j/nwarps]; } } } @@ -4014,17 +3938,52 @@ static void ggml_mul_mat_q4_0_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { - const int block_num_x = (nrows_x + GGML_CUDA_MMQ_Y - 1) / GGML_CUDA_MMQ_Y; - const int block_num_y = (ncols_y + WARP_SIZE - 1) / WARP_SIZE; - const dim3 
block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, WARP_SIZE/4, 1); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + const int compute_capability = g_compute_capabilities[id]; - if (nrows_x % GGML_CUDA_MMQ_Y == 0) { - mul_mat_q, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + if (compute_capability >= CC_TURING) { + const int mmq_x = 64; + const int mmq_y = 128; + const int nwarps = 4; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q4_0, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q4_0, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } else { - mul_mat_q, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + const int mmq_x = 64; + const int mmq_y = 64; + const int nwarps = 4; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q4_0, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q4_0, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } } @@ -4032,17 +3991,53 @@ static void ggml_mul_mat_q4_1_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { - const int block_num_x = (nrows_x + GGML_CUDA_MMQ_Y - 1) / GGML_CUDA_MMQ_Y; - const int block_num_y = (ncols_y + WARP_SIZE - 1) / WARP_SIZE; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, WARP_SIZE/4, 1); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + const int compute_capability = g_compute_capabilities[id]; - if (nrows_x % GGML_CUDA_MMQ_Y == 0) { - mul_mat_q, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + if (compute_capability >= CC_TURING) { + const int mmq_x = 64; + const int mmq_y = 128; + const int nwarps = 4; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q4_1, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q4_1, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } else { - mul_mat_q, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, 
nrows_dst); + const int mmq_x = 64; + const int mmq_y = 64; + const int nwarps = 8; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q4_1, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q4_1, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } + } } @@ -4050,17 +4045,52 @@ static void ggml_mul_mat_q5_0_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { - const int block_num_x = (nrows_x + GGML_CUDA_MMQ_Y - 1) / GGML_CUDA_MMQ_Y; - const int block_num_y = (ncols_y + WARP_SIZE - 1) / WARP_SIZE; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, WARP_SIZE/4, 1); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + const int compute_capability = g_compute_capabilities[id]; - if (nrows_x % GGML_CUDA_MMQ_Y == 0) { - mul_mat_q, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + if (compute_capability >= CC_TURING) { + const int mmq_x = 128; + const int mmq_y = 64; + const int nwarps = 4; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q5_0, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q5_0, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } else { - mul_mat_q, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + const int mmq_x = 64; + const int mmq_y = 64; + const int nwarps = 8; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q5_0, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q5_0, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } } @@ -4068,17 +4098,52 @@ static void ggml_mul_mat_q5_1_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { - const int block_num_x = (nrows_x + GGML_CUDA_MMQ_Y - 1) / GGML_CUDA_MMQ_Y; - const int block_num_y = (ncols_y + WARP_SIZE - 1) / WARP_SIZE; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, WARP_SIZE/4, 1); + int id; + CUDA_CHECK(cudaGetDevice(&id)); 
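// The dispatch pattern below repeats for every quantization type in this patch:
// read the device's compute capability, pick per-architecture tile sizes, derive
// the launch grid from them, and choose the need_check template variant. Note the
// pairing: mmq_y tiles the rows of x (grid.x) while mmq_x tiles the columns of y
// (grid.y). need_check=true is only required when nrows_x is not a multiple of
// mmq_y; the checked variant clamps row indices (i = min(i, i_max)) rather than
// branching per element. A condensed sketch using the q5_1 Turing numbers as a
// worked example (this comment block is illustrative, not part of the diff):
//
//     const int mmq_x = 128, mmq_y = 64, nwarps = 8;
//     // e.g. nrows_x = 4096, ncols_y = 512  ->  grid = (64, 4), block = (32, 8)
//     const dim3 block_nums((nrows_x + mmq_y - 1) / mmq_y, (ncols_y + mmq_x - 1) / mmq_x, 1);
//     const dim3 block_dims(WARP_SIZE, nwarps, 1);
//     if (nrows_x % mmq_y == 0) { /* instantiate mul_mat_q<..., need_check = false> */ }
//     else                      { /* instantiate mul_mat_q<..., need_check = true>  */ }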
+ const int compute_capability = g_compute_capabilities[id]; - if (nrows_x % GGML_CUDA_MMQ_Y == 0) { - mul_mat_q, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + if (compute_capability >= CC_TURING) { + const int mmq_x = 128; + const int mmq_y = 64; + const int nwarps = 8; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q5_1, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q5_1, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } else { - mul_mat_q, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + const int mmq_x = 64; + const int mmq_y = 64; + const int nwarps = 8; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q5_1, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q5_1, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } } @@ -4086,17 +4151,52 @@ static void ggml_mul_mat_q8_0_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { - const int block_num_x = (nrows_x + GGML_CUDA_MMQ_Y - 1) / GGML_CUDA_MMQ_Y; - const int block_num_y = (ncols_y + WARP_SIZE - 1) / WARP_SIZE; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, WARP_SIZE/4, 1); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + const int compute_capability = g_compute_capabilities[id]; - if (nrows_x % GGML_CUDA_MMQ_Y == 0) { - mul_mat_q, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + if (compute_capability >= CC_TURING) { + const int mmq_x = 128; + const int mmq_y = 64; + const int nwarps = 4; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q8_0, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q8_0, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } else { - mul_mat_q, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + const int mmq_x = 64; + const int mmq_y = 64; + const int nwarps = 8; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + 
const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q8_0, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q8_0, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } } @@ -4104,17 +4204,52 @@ static void ggml_mul_mat_q2_K_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { - const int block_num_x = (nrows_x + GGML_CUDA_MMQ_Y - 1) / GGML_CUDA_MMQ_Y; - const int block_num_y = (ncols_y + WARP_SIZE - 1) / WARP_SIZE; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, WARP_SIZE/4, 1); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + const int compute_capability = g_compute_capabilities[id]; - if (nrows_x % GGML_CUDA_MMQ_Y == 0) { - mul_mat_q, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + if (compute_capability >= CC_TURING) { + const int mmq_x = 64; + const int mmq_y = 128; + const int nwarps = 4; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q2_K, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q2_K, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } else { - mul_mat_q, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + const int mmq_x = 64; + const int mmq_y = 64; + const int nwarps = 8; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q2_K, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q2_K, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } } @@ -4122,17 +4257,52 @@ static void ggml_mul_mat_q3_K_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { - const int block_num_x = (nrows_x + GGML_CUDA_MMQ_Y - 1) / GGML_CUDA_MMQ_Y; - const int block_num_y = (ncols_y + WARP_SIZE - 1) / WARP_SIZE; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, WARP_SIZE/4, 1); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + const int compute_capability = g_compute_capabilities[id]; - if (nrows_x % GGML_CUDA_MMQ_Y == 0) { - mul_mat_q, VDR_Q3_K_Q8_1_MMQ, 
vec_dot_q3_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + if (compute_capability >= CC_TURING) { + const int mmq_x = 128; + const int mmq_y = 128; + const int nwarps = 4; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q3_K, VDR_Q3_K_Q8_1_MMQ, vec_dot_q3_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q3_K, VDR_Q3_K_Q8_1_MMQ, vec_dot_q3_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } else { - mul_mat_q, VDR_Q3_K_Q8_1_MMQ, vec_dot_q3_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + const int mmq_x = 64; + const int mmq_y = 64; + const int nwarps = 8; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q3_K, VDR_Q3_K_Q8_1_MMQ, vec_dot_q3_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q3_K, VDR_Q3_K_Q8_1_MMQ, vec_dot_q3_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } } @@ -4140,17 +4310,52 @@ static void ggml_mul_mat_q4_K_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { - const int block_num_x = (nrows_x + GGML_CUDA_MMQ_Y - 1) / GGML_CUDA_MMQ_Y; - const int block_num_y = (ncols_y + WARP_SIZE - 1) / WARP_SIZE; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, WARP_SIZE/4, 1); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + const int compute_capability = g_compute_capabilities[id]; - if (nrows_x % GGML_CUDA_MMQ_Y == 0) { - mul_mat_q, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + if (compute_capability >= CC_TURING) { + const int mmq_x = 64; + const int mmq_y = 128; + const int nwarps = 4; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q4_K, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q4_K, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } else { - mul_mat_q, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + const int mmq_x = 32; + const int mmq_y = 64; + const int nwarps = 8; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 
block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q4_K, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q4_K, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } } @@ -4158,17 +4363,52 @@ static void ggml_mul_mat_q5_K_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { - const int block_num_x = (nrows_x + GGML_CUDA_MMQ_Y - 1) / GGML_CUDA_MMQ_Y; - const int block_num_y = (ncols_y + WARP_SIZE - 1) / WARP_SIZE; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, WARP_SIZE/4, 1); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + const int compute_capability = g_compute_capabilities[id]; - if (nrows_x % GGML_CUDA_MMQ_Y == 0) { - mul_mat_q, VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + if (compute_capability >= CC_TURING) { + const int mmq_x = 64; + const int mmq_y = 128; + const int nwarps = 4; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q5_K, VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q5_K, VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } else { - mul_mat_q, VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + const int mmq_x = 64; + const int mmq_y = 64; + const int nwarps = 8; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q5_K, VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q5_K, VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } } @@ -4176,17 +4416,52 @@ static void ggml_mul_mat_q6_K_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { - const int block_num_x = (nrows_x + GGML_CUDA_MMQ_Y - 1) / GGML_CUDA_MMQ_Y; - const int block_num_y = (ncols_y + WARP_SIZE - 1) / WARP_SIZE; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, WARP_SIZE/4, 1); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + const int compute_capability = g_compute_capabilities[id]; - if (nrows_x % GGML_CUDA_MMQ_Y == 0) { - mul_mat_q, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + if (compute_capability >= CC_TURING) { 
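+        // the tile sizes below are per-architecture tuning choices: a larger output
+        // tile (mmq_x x mmq_y) amortizes shared-memory loads over more results, at
+        // the cost of more shared memory and registers per block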
+ const int mmq_x = 64; + const int mmq_y = 64; + const int nwarps = 4; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q6_K, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q6_K, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } else { - mul_mat_q, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + const int mmq_x = 32; + const int mmq_y = 64; + const int nwarps = 8; + + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q, + load_tiles_q6_K, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q, + load_tiles_q6_K, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> + <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } } } @@ -4361,20 +4636,6 @@ static void ggml_cuda_pool_free(void * ptr, size_t size) { } -static void * g_scratch_buffer = nullptr; -static size_t g_scratch_size = 1024*1024*1024; // 1 GB by default -static size_t g_scratch_offset = 0; - -static int g_device_count = -1; -static int g_main_device = 0; -static int g_compute_capabilities[GGML_CUDA_MAX_DEVICES]; -static float g_tensor_split[GGML_CUDA_MAX_DEVICES] = {0}; -static bool g_mul_mat_q = false; - -static cublasHandle_t g_cublas_handles[GGML_CUDA_MAX_DEVICES] = {nullptr}; - -static cudaStream_t g_cudaStreams_main[GGML_CUDA_MAX_DEVICES] = { nullptr }; - void ggml_init_cublas() { static bool initialized = false; @@ -4730,6 +4991,37 @@ inline void ggml_cuda_op_mul_mat_q( (void) i1; } +static int64_t get_row_rounding(ggml_type type) { + int max_compute_capability = INT_MIN; + for (int id = 0; id < g_device_count; ++id) { + if (max_compute_capability < g_compute_capabilities[id] + && g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { + max_compute_capability = g_compute_capabilities[id]; + } + } + + switch(type) { + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + return max_compute_capability >= CC_TURING ? 128 : 64; + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_Q8_0: + return 64; + case GGML_TYPE_F16: + return 1; + case GGML_TYPE_Q2_K: + case GGML_TYPE_Q3_K: + case GGML_TYPE_Q4_K: + case GGML_TYPE_Q5_K: + return max_compute_capability >= CC_TURING ? 128 : 64; + case GGML_TYPE_Q6_K: + return 64; + default: + GGML_ASSERT(false); + } +} + inline void ggml_cuda_op_mul_mat_vec( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, char * src0_ddq_i, float * src0_ddf_i, float * src1_ddf_i, float * dst_ddf_i, int64_t i02, int64_t i01_low, int64_t i01_high, int i1, @@ -5130,14 +5422,16 @@ static void ggml_cuda_op(const ggml_tensor * src0, const ggml_tensor * src1, ggm int64_t row_low, row_high; if (split) { + const int64_t rounding = get_row_rounding(src0->type); + row_low = id == 0 ? 
0 : nrows0*g_tensor_split[id]; - row_low -= row_low % GGML_CUDA_MMQ_Y; + row_low -= row_low % rounding; if (id == g_device_count - 1) { row_high = nrows0; } else { row_high = nrows0*g_tensor_split[id + 1]; - row_high -= row_high % GGML_CUDA_MMQ_Y; + row_high -= row_high % rounding; } } else { row_low = 0; @@ -5616,14 +5910,16 @@ void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor) { row_low = 0; row_high = nrows; } else if (backend == GGML_BACKEND_GPU_SPLIT) { + const int64_t rounding = get_row_rounding(tensor->type); + row_low = id == 0 ? 0 : nrows*g_tensor_split[id]; - row_low -= row_low % GGML_CUDA_MMQ_Y; + row_low -= row_low % rounding; if (id == g_device_count - 1) { row_high = nrows; } else { row_high = nrows*g_tensor_split[id + 1]; - row_high -= row_high % GGML_CUDA_MMQ_Y; + row_high -= row_high % rounding; } } else { GGML_ASSERT(false); From ea04a4ca1940d92becc0ee26523aa2c4a18cf938 Mon Sep 17 00:00:00 2001 From: grahameth <96447521+grahameth@users.noreply.github.com> Date: Wed, 9 Aug 2023 22:46:40 +0200 Subject: [PATCH 11/87] add log_callback to llama_context_params for custom logging. (#2234) * add log_callback to llama_context_params for custom logging. * Fix macro expansion on gcc * Add struct llama_state for global variables and move log_callback there * Turn log level into enum and some minor changes. * Remove model_for_logging parameter (not needed anymore) * Convert remaining fprintf(stderr, ...) calls to use new macros. * Fix enum and initialize g_state * Fix log calls after merge * Fix missing static * Add back all the new lines in the logging strings * Add comment for llama_log_callback and replace remaining printf calls --------- Co-authored-by: grahameth <-> Co-authored-by: Helmut --- llama.cpp | 263 +++++++++++++++++++++++++++++++++--------------------- llama.h | 19 +++- 2 files changed, 177 insertions(+), 105 deletions(-) diff --git a/llama.cpp b/llama.cpp index 71061aab9..0cf2b3749 100644 --- a/llama.cpp +++ b/llama.cpp @@ -56,6 +56,13 @@ #pragma warning(disable: 4244 4267) // possible loss of data #endif +static void llama_log_internal(llama_log_level level, const char* format, ...); +static void llama_log_callback_default(llama_log_level level, const char * text, void * user_data); +#define LLAMA_LOG_INFO(...) llama_log_internal(LLAMA_LOG_LEVEL_INFO , __VA_ARGS__) +#define LLAMA_LOG_WARN(...) llama_log_internal(LLAMA_LOG_LEVEL_WARN , __VA_ARGS__) +#define LLAMA_LOG_ERROR(...) 
llama_log_internal(LLAMA_LOG_LEVEL_ERROR, __VA_ARGS__) + + #if !defined(GGML_USE_CUBLAS) && !defined(GGML_USE_METAL) #include "ggml-alloc.h" #define LLAMA_USE_ALLOCATOR @@ -438,6 +445,14 @@ struct llama_context { } }; +struct llama_state { + // We save the log callback globally + llama_log_callback log_callback = llama_log_callback_default; + void * log_callback_user_data = nullptr; +}; +// global state +static llama_state g_state; + template static T checked_mul(T a, T b) { T ret = a * b; @@ -504,7 +519,7 @@ struct llama_file_loader { llama_file_loader(const char * fname, llama_load_tensors_map & tensors_map) : file(fname, "rb") { - fprintf(stderr, "llama.cpp: loading model from %s\n", fname); + LLAMA_LOG_INFO("llama.cpp: loading model from %s\n", fname); read_magic(); read_hparams(); read_vocab(); @@ -619,7 +634,7 @@ struct llama_file_saver { llama_file_loader * any_file_loader; llama_file_saver(const char * fname, llama_file_loader * any_file_loader, enum llama_ftype new_ftype) : file(fname, "wb"), any_file_loader(any_file_loader) { - fprintf(stderr, "llama.cpp: saving model to %s\n", fname); + LLAMA_LOG_INFO("llama.cpp: saving model to %s\n", fname); write_magic(); write_hparams(new_ftype); write_vocab(); @@ -640,7 +655,7 @@ struct llama_file_saver { } void write_vocab() { if (any_file_loader->file_version == LLAMA_FILE_VERSION_GGML) { - fprintf(stderr, "llama.cpp: WARNING: input is an old file that doesn't have scores; will add dummy scores\n"); + LLAMA_LOG_WARN("llama.cpp: WARNING: input is an old file that doesn't have scores; will add dummy scores\n"); } uint32_t n_vocab = any_file_loader->hparams.n_vocab; for (uint32_t i = 0; i < n_vocab; i++) { @@ -831,7 +846,7 @@ struct llama_model_loader { uint8_t byte = lt.data[i]; sum = byte + (sum << 6) + (sum << 16) - sum; // sdbm hash } - fprintf(stderr, "%s checksum: %#08x (%s, size %zu)\n", lt.name.c_str(), sum, + LLAMA_LOG_INFO("%s checksum: %#08x (%s, size %zu)\n", lt.name.c_str(), sum, llama_format_tensor_shape(lt.ne).c_str(), lt.size); } @@ -864,7 +879,7 @@ static bool kv_cache_init( cache.ctx = ggml_init(params); if (!cache.ctx) { - fprintf(stderr, "%s: failed to allocate memory for kv cache\n", __func__); + LLAMA_LOG_ERROR("%s: failed to allocate memory for kv cache\n", __func__); return false; } @@ -1076,7 +1091,7 @@ static void llama_model_load_internal( LLAMA_ASSERT(hparams.n_head % n_gqa == 0); hparams.n_head_kv = hparams.n_head / n_gqa; if (model.type == e_model::MODEL_65B && n_gqa == 8) { - fprintf(stderr, "%s: warning: assuming 70B model based on GQA == %d\n", __func__, n_gqa); + LLAMA_LOG_WARN("%s: warning: assuming 70B model based on GQA == %d\n", __func__, n_gqa); model.type = e_model::MODEL_70B; hparams.f_ffn_mult = 1.3f; // from the params.json of the 70B model } @@ -1092,22 +1107,22 @@ static void llama_model_load_internal( //const uint32_t n_ff = 28672; { - fprintf(stderr, "%s: format = %s\n", __func__, llama_file_version_name(file_version)); - fprintf(stderr, "%s: n_vocab = %u\n", __func__, hparams.n_vocab); - fprintf(stderr, "%s: n_ctx = %u\n", __func__, hparams.n_ctx); - fprintf(stderr, "%s: n_embd = %u\n", __func__, hparams.n_embd); - fprintf(stderr, "%s: n_mult = %u\n", __func__, hparams.n_mult); - fprintf(stderr, "%s: n_head = %u\n", __func__, hparams.n_head); - fprintf(stderr, "%s: n_head_kv = %u\n", __func__, hparams.n_head_kv); - fprintf(stderr, "%s: n_layer = %u\n", __func__, hparams.n_layer); - fprintf(stderr, "%s: n_rot = %u\n", __func__, hparams.n_rot); // a.k.a. 
n_embd_head, n_head_dim
-    fprintf(stderr, "%s: n_gqa = %u\n", __func__, hparams.n_gqa());
-    fprintf(stderr, "%s: rnorm_eps = %.1e\n", __func__, hparams.f_rms_norm_eps);
-    fprintf(stderr, "%s: n_ff = %u\n", __func__, n_ff);
-    fprintf(stderr, "%s: freq_base = %.1f\n", __func__, hparams.rope_freq_base);
-    fprintf(stderr, "%s: freq_scale = %g\n", __func__, hparams.rope_freq_scale);
-    fprintf(stderr, "%s: ftype = %u (%s)\n", __func__, hparams.ftype, llama_ftype_name(hparams.ftype));
-    fprintf(stderr, "%s: model size = %s\n", __func__, llama_model_type_name(model.type));
+    LLAMA_LOG_INFO("%s: format = %s\n", __func__, llama_file_version_name(file_version));
+    LLAMA_LOG_INFO("%s: n_vocab = %u\n", __func__, hparams.n_vocab);
+    LLAMA_LOG_INFO("%s: n_ctx = %u\n", __func__, hparams.n_ctx);
+    LLAMA_LOG_INFO("%s: n_embd = %u\n", __func__, hparams.n_embd);
+    LLAMA_LOG_INFO("%s: n_mult = %u\n", __func__, hparams.n_mult);
+    LLAMA_LOG_INFO("%s: n_head = %u\n", __func__, hparams.n_head);
+    LLAMA_LOG_INFO("%s: n_head_kv = %u\n", __func__, hparams.n_head_kv);
+    LLAMA_LOG_INFO("%s: n_layer = %u\n", __func__, hparams.n_layer);
+    LLAMA_LOG_INFO("%s: n_rot = %u\n", __func__, hparams.n_rot); // a.k.a. n_embd_head, n_head_dim
+    LLAMA_LOG_INFO("%s: n_gqa = %u\n", __func__, hparams.n_gqa());
+    LLAMA_LOG_INFO("%s: rnorm_eps = %.1e\n", __func__, hparams.f_rms_norm_eps);
+    LLAMA_LOG_INFO("%s: n_ff = %u\n", __func__, n_ff);
+    LLAMA_LOG_INFO("%s: freq_base = %.1f\n", __func__, hparams.rope_freq_base);
+    LLAMA_LOG_INFO("%s: freq_scale = %g\n", __func__, hparams.rope_freq_scale);
+    LLAMA_LOG_INFO("%s: ftype = %u (%s)\n", __func__, hparams.ftype, llama_ftype_name(hparams.ftype));
+    LLAMA_LOG_INFO("%s: model size = %s\n", __func__, llama_model_type_name(model.type));
     }
 
     if (file_version < LLAMA_FILE_VERSION_GGJT_V2) {
@@ -1135,7 +1150,7 @@ static void llama_model_load_internal(
         size_t ctx_size;
         size_t mmapped_size;
         ml->calc_sizes(&ctx_size, &mmapped_size);
-        fprintf(stderr, "%s: ggml ctx size = %7.2f MB\n", __func__, ctx_size/1024.0/1024.0);
+        LLAMA_LOG_INFO("%s: ggml ctx size = %7.2f MB\n", __func__, ctx_size/1024.0/1024.0);
 
     // create the ggml context
     {
@@ -1160,13 +1175,13 @@ static void llama_model_load_internal(
     (void) main_gpu;
     (void) mul_mat_q;
 #if defined(GGML_USE_CUBLAS)
-    fprintf(stderr, "%s: using CUDA for GPU acceleration\n", __func__);
+    LLAMA_LOG_INFO("%s: using CUDA for GPU acceleration\n", __func__);
     ggml_cuda_set_main_device(main_gpu);
     ggml_cuda_set_mul_mat_q(mul_mat_q);
 #define LLAMA_BACKEND_OFFLOAD GGML_BACKEND_GPU
 #define LLAMA_BACKEND_OFFLOAD_SPLIT GGML_BACKEND_GPU_SPLIT
 #elif defined(GGML_USE_CLBLAST)
-    fprintf(stderr, "%s: using OpenCL for GPU acceleration\n", __func__);
+    LLAMA_LOG_INFO("%s: using OpenCL for GPU acceleration\n", __func__);
 #define LLAMA_BACKEND_OFFLOAD GGML_BACKEND_GPU
 #define LLAMA_BACKEND_OFFLOAD_SPLIT GGML_BACKEND_GPU
 #else
@@ -1271,14 +1286,14 @@ static void llama_model_load_internal(
 
         const size_t mem_required_state = scale*hparams.kv_size();
 
-        fprintf(stderr, "%s: mem required = %7.2f MB (+ %7.2f MB per state)\n", __func__,
+        LLAMA_LOG_INFO("%s: mem required = %7.2f MB (+ %7.2f MB per state)\n", __func__,
                 mem_required / 1024.0 / 1024.0, mem_required_state / 1024.0 / 1024.0);
 
     (void) vram_scratch;
     (void) n_batch;
 #ifdef GGML_USE_CUBLAS
     if (low_vram) {
-        fprintf(stderr, "%s: not allocating a VRAM scratch buffer due to low VRAM option\n", __func__);
+        LLAMA_LOG_INFO("%s: not allocating a VRAM scratch buffer due to low VRAM option\n", __func__);
         ggml_cuda_set_scratch_size(0); // disable scratch
     } else {
         const size_t vram_scratch_base = VRAM_REQ_SCRATCH_BASE().at(model.type);
@@ -1286,7 +1301,7 @@ static void llama_model_load_internal(
         vram_scratch = n_batch * (vram_scratch_base + n_ctx * vram_scratch_per_context);
         ggml_cuda_set_scratch_size(vram_scratch);
         if (n_gpu_layers > 0) {
-            fprintf(stderr, "%s: allocating batch_size x (%zd kB + n_ctx x %zd B) = %zd MB VRAM for the scratch buffer\n",
+            LLAMA_LOG_INFO("%s: allocating batch_size x (%zd kB + n_ctx x %zd B) = %zd MB VRAM for the scratch buffer\n",
                     __func__, vram_scratch_base / kB, vram_scratch_per_context,
                     (vram_scratch + MB - 1) / MB); // round up
         }
@@ -1296,9 +1311,9 @@ static void llama_model_load_internal(
 #if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST)
     const int n_gpu = std::min(n_gpu_layers, int(hparams.n_layer));
 
-    fprintf(stderr, "%s: offloading %d repeating layers to GPU\n", __func__, n_gpu);
+    LLAMA_LOG_INFO("%s: offloading %d repeating layers to GPU\n", __func__, n_gpu);
     if (n_gpu_layers > (int) hparams.n_layer) {
-        fprintf(stderr, "%s: offloading non-repeating layers to GPU\n", __func__);
+        LLAMA_LOG_INFO("%s: offloading non-repeating layers to GPU\n", __func__);
     }
     size_t vram_kv_cache = 0;
@@ -1307,17 +1322,17 @@ static void llama_model_load_internal(
     const int max_offloadable_layers = low_vram ? hparams.n_layer + 1 : hparams.n_layer + 3;
     if (n_gpu_layers > (int) hparams.n_layer + 1) {
         if (low_vram) {
-            fprintf(stderr, "%s: cannot offload v cache to GPU due to low VRAM option\n", __func__);
+            LLAMA_LOG_INFO("%s: cannot offload v cache to GPU due to low VRAM option\n", __func__);
         } else {
-            fprintf(stderr, "%s: offloading v cache to GPU\n", __func__);
+            LLAMA_LOG_INFO("%s: offloading v cache to GPU\n", __func__);
             vram_kv_cache += hparams.kv_size() / 2;
         }
     }
     if (n_gpu_layers > (int) hparams.n_layer + 2) {
         if (low_vram) {
-            fprintf(stderr, "%s: cannot offload k cache to GPU due to low VRAM option\n", __func__);
+            LLAMA_LOG_WARN("%s: cannot offload k cache to GPU due to low VRAM option\n", __func__);
         } else {
-            fprintf(stderr, "%s: offloading k cache to GPU\n", __func__);
+            LLAMA_LOG_INFO("%s: offloading k cache to GPU\n", __func__);
             vram_kv_cache += hparams.kv_size() / 2;
         }
     }
@@ -1326,9 +1341,9 @@ static void llama_model_load_internal(
     const int max_offloadable_layers = hparams.n_layer + 1;
 #endif // GGML_USE_CUBLAS
 
-    fprintf(stderr, "%s: offloaded %d/%d layers to GPU\n",
+    LLAMA_LOG_INFO("%s: offloaded %d/%d layers to GPU\n",
            __func__, std::min(n_gpu_layers, max_offloadable_layers), max_backend_supported_layers);
-    fprintf(stderr, "%s: total VRAM used: %zu MB\n",
+    LLAMA_LOG_INFO("%s: total VRAM used: %zu MB\n",
            __func__, (vram_weights + vram_scratch + vram_kv_cache + MB - 1) / MB); // round up
 #else
     (void) n_gpu_layers;
@@ -1387,7 +1402,7 @@ static bool llama_model_load(
                 use_mmap, use_mlock, vocab_only, progress_callback, progress_callback_user_data);
         return true;
     } catch (const std::exception & err) {
-        fprintf(stderr, "error loading model: %s\n", err.what());
+        LLAMA_LOG_ERROR("error loading model: %s\n", err.what());
         return false;
     }
 }
@@ -1751,7 +1766,7 @@ static struct ggml_cgraph * llama_build_graph(
     }
 
 #if 0
-    printf("\n%s: used_mem: eval ctx %.3f MB, scratch %.3f MB %.3f MB, work buf %.3f MB, n_past = %d, N = %d\n", __func__,
+    LLAMA_LOG_INFO("\n%s: used_mem: eval ctx %.3f MB, scratch %.3f MB %.3f MB, work buf %.3f MB, n_past = %d, N = %d\n", __func__,
             ggml_used_mem(ctx0)/1024.0/1024.0,
             lctx.get_buf_max_mem(0)/1024.0/1024.0,
             lctx.get_buf_max_mem(1)/1024.0/1024.0,
@@ -1812,7 +1827,7 @@ static bool llama_eval_internal(
     ggml_allocr_alloc_graph(lctx.alloc, gf);
 #endif
 
-    // fprintf(stderr, "graph build time: %.3f ms (%d nodes, %d leafs)\n", (ggml_time_us() - t_start_us)/1000.0, gf->n_nodes, gf->n_leafs);
+    // LLAMA_LOG_INFO("graph build time: %.3f ms (%d nodes, %d leafs)\n", (ggml_time_us() - t_start_us)/1000.0, gf->n_nodes, gf->n_leafs);
 
     // for big prompts, if BLAS is enabled, it is better to use only one thread
     // otherwise, the threads are spin-lock waiting for the BLAS calls and are degrading the performance
@@ -1999,7 +2014,7 @@ struct llama_tokenizer {
             left_sym.n += right_sym.n;
             right_sym.n = 0;
 
-            //printf("left = '%*s' size = %zu\n", (int) left_sym.n, left_sym.text, bigram.size);
+            //LLAMA_LOG_INFO("left = '%*s' size = %zu\n", (int) left_sym.n, left_sym.text, bigram.size);
 
             // remove the right sym from the chain
             left_sym.next = right_sym.next;
@@ -3007,7 +3022,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
             tensor.data = read_data.addr;
             model_loader->load_data_for(tensor);
 
-            printf("[%4zu/%4zu] %36s - %16s, type = %6s, ",
+            LLAMA_LOG_INFO("[%4zu/%4zu] %36s - %16s, type = %6s, ",
                    ++idx, model_loader->tensors_map.tensors.size(),
                    tensor.name.c_str(), llama_format_tensor_shape(tensor.ne).c_str(),
                    ggml_type_name(tensor.type));
@@ -3029,7 +3044,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
             new_type = tensor.type;
             new_data = tensor.data;
             new_size = tensor.size;
-            printf("size = %8.3f MB\n", tensor.size/1024.0/1024.0);
+            LLAMA_LOG_INFO("size = %8.3f MB\n", tensor.size/1024.0/1024.0);
         } else {
             new_type = quantized_type;
 #ifdef GGML_USE_K_QUANTS
@@ -3064,17 +3079,17 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
                 int nx = tensor.ne.at(0);
                 int ny = tensor.ne.at(1);
                 if (nx % QK_K != 0 || ny % QK_K != 0) {
-                    fprintf(stderr, "\n\nTensor sizes %d x %d are not divisible by %d, required for k-quants.\n",nx,ny,QK_K);
+                    LLAMA_LOG_INFO("\n\nTensor sizes %d x %d are not divisible by %d, required for k-quants.\n",nx,ny,QK_K);
                     convert_incompatible_tensor = true;
                 }
             }
             if (convert_incompatible_tensor) {
                 if (tensor.name == "output.weight") {
                     new_type = GGML_TYPE_F16; //fall back to F16 instead of just failing.
-                    fprintf(stderr, "F16 will be used for this tensor instead.\n");
+                    LLAMA_LOG_WARN("F16 will be used for this tensor instead.\n");
                 } else if (tensor.name == "tok_embeddings.weight") {
                     new_type = GGML_TYPE_Q4_0; //fall back to Q4_0 instead of just failing.
-                    fprintf(stderr, "Q4_0 will be used for this tensor instead.\n");
+                    LLAMA_LOG_WARN("Q4_0 will be used for this tensor instead.\n");
                 } else {
                     throw std::runtime_error("Unsupported tensor size encountered\n");
                 }
@@ -3094,7 +3109,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
                 f32_data = (float *) f32_conv_buf.addr;
             }
 
-            printf("quantizing to %s .. ", ggml_type_name(new_type));
+            LLAMA_LOG_INFO("quantizing to %s .. ", ggml_type_name(new_type));
             fflush(stdout);
 
             work.resize(nelements * 4); // upper bound on size
@@ -3144,7 +3159,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
                 }
             }
 
-            printf("size = %8.2f MB -> %8.2f MB | hist: ", tensor.size/1024.0/1024.0, new_size/1024.0/1024.0);
+            LLAMA_LOG_INFO("size = %8.2f MB -> %8.2f MB | hist: ", tensor.size/1024.0/1024.0, new_size/1024.0/1024.0);
             int64_t tot_count = 0;
             for (size_t i = 0; i < hist_cur.size(); i++) {
                 hist_all[i] += hist_cur[i];
@@ -3153,18 +3168,18 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
 
             if (tot_count > 0) {
                 for (size_t i = 0; i < hist_cur.size(); i++) {
-                    printf("%5.3f ", hist_cur[i] / float(nelements));
+                    LLAMA_LOG_INFO("%5.3f ", hist_cur[i] / float(nelements));
                 }
             }
-            printf("\n");
+            LLAMA_LOG_INFO("\n");
         }
         total_size_org += tensor.size;
         total_size_new += new_size;
         file_saver.write_tensor(tensor, new_type, new_data, new_size);
     }
 
-    printf("%s: model size = %8.2f MB\n", __func__, total_size_org/1024.0/1024.0);
-    printf("%s: quant size = %8.2f MB\n", __func__, total_size_new/1024.0/1024.0);
+    LLAMA_LOG_INFO("%s: model size = %8.2f MB\n", __func__, total_size_org/1024.0/1024.0);
+    LLAMA_LOG_INFO("%s: quant size = %8.2f MB\n", __func__, total_size_new/1024.0/1024.0);
 
     {
         int64_t sum_all = 0;
@@ -3173,11 +3188,11 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
         }
 
         if (sum_all > 0) {
-            printf("%s: hist: ", __func__);
+            LLAMA_LOG_INFO("%s: hist: ", __func__);
             for (size_t i = 0; i < hist_all.size(); i++) {
-                printf("%5.3f ", hist_all[i] / float(sum_all));
+                LLAMA_LOG_INFO("%5.3f ", hist_all[i] / float(sum_all));
             }
-            printf("\n");
+            LLAMA_LOG_INFO("\n");
         }
     }
 }
@@ -3201,8 +3216,8 @@ struct llama_model * llama_load_model_from_file(
                 params.main_gpu, params.tensor_split, params.mul_mat_q, params.rope_freq_base, params.rope_freq_scale,params.low_vram,
                 memory_type, params.use_mmap, params.use_mlock, params.vocab_only, params.progress_callback,
                 params.progress_callback_user_data)) {
+        LLAMA_LOG_ERROR("%s: failed to load model\n", __func__);
         delete model;
-        fprintf(stderr, "%s: failed to load model\n", __func__);
         return nullptr;
     }
 
@@ -3235,10 +3250,9 @@ struct llama_context * llama_new_context_with_model(
             unsigned percentage = (unsigned) (100 * progress);
             while (percentage > *cur_percentage_p) {
                 *cur_percentage_p = percentage;
-                fprintf(stderr, ".");
-                fflush(stderr);
+                LLAMA_LOG_INFO(".");
                 if (percentage >= 100) {
-                    fprintf(stderr, "\n");
+                    LLAMA_LOG_INFO("\n");
                 }
             }
         };
@@ -3252,14 +3266,14 @@ struct llama_context * llama_new_context_with_model(
     // reserve memory for context buffers
     if (!params.vocab_only) {
         if (!kv_cache_init(ctx->model.hparams, ctx->kv_self, memory_type, ctx->model.hparams.n_ctx, params.n_gpu_layers)) {
-            fprintf(stderr, "%s: kv_cache_init() failed for self-attention cache\n", __func__);
+            LLAMA_LOG_ERROR("%s: kv_cache_init() failed for self-attention cache\n", __func__);
             llama_free(ctx);
             return nullptr;
         }
 
         {
             const size_t memory_size = ggml_nbytes(ctx->kv_self.k) + ggml_nbytes(ctx->kv_self.v);
-            fprintf(stderr, "%s: kv self size = %7.2f MB\n", __func__, memory_size / 1024.0 / 1024.0);
+            LLAMA_LOG_INFO("%s: kv self size = %7.2f MB\n", __func__, memory_size / 1024.0 / 1024.0);
         }
 
         const auto & hparams = ctx->model.hparams;
@@ -3293,14 +3307,14 @@ struct llama_context * llama_new_context_with_model(
             // measure memory requirements for the graph
             size_t alloc_size = ggml_allocr_alloc_graph(ctx->alloc, gf) + tensor_alignment;
 
-            fprintf(stderr, "%s: compute buffer total size = %7.2f MB\n", __func__, (ctx->buf_compute.size + alloc_size) / 1024.0 / 1024.0);
+            LLAMA_LOG_INFO("%s: compute buffer total size = %7.2f MB\n", __func__, (ctx->buf_compute.size + alloc_size) / 1024.0 / 1024.0);
 
             // debug - for comparison with scratch buffer
             //size_t prev_req =
             //    MEM_REQ_SCRATCH0(hparams.n_ctx).at(ctx->model.type) +
             //    MEM_REQ_SCRATCH1().at(ctx->model.type) +
             //    MEM_REQ_EVAL().at(ctx->model.type);
-            //fprintf(stderr, "%s: (debug) equivalent with scratch buffer = %7.2f MB\n", __func__, prev_req / 1024.0 / 1024.0);
+            //LLAMA_LOG_INFO("%s: (debug) equivalent with scratch buffer = %7.2f MB\n", __func__, prev_req / 1024.0 / 1024.0);
 
             // recreate allocator with exact memory requirements
             ggml_allocr_free(ctx->alloc);
@@ -3336,13 +3350,13 @@ struct llama_context * llama_new_context_with_model(
 
         const size_t max_size = ggml_get_max_tensor_size(ctx->model.ctx);
 
-        fprintf(stderr, "%s: max tensor size = %8.2f MB\n", __func__, max_size/1024.0/1024.0);
+        LLAMA_LOG_INFO("%s: max tensor size = %8.2f MB\n", __func__, max_size/1024.0/1024.0);
 
-#define LLAMA_METAL_CHECK_BUF(result) \
-    if (!(result)) { \
-        fprintf(stderr, "%s: failed to add buffer\n", __func__); \
-        llama_free(ctx); \
-        return NULL; \
+#define LLAMA_METAL_CHECK_BUF(result) \
+    if (!(result)) { \
+        LLAMA_LOG_ERROR("%s: failed to add buffer\n", __func__); \
+        llama_free(ctx); \
+        return NULL; \
     }
 
         LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "data", data_ptr, data_size, max_size));
@@ -3396,19 +3410,19 @@ int llama_model_quantize(
         llama_model_quantize_internal(fname_inp, fname_out, params);
         return 0;
     } catch (const std::exception & err) {
-        fprintf(stderr, "%s: failed to quantize: %s\n", __func__, err.what());
+        LLAMA_LOG_ERROR("%s: failed to quantize: %s\n", __func__, err.what());
        return 1;
    }
}

int llama_apply_lora_from_file_internal(const struct llama_model & model, const char * path_lora, const char * path_base_model, int n_threads) {
-    fprintf(stderr, "%s: applying lora adapter from '%s' - please wait ...\n", __func__, path_lora);
+    LLAMA_LOG_INFO("%s: applying lora adapter from '%s' - please wait ...\n", __func__, path_lora);

     const int64_t t_start_lora_us = ggml_time_us();

     auto fin = std::ifstream(path_lora, std::ios::binary);
     if (!fin) {
-        fprintf(stderr, "%s: failed to open '%s'\n", __func__, path_lora);
+        LLAMA_LOG_ERROR("%s: failed to open '%s'\n", __func__, path_lora);
         return 1;
     }

@@ -3417,14 +3431,14 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
         uint32_t magic;
         fin.read((char *) &magic, sizeof(magic));
         if (magic != LLAMA_FILE_MAGIC_GGLA) {
-            fprintf(stderr, "%s: bad file magic\n", __func__);
+            LLAMA_LOG_ERROR("%s: bad file magic\n", __func__);
             return 1;
         }
         uint32_t format_version;
         fin.read((char *) &format_version, sizeof(format_version));

         if (format_version != 1) {
-            fprintf(stderr, "%s: unsupported file version\n", __func__ );
+            LLAMA_LOG_ERROR("%s: unsupported file version\n", __func__ );
             return 1;
         }
     }
@@ -3435,7 +3449,7 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
     fin.read((char *) &lora_alpha, sizeof(lora_alpha));
     float scaling = (float)lora_alpha / (float)lora_r;

-    fprintf(stderr, "%s: r = %d, alpha = %d, scaling = %.2f\n", __func__, lora_r, lora_alpha, scaling);
+    LLAMA_LOG_INFO("%s: r = %d, alpha = %d, scaling = %.2f\n", __func__, lora_r, lora_alpha, scaling);

     // create a temporary ggml context to store the lora tensors
@@ -3461,7 +3475,7 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
     ggml_context * base_ctx = NULL;
     llama_buffer base_buf;
     if (path_base_model) {
-        fprintf(stderr, "%s: loading base model from '%s'\n", __func__, path_base_model);
+        LLAMA_LOG_INFO("%s: loading base model from '%s'\n", __func__, path_base_model);
         model_loader.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true));

         size_t ctx_size;
@@ -3518,17 +3532,17 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
         const std::string lora_suffix = ".lora";
         size_t pos = name.rfind(lora_suffix);
         if (pos == std::string::npos) {
-            fprintf(stderr, "%s: error: '%s' is not a lora tensor\n", __func__, name.c_str());
+            LLAMA_LOG_ERROR("%s: error: '%s' is not a lora tensor\n", __func__, name.c_str());
             return 1;
         }

         std::string lora_type = name.substr(pos + lora_suffix.length());
         std::string base_name = name;
         base_name.erase(pos);
-        // fprintf(stderr, "%s: %s => %s (lora type %s) ", __func__, name.c_str(),base_name.c_str(), lora_type.c_str());
+        // LLAMA_LOG_INFO("%s: %s => %s (lora type %s) \n", __func__, name.c_str(),base_name.c_str(), lora_type.c_str());

         if (model_tensors.find(base_name) == model_tensors.end()) {
-            fprintf(stderr, "%s: unknown tensor '%s' in lora adapter\n", __func__, name.data());
+            LLAMA_LOG_ERROR("%s: unknown tensor '%s' in lora adapter\n", __func__, name.data());
             return 1;
         }

@@ -3539,7 +3553,7 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
                 case 1: wtype = GGML_TYPE_F16; break;
                 default:
                         {
-                            fprintf(stderr, "%s: invalid tensor data type '%d'\n",
+                            LLAMA_LOG_ERROR("%s: invalid tensor data type '%d'\n",
                                     __func__, ftype);
                             return false;
                         }
@@ -3549,7 +3563,7 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
             lora_tensor = ggml_new_tensor_2d(lora_ctx, wtype, ne[0], ne[1]);
         }
         else {
-            fprintf(stderr, "%s: unsupported tensor dimension %d\n", __func__, n_dims);
+            LLAMA_LOG_ERROR("%s: unsupported tensor dimension %d\n", __func__, n_dims);
             return 1;
         }
         ggml_set_name(lora_tensor, "lora_tensor");
@@ -3587,7 +3601,7 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
             if (model_loader) {
                 // load from base model
                 if (model_loader->tensors_map.name_to_idx.find(base_name) == model_loader->tensors_map.name_to_idx.end()) {
-                    fprintf(stderr, "%s: error: tensor '%s' not found in base model\n", __func__, base_name.c_str());
+                    LLAMA_LOG_ERROR("%s: error: tensor '%s' not found in base model\n", __func__, base_name.c_str());
                     return 1;
                 }
                 size_t idx = model_loader->tensors_map.name_to_idx[base_name];
@@ -3603,8 +3617,8 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
             if (ggml_is_quantized(base_t->type)) {
                 if (!warned) {
-                    fprintf(stderr, "%s: warning: using a lora adapter with a quantized model may result in poor quality, "
-                                    "use a f16 or f32 base model with --lora-base\n", __func__);
+                    LLAMA_LOG_WARN("%s: warning: using a lora adapter with a quantized model may result in poor quality, "
+                                   "use a f16 or f32 base model with --lora-base\n", __func__);
                     warned = true;
                 }
             }
@@ -3618,8 +3632,8 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
             ggml_set_name(loraB, "loraB");

             if (base_t->ne[0] != loraA->ne[1] || base_t->ne[1] != loraB->ne[1]) {
-                fprintf(stderr, "%s: incompatible tensor dimensions (%" PRId64 " and %" PRId64 ");"
-                                " are you sure that this adapter is for this model?\n", __func__, base_t->ne[0], loraA->ne[1]);
+                LLAMA_LOG_ERROR("%s: incompatible tensor dimensions (%" PRId64 " and %" PRId64 ");"
+                                " are you sure that this adapter is for this model?\n", __func__, base_t->ne[0], loraA->ne[1]);
                 return 1;
             }
@@ -3664,7 +3678,7 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
             n_tensors++;
             if (n_tensors % 4 == 0) {
-                fprintf(stderr, ".");
+                LLAMA_LOG_INFO(".");
             }
         }
     }
@@ -3676,7 +3690,7 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
     }

     const int64_t t_lora_us = ggml_time_us() - t_start_lora_us;
-    fprintf(stderr, " done (%.2f ms)\n", t_lora_us / 1000.0);
+    LLAMA_LOG_INFO(" done (%.2f ms)\n", t_lora_us / 1000.0);

     return 0;
}

@@ -3685,7 +3699,7 @@ int llama_apply_lora_from_file(struct llama_context * ctx, const char * path_lor
     try {
         return llama_apply_lora_from_file_internal(ctx->model, path_lora, path_base_model, n_threads);
     } catch (const std::exception & err) {
-        fprintf(stderr, "%s: failed to apply lora adapter: %s\n", __func__, err.what());
+        LLAMA_LOG_ERROR("%s: failed to apply lora adapter: %s\n", __func__, err.what());
         return 1;
     }
}

@@ -3694,7 +3708,7 @@ int llama_model_apply_lora_from_file(const struct llama_model * model, const cha
     try {
         return llama_apply_lora_from_file_internal(*model, path_lora, path_base_model, n_threads);
     } catch (const std::exception & err) {
-        fprintf(stderr, "%s: failed to apply lora adapter: %s\n", __func__, err.what());
+        LLAMA_LOG_ERROR("%s: failed to apply lora adapter: %s\n", __func__, err.what());
         return 1;
     }
}

@@ -3976,7 +3990,7 @@ static bool llama_load_session_file_internal(struct llama_context * ctx, const c
         const uint32_t version = file.read_u32();

         if (magic != LLAMA_SESSION_MAGIC || version != LLAMA_SESSION_VERSION) {
-            fprintf(stderr, "%s : unknown (magic, version) for session file: %08x, %08x\n", __func__, magic, version);
+            LLAMA_LOG_ERROR("%s : unknown (magic, version) for session file: %08x, %08x\n", __func__, magic, version);
             return false;
         }

@@ -3984,7 +3998,7 @@ static bool llama_load_session_file_internal(struct llama_context * ctx, const c
         file.read_raw(&session_hparams, sizeof(llama_hparams));

         if (session_hparams != ctx->model.hparams) {
-            fprintf(stderr, "%s : model hparams didn't match from session file!\n", __func__);
+            LLAMA_LOG_INFO("%s : model hparams didn't match from session file!\n", __func__);
             return false;
         }
     }

@@ -3994,7 +4008,7 @@ static bool llama_load_session_file_internal(struct llama_context * ctx, const c
         const uint32_t n_token_count = file.read_u32();

         if (n_token_count > n_token_capacity) {
-            fprintf(stderr, "%s : token count in session file exceeded capacity! %u > %zu\n", __func__, n_token_count, n_token_capacity);
+            LLAMA_LOG_ERROR("%s : token count in session file exceeded capacity! %u > %zu\n", __func__, n_token_count, n_token_capacity);
             return false;
         }

@@ -4008,7 +4022,7 @@ static bool llama_load_session_file_internal(struct llama_context * ctx, const c
         const size_t n_state_size_max = llama_get_state_size(ctx);

         if (n_state_size_cur > n_state_size_max) {
-            fprintf(stderr, "%s : the state size in session file is too big! max %zu, got %zu\n", __func__, n_state_size_max, n_state_size_cur);
+            LLAMA_LOG_ERROR("%s : the state size in session file is too big! 
max %zu, got %zu\n", __func__, n_state_size_max, n_state_size_cur);
             return false;
         }

@@ -4025,7 +4039,7 @@ bool llama_load_session_file(struct llama_context * ctx, const char * path_sessi
     try {
         return llama_load_session_file_internal(ctx, path_session, tokens_out, n_token_capacity, n_token_count_out);
     } catch (const std::exception & err) {
-        fprintf(stderr, "error loading session file: %s\n", err.what());
+        LLAMA_LOG_ERROR("error loading session file: %s\n", err.what());
         return false;
     }
}

@@ -4056,7 +4070,7 @@ int llama_eval(
                          int   n_past,
                          int   n_threads) {
     if (!llama_eval_internal(*ctx, tokens, nullptr, n_tokens, n_past, n_threads, nullptr)) {
-        fprintf(stderr, "%s: failed to eval\n", __func__);
+        LLAMA_LOG_ERROR("%s: failed to eval\n", __func__);
         return 1;
     }

@@ -4078,7 +4092,7 @@ int llama_eval_embd(
                          int   n_past,
                          int   n_threads) {
     if (!llama_eval_internal(*ctx, nullptr, embd, n_tokens, n_past, n_threads, nullptr)) {
-        fprintf(stderr, "%s: failed to eval\n", __func__);
+        LLAMA_LOG_ERROR("%s: failed to eval\n", __func__);
         return 1;
     }

@@ -4099,7 +4113,7 @@ int llama_eval_export(struct llama_context * ctx, const char * fname) {

     const std::vector<llama_token> tmp(n_batch, llama_token_bos());

     if (!llama_eval_internal(*ctx, tmp.data(), nullptr, tmp.size(), n_ctx, 1, fname)) {
-        fprintf(stderr, "%s: failed to eval\n", __func__);
+        LLAMA_LOG_ERROR("%s: failed to eval\n", __func__);
         return 1;
     }

@@ -4115,7 +4129,7 @@ int llama_tokenize_with_model(
     auto res = llama_tokenize(model->vocab, text, add_bos);

     if (n_max_tokens < (int) res.size()) {
-        fprintf(stderr, "%s: too many tokens\n", __func__);
+        LLAMA_LOG_ERROR("%s: too many tokens\n", __func__);
         return -((int) res.size());
     }

@@ -4232,15 +4246,15 @@ struct llama_timings llama_get_timings(struct llama_context * ctx) {
void llama_print_timings(struct llama_context * ctx) {
     const llama_timings timings = llama_get_timings(ctx);

-    fprintf(stderr, "\n");
-    fprintf(stderr, "%s: load time = %8.2f ms\n", __func__, timings.t_load_ms);
-    fprintf(stderr, "%s: sample time = %8.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)\n",
+    LLAMA_LOG_INFO("\n");
+    LLAMA_LOG_INFO("%s: load time = %8.2f ms\n", __func__, timings.t_load_ms);
+    LLAMA_LOG_INFO("%s: sample time = %8.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)\n",
             __func__, timings.t_sample_ms, timings.n_sample, timings.t_sample_ms / timings.n_sample, 1e3 / timings.t_sample_ms * timings.n_sample);
-    fprintf(stderr, "%s: prompt eval time = %8.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)\n",
+    LLAMA_LOG_INFO("%s: prompt eval time = %8.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)\n",
            __func__, timings.t_p_eval_ms, timings.n_p_eval, timings.t_p_eval_ms / timings.n_p_eval, 1e3 / timings.t_p_eval_ms * timings.n_p_eval);
-    fprintf(stderr, "%s: eval time = %8.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)\n",
+    LLAMA_LOG_INFO("%s: eval time = %8.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)\n",
            __func__, timings.t_eval_ms, timings.n_eval, timings.t_eval_ms / timings.n_eval, 1e3 / timings.t_eval_ms * timings.n_eval);
-    fprintf(stderr, "%s: total time = %8.2f ms\n", __func__, (timings.t_end_ms - timings.t_start_ms));
+    LLAMA_LOG_INFO("%s: total time = %8.2f ms\n", __func__, (timings.t_end_ms - timings.t_start_ms));
}

void llama_reset_timings(struct llama_context * ctx) {
@@ -4276,3 +4290,44 @@ const char * llama_print_system_info(void) {

const std::vector<std::pair<std::string, struct ggml_tensor *>>& llama_internal_get_tensor_map(struct llama_context * ctx) {
     return
ctx->model.tensors_by_name;
}
+
+
+void llama_log_set(llama_log_callback log_callback, void * user_data) {
+    g_state.log_callback = log_callback ? log_callback : llama_log_callback_default;
+    g_state.log_callback_user_data = user_data;
+}
+
+#if defined(_MSC_VER) && !defined(vsnprintf)
+#define vsnprintf _vsnprintf
+#endif
+
+static void llama_log_internal_v(llama_log_level level, const char * format, va_list args) {
+    va_list args_copy;
+    va_copy(args_copy, args);
+    char buffer[128];
+    int len = vsnprintf(buffer, 128, format, args);
+    if (len < 128) {
+        g_state.log_callback(level, buffer, g_state.log_callback_user_data);
+    } else {
+        char* buffer2 = new char[len+1];
+        vsnprintf(buffer2, len+1, format, args_copy);
+        buffer2[len] = 0;
+        g_state.log_callback(level, buffer2, g_state.log_callback_user_data);
+        delete[] buffer2;
+    }
+    va_end(args_copy);
+}
+
+static void llama_log_internal(llama_log_level level, const char * format, ...) {
+    va_list args;
+    va_start(args, format);
+    llama_log_internal_v(level, format, args);
+    va_end(args);
+}
+
+static void llama_log_callback_default(llama_log_level level, const char * text, void * user_data) {
+    (void) level;
+    (void) user_data;
+    fputs(text, stderr);
+    fflush(stderr);
+}
diff --git a/llama.h b/llama.h
index fa1977f2d..d237bcc54 100644
--- a/llama.h
+++ b/llama.h
@@ -86,7 +86,20 @@ extern "C" {
 
     typedef void (*llama_progress_callback)(float progress, void *ctx);
 
-    struct llama_context_params {
+    enum llama_log_level {
+        LLAMA_LOG_LEVEL_ERROR = 2,
+        LLAMA_LOG_LEVEL_WARN  = 3,
+        LLAMA_LOG_LEVEL_INFO  = 4
+    };
+
+    // Signature for logging events
+    // Note that text includes the new line character at the end for most events.
+    // If your logging mechanism cannot handle that, check if the last character is '\n' and strip it
+    // if it exists.
+    // It might not exist for progress report where '.' is output repeatedly.
+    typedef void (*llama_log_callback)(llama_log_level level, const char * text, void * user_data);
+
+    struct llama_context_params {
         uint32_t seed; // RNG seed, -1 for random
         int32_t n_ctx; // text context
         int32_t n_batch; // prompt processing batch size
@@ -195,6 +208,10 @@ extern "C" {
         int32_t n_eval;
     };
 
+    // Set callback for all future logging events.
+    // If this is not called, or NULL is supplied, everything is output on stderr.
+    LLAMA_API void llama_log_set(llama_log_callback log_callback, void * user_data);
+
     LLAMA_API int llama_max_devices();
 
     LLAMA_API struct llama_context_params llama_context_default_params();

From 916a9acdd0a411426690400ebe2bb7ce840a6bba Mon Sep 17 00:00:00 2001
From: Sam Spilsbury
Date: Wed, 9 Aug 2023 23:47:42 +0300
Subject: [PATCH 12/87] ggml-alloc: Don't try to re-use buffers of external
 tensors (#2562)

* ggml-alloc: Don't try to re-use buffers of external tensors

They might be weights that came from another context, so we
have no control over them (and they might be re-used elsewhere
so writing to them would be a bad idea).
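In pointer terms, a parent tensor is only safe to recycle when its data lies
inside the allocator's own buffer, i.e. inside [alloc->data, alloc->data + alloc->size).
A minimal self-contained sketch of that containment test (the names here are
illustrative, not the actual ggml-alloc internals):

    // sketch: a tensor may only be recycled when its data lives inside the
    // allocator's own buffer; anything outside (e.g. mmap'd weights owned by
    // another context) has to be left alone
    #include <cstddef>
    #include <cstdio>

    static bool is_owned_by_alloc(const void * data, const void * buf, size_t buf_size) {
        const char * p = (const char *) data;
        const char * b = (const char *) buf;
        // complement of the patch's "< data || >= data + size" external test
        return p >= b && p < b + buf_size;
    }

    int main() {
        char pool[64];    // stand-in for the allocator's buffer
        char weights[16]; // stand-in for externally owned tensor data
        printf("%d\n", is_owned_by_alloc(pool + 8, pool, sizeof(pool))); // 1: reusable
        printf("%d\n", is_owned_by_alloc(weights, pool, sizeof(pool)));  // 0: external
        return 0;
    }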
* ggml-alloc: >= when checking for out-of-bounds

Co-authored-by: slaren

---------

Co-authored-by: slaren
---
 ggml-alloc.c | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/ggml-alloc.c b/ggml-alloc.c
index 5e1be61ff..4121f3dba 100644
--- a/ggml-alloc.c
+++ b/ggml-alloc.c
@@ -394,6 +394,14 @@ static void allocate_node(struct ggml_allocr * alloc, struct ggml_tensor * node)
             if (parent == NULL) {
                 break;
             }
+
+            // if the node's data is external, then we cannot re-use it
+            if ((char *) parent->data < (char *) alloc->data ||
+                (char *) parent->data >= ((char *) alloc->data + alloc->size)) {
+                AT_PRINTF("not reusing parent %s for %s as %p is external\n", parent->name, node->name, parent->data);
+                continue;
+            }
+
             struct hash_node * p_hn = hash_get(ht, parent);
             if (parent->data != NULL && p_hn->n_children == 1 && p_hn->n_views == 0 && ggml_are_same_layout(node, parent)) {
                 if (ggml_is_view(parent)) {

From 1638757767072a4957f52b9e3594f0b67610631b Mon Sep 17 00:00:00 2001
From: Martin Krasser
Date: Thu, 10 Aug 2023 12:16:38 +0200
Subject: [PATCH 13/87] Fix grammar-based sampling issue in server (#2566)

---
 examples/server/server.cpp | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 10ae264f5..637f6d6c2 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -196,6 +196,7 @@ struct llama_server_context
     llama_context *ctx = nullptr;
     gpt_params params;
 
+    grammar_parser::parse_state parsed_grammar;
     llama_grammar *grammar = nullptr;
 
     bool truncated = false;
@@ -241,10 +242,13 @@ struct llama_server_context
         stopped_limit = false;
         stopping_word = "";
         multibyte_pending = 0;
-        grammar = nullptr;
-
         n_remain = 0;
         n_past = 0;
+
+        if (grammar != nullptr) {
+            llama_grammar_free(grammar);
+            grammar = nullptr;
+        }
     }
 
     bool loadModel(const gpt_params &params_)
     {
@@ -265,8 +269,6 @@ struct llama_server_context
     bool loadGrammar()
     {
         if (!params.grammar.empty()) {
-            grammar_parser::parse_state parsed_grammar;
-
             parsed_grammar = grammar_parser::parse(params.grammar.c_str());
             // will be empty (default) if there are parse errors
             if (parsed_grammar.rules.empty()) {

From 1c4d8bf98145e2a4c0955e154d74aa579c6d19ec Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?=
Date: Thu, 10 Aug 2023 16:52:08 +0300
Subject: [PATCH 14/87] gguf : start implementing libllama in GGUF (WIP)

---
 Makefile                            |    8 +-
 examples/gguf/gguf-llama-simple.cpp |  182 ++
 gguf-llama-simple                   |  Bin 0 -> 607488 bytes
 gguf-llama.cpp                      | 4060 +++++++++++++++++++++++++++
 gguf-llama.h                        |  468 +++
 5 files changed, 4717 insertions(+), 1 deletion(-)
 create mode 100644 examples/gguf/gguf-llama-simple.cpp
 create mode 100644 gguf-llama-simple
 create mode 100644 gguf-llama.cpp
 create mode 100644 gguf-llama.h

diff --git a/Makefile b/Makefile
index a3600e4f2..f5922c95d 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,5 @@
 # Define the default target now so that it is always the first target
-BUILD_TARGETS = main quantize quantize-stats perplexity embedding vdot train-text-from-scratch simple server embd-input-test gguf gptneox-main
+BUILD_TARGETS = main quantize quantize-stats perplexity embedding vdot train-text-from-scratch simple server embd-input-test gguf gguf-llama-simple gptneox-main
 
 # Binaries only useful for tests
 TEST_TARGETS = tests/test-double-float tests/test-grad0 tests/test-opt tests/test-quantize-fns tests/test-quantize-perf tests/test-sampling tests/test-tokenizer-0
@@ -337,6 +337,9 @@ OBJS += ggml-alloc.o
 llama.o: llama.cpp ggml.h
ggml-alloc.h ggml-cuda.h ggml-metal.h llama.h llama-util.h
 	$(CXX) $(CXXFLAGS) -c $< -o $@
 
+gguf-llama.o: gguf-llama.cpp ggml.h ggml-alloc.h ggml-cuda.h ggml-metal.h gguf-llama.h gguf-util.h
+	$(CXX) $(CXXFLAGS) -c $< -o $@
+
 common.o: examples/common.cpp examples/common.h
 	$(CXX) $(CXXFLAGS) -c $< -o $@
 
@@ -393,6 +396,9 @@ embd-input-test: $(LIB_PRE)embdinput$(DSO_EXT) examples/embd-input/embd-input-te
 gguf: examples/gguf/gguf.cpp build-info.h ggml.o $(OBJS)
 	$(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS)
 
+gguf-llama-simple: examples/gguf/gguf-llama-simple.cpp build-info.h ggml.o gguf-llama.o common.o $(OBJS)
+	$(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS)
+
 gptneox-main: gptneox-main.cpp ggml.o $(OBJS)
 	$(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS)
 
diff --git a/examples/gguf/gguf-llama-simple.cpp b/examples/gguf/gguf-llama-simple.cpp
new file mode 100644
index 000000000..35c3c8183
--- /dev/null
+++ b/examples/gguf/gguf-llama-simple.cpp
@@ -0,0 +1,182 @@
+#ifndef _GNU_SOURCE
+#define _GNU_SOURCE
+#endif
+
+#include "common.h"
+#include "gguf-llama.h"
+#include "build-info.h"
+
+#include <cassert>
+#include <cinttypes>
+#include <cmath>
+#include <cstdio>
+#include <cstring>
+#include <ctime>
+#include <fstream>
+#include <iostream>
+#include <string>
+#include <vector>
+
+#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__))
+#include <signal.h>
+#include <unistd.h>
+#elif defined (_WIN32)
+#define WIN32_LEAN_AND_MEAN
+#define NOMINMAX
+#include <windows.h>
+#include <signal.h>
+#endif
+
+
+
+int main(int argc, char ** argv)
+{
+    gpt_params params;
+
+    //---------------------------------
+    // Print help :
+    //---------------------------------
+
+    if ( argc == 1 || argv[1][0] == '-' )
+    {
+        printf( "usage: %s MODEL_PATH [PROMPT]\n" , argv[0] );
+        return 1 ;
+    }
+
+    //---------------------------------
+    // Load parameters :
+    //---------------------------------
+
+    if ( argc >= 2 )
+    {
+        params.model = argv[1];
+    }
+
+    if ( argc >= 3 )
+    {
+        params.prompt = argv[2];
+    }
+
+    if ( params.prompt.empty() )
+    {
+        params.prompt = "Hello my name is";
+    }
+
+    //---------------------------------
+    // Init LLM :
+    //---------------------------------
+
+    llama_backend_init(params.numa);
+
+    llama_context_params ctx_params = llama_context_default_params();
+
+    llama_model * model = llama_load_model_from_file(params.model.c_str(), ctx_params);
+
+    if ( model == NULL )
+    {
+        fprintf( stderr , "%s: error: unable to load model\n" , __func__ );
+        return 1;
+    }
+
+    llama_context * ctx = llama_new_context_with_model(model, ctx_params);
+
+    //---------------------------------
+    // Tokenize the prompt :
+    //---------------------------------
+
+    std::vector<llama_token> tokens_list;
+    tokens_list = ::llama_tokenize( ctx , params.prompt , true );
+
+    const int max_context_size = llama_n_ctx( ctx );
+    const int max_tokens_list_size = max_context_size - 4 ;
+
+    if ( (int)tokens_list.size() > max_tokens_list_size )
+    {
+        fprintf( stderr , "%s: error: prompt too long (%d tokens, max %d)\n" ,
+             __func__ , (int)tokens_list.size() , max_tokens_list_size );
+        return 1;
+    }
+
+    fprintf( stderr, "\n\n" );
+
+    // Print the tokens from the prompt :
+
+    for( auto id : tokens_list )
+    {
+        printf( "%s" , llama_token_to_str( ctx , id ) );
+    }
+
+    fflush(stdout);
+
+
+    //---------------------------------
+    // Main prediction loop :
+    //---------------------------------
+
+    // The LLM keeps a contextual cache memory of previous token evaluation.
+    // Usually, once this cache is full, it is required to recompute a compressed context based on previous
+    // tokens (see "infinite text generation via context swapping" in the main example), but in this minimalist
+    // example, we will just stop the loop once this cache is full or once an end of stream is detected.
+
+    while ( llama_get_kv_cache_token_count( ctx ) < max_context_size )
+    {
+        //---------------------------------
+        // Evaluate the tokens :
+        //---------------------------------
+
+        if ( llama_eval( ctx , tokens_list.data() , int(tokens_list.size()) , llama_get_kv_cache_token_count( ctx ) , params.n_threads ) )
+        {
+            fprintf( stderr, "%s : failed to eval\n" , __func__ );
+            return 1;
+        }
+
+        tokens_list.clear();
+
+        //---------------------------------
+        // Select the best prediction :
+        //---------------------------------
+
+        llama_token new_token_id = 0;
+
+        auto logits = llama_get_logits( ctx );
+        auto n_vocab = llama_n_vocab( ctx ); // the size of the LLM vocabulary (in tokens)
+
+        std::vector<llama_token_data> candidates;
+        candidates.reserve( n_vocab );
+
+        for( llama_token token_id = 0 ; token_id < n_vocab ; token_id++ )
+        {
+            candidates.emplace_back( llama_token_data{ token_id , logits[ token_id ] , 0.0f } );
+        }
+
+        llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false };
+
+        // Select it using the "Greedy sampling" method :
+        new_token_id = llama_sample_token_greedy( ctx , &candidates_p );
+
+
+        // is it an end of stream ?
+        if ( new_token_id == llama_token_eos() )
+        {
+            fprintf(stderr, " [end of text]\n");
+            break;
+        }
+
+        // Print the new token :
+        printf( "%s" , llama_token_to_str( ctx , new_token_id ) );
+        fflush( stdout );
+
+        // Push this new token for next evaluation :
+        tokens_list.push_back( new_token_id );
+
+    } // wend of main loop
+
+    llama_free( ctx );
+    llama_free_model( model );
+
+    llama_backend_free();
+
+    return 0;
+}
+
+// EOF
diff --git a/gguf-llama-simple b/gguf-llama-simple
new file mode 100644
index 0000000000000000000000000000000000000000..d7600282e3315a4bc805a4f6c01550d58bdb80a5
GIT binary patch
literal 607488
[base85-encoded binary data of the accidentally committed gguf-llama-simple executable omitted]
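Assuming the Makefile rule above, the new example should build with make gguf-llama-simple and then run along the lines of ./gguf-llama-simple models/7B/ggml-model.gguf "Hello my name is" (the model path is only an illustration); per the argument handling in main(), omitting the PROMPT argument falls back to the built-in "Hello my name is" default.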
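Returning to the llama_log_set() API added earlier in this series: the llama.h comment notes that most log lines arrive with a trailing '\n', the repeated '.' progress output being the exception. A minimal sketch of a callback that follows that advice and strips the newline before forwarding; only llama_log_set, llama_log_level and llama_log_callback come from llama.h, everything else here is illustrative:

    #include <cstdio>
    #include <cstring>

    #include "llama.h"

    // forward log lines to the FILE * passed via user_data, dropping the
    // trailing newline that most (but not all) events carry
    static void my_log_callback(llama_log_level level, const char * text, void * user_data) {
        FILE * sink = (FILE *) user_data;
        size_t n = strlen(text);
        if (n > 0 && text[n - 1] == '\n') {
            n--; // strip it, as the llama.h comment suggests
        }
        if (n > 0) { // bare "\n" events are skipped in this sketch
            fprintf(sink, "[llama:%d] %.*s\n", (int) level, (int) n, text);
        }
    }

    int main() {
        // register before any other llama.cpp call; passing NULL instead
        // would keep the default stderr output
        llama_log_set(my_log_callback, stderr);
        // ... llama_backend_init(), llama_load_model_from_file(), etc. ...
        return 0;
    }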
zpCQ#_E8+RdUf6-3*_7^9Q$l56^$rji3YMt(;6(M6x$6|7dJV2$Vhmr?vAtZ(vAtb7 zw$~V5k<#7|AV@oHZ_gvN*J?_!H05LeO{ys;11+*1GloCfu|1%Pv}ZOdYllB$_|%m4 z@Z9SEq&iC@zRv*U zKWU#E?UPvfbH9~k7Wlk3ssjBiejA>|ZxCi^&-}4M-xU6e{~VQfR)6f>gEZLS8&b(u znb;{g4q7rc4X~HyH{^1i(5o1iqHrs>Zlt_4>;#MqGvfgVK3BoJae?EZ1AL}}zt(}j z<`^lz92cxSjz0lQ|GuLv_t9tvs!%~)qM&jJbqb;AK03vL`jLVfprEn{bt<5+(7fw- zm=P+DlMTxbeUsMfRXe##q`3lTl>8khNLbMl74Cj8=lm2eVp_Tz1wuGSmu6EMK$R-y z?o7)IBHwz7<|B(B0@$h8aXh@85y zT;u|wHOJ+So(g9Y_J2_9X~E-Ala8Il1^nJt0c^T2LQ*Lmioxwq4I?ZLHLTl+WSM>u zFKL87j7C9JJwK|2c1eKZXVIT)jKTg3;SdI#R&{_{pceWL*@c05F4!u2qpl*e3^SuI zYgEKFnC^D?Rl0e*V)q_QwEaBc*@*04?50o-f__i+?gEjyLc?d6dp%cWX^N(KwwN`Sw6EYS8BmSUuY3H(--HqGet+r0IzxF5I$h( zT{HswC_Q|b#5Q0VHVg~A8G5iY1EDbp3azbN6MRt+FU*Alu$SSGL;JRoxu1uBC z5+h=#7ZiJ|npPms2rnn@aJmsn_plj-(AXvy@S&d({xuY-#Zk@*x>?u{p5YcKHH9?o z71D2Bm}mCDXu2c=OfL}%kzSwjUWFuBpEW4!21REN&llJC@8MY;Q@Gz{2A1<)?ZU={ zr3m!ynb)5KHD3?A+~Cj!IVdaAYiD-1v~WzVxb6eJu~zoxf1!o`c$gFryrgX?!1m6- z8X;nu7Mu;aHh5Q&ISpJ<<|-1*UzX)H=K=FrA_MDvedd*dc^#481AOMZA)Jk|@bv-d zWgZ?5G|5{UB95?I18A;r&+@?dRH22sg0@iXk-O5$)q?!8N~A7rcU#*?PJ7(Bf&h)6 z2wdpk-)8EX(W;x>^>EO=3~%9O%2O7t1-twkFH@1g%QC*@zjoQ;YCi-Sic0v+RQDpjcdVYih$A;X%(mYj34U)4g8Gl2zQoRFTc<2;Yol1` z&Y$slwxmc%j_j_uFDRyfgG`{k2}2e)u>KkDDYz+s$k2`OvP!gpy;#O;co7M~N0Jeb zBpDy&8eYUyO9(MlfWwQHSwNM%h8HcDM{aHidty*(bjqDQ2#L|2)a8egN6*tE8Sj!5 z+LKvZaerev$ueXK4?XU&8$QE4%%$7hGVhn=Di3BnH(7s>eFAWKP#TSZIBZ0{aFjtP zZLo!suH;bvo+5{)W+mj1cC5%DJ%5FW94)j2$_+8x4^9CNPREWeG4r}=2-s)+u~Jqa zlqCv9wCNa-2eL22?ZEUzCgk2;fPzmRCBPW~=;{Dy3gC4Bs6r|M!-=|ICU}slG{E0D z(^h)+t2!T5Tif6g0w{Mo&H!7f>nCm@RHKEaf-ME4s+^e+`CTSdY>m#=6Mr zOV@lG&G{4b^sGab^Zhu4@pCP7Gbzk-O}*IHhkdgQd(b@FVv~_>-U3I)$Q;$%`%ww4 zQS(O`k)g*T4fX|_elWs+t7dk@b|K8oEWqsNYJ?{32_FXEmEc{mGtl(jT!GJi?$aZu zd&_ZfftIs;ts!OHZghQi{n zp<&0v7lI)|en||GSONGpNq-IO#OaN1h}(~v>T6Vl!*4}D&p@}SKYl{506#7@vI1E= zfki2gP==zUmdFC##2OQw;3b-8~t zLxu7;mhCFUL4#WGZCdO!FsiE1hYI!Z%V^{pP`5)PgTDkM_a){@OA(aY z1aQuUZ0!KNjMGT!qm_6Ud2+Ik7Q6)71h^Rpx4U(!!oy^M#eB$>_DqxZ%$aemKXxb5 z^tzeXUNggg^~^GA+=B=*Y+72(kE}v-Jj^p%?dgg$KFXwgKIQHqK<=)%$C~e7>QBtT z7kLjv4ESd)pi+2dI4tYrnG?EOJTo5gRUfKNmP8zyGvU((mamq@Rvg;(a(?70UjBWQA`7ISF>KsDU;MLeG~>unCNsSjovR zLvlI61zKI&oo9e9$=1V>XX91&ZYX$-r0DET;UC}={^)oX{LTiJa0j@-CVz5naCpV&l3!9w8uaIl}_3zo4zH>EIRhsu#=f0%Xqh1V8De zGcB?lN>5!cAE}WMz0JsvX|*NnAqZ}ZmJiswi58=E(z_TIgi4NRwOeVFjhw})6P!>~ zKt_p$I1m!!WpF(NYG|LX6}^eiNP9KUtB`GYdfOL{`S=vc(0)Wl8+sfsng{Z3Xm?2& zDHqLyF#z*E*CyF?yJjXv#$AX#@P^;U(3l3vnxTAj!n-|yLEXHEa3UaKm)YdRw?K+)$@Jz=D#wtwiHL! 
z`%l&pI+DM;Y*+lar2Z2ap_H4@~1ay;>k#ad20+t}e z1wFV1L`P4lX`-h)@SD=p+p$`;qX7C6QRY1aRh=D2@tvU8{}=r|6|Dx9PvJ%Nx3{E> zl*`fjyR47&cTSr0m-=>R{WV?aFL@fM89>yS_Ym~|tiJ=%YV`L!UQ~b2mXwilIa+^L zA^-)QsA)S)`f_2>S$}`m75$|R7N8@D8uK24{-5>t0<;?a9m$L8@6RMPly(fYf!m-P3s z@1(z66QaLEu*gaozb|0bw+a%C{VgGC%zFr0XyT9vt@gJt3@6JY59JZ$)iENE_LS{{ z1ma=DYCVJF2-=W;dASyxh8NXdpt4x(ay-}yKwpd<1kKzFv4Im8PvMH)vA;49z4Ss% z>Fc$~UNxnEz$x7evs5J-mF6EUZD(X^9s&%p!aIZotO(j==%&|PDsV=4fTiC27&ySO z9lmgjm-esdZw~anyxSKHB4PIt#U!s=HMv!4rknwktFpg^C{+D}(d#c0z5S&TuAZ z0mUS0wt_*iGy?#}#86(K*cLz=Lu@<98{bP_Z`2d(B8CAin#cV5}h z1k3-sED0gjhPxF8qape#7|7FDEuK)WCl|MD)OuAyfv&IGS^k$P7&?TQ#43~flZb~5 z?8J%c>SF?k3cWxoE5I)-Q}|V*2z8Ii#Lu{X__%*U zdaxWdLNg7mgQoynHQf@l@gXRdIQx&5guV=>3kNDu0-^BDw-k`*q0=N^jkr$o-&gr+ z0CkeTM&+yV&`JJ-Du3CL>aS4wic>pzzeMG)#QTgB!3q_TF<3#i@5d0p!3~5HwCey0 ziCQU_4ne-=PheA+xC=a)8tsc!f{Q`M?;{H#zt1OyJp>zpYCG;FwiXXZ!S{+&1)p0{ zd#Ctbtn%MJQvOVp-*TjUL*+-0ls`b_??(Pn@#O-X!dELmeWlYz7-4ruwElzfLg4@{ zTb<^wRr&n<61z)V1oU}}$0&qpGrnH|G4TG#K({EMJ^%vsh`&xkw1s*P#E(e5Q*m-h zD)s!cY|6!Fs?e2=k>YRfkvW)Spp!b=F^UQce-pLbm*A1_Q_E~s3n~8t){>=aS@Ofy z^544!V=X^yEd{EU`+wM4-u+Vg!V&cY(#5Z8S@y%$a-6D#qyGo&%knP-W0cwdX)W`d zQF+^uM&&3~3&j%;B?|I0ssCYhTJSF@MXD5E*O98+vP(L)@_*2=W1NoBfs9DGj^gH8 zRZE@KvCuW_7;Nf}21Ol$FT)UtZ3zvwUk8R497%budYcmYKg5Rrd$h4>r(l9|{Rgb& zSE?3H3_oBk2jbEfHM1nP`u-g&owfz%JGo?eGN|N&b8gVIfj4eRTTx})k zx6Xd);!D+G5&YO&sI6=uwglusabCk(Vaa6}O2)%@P=P4-(qbRr>EO+3fd1G%svdx!4&F|s@EV-|iPEM>lp~H={3<#W%!qUG zVKFab>|^wsp0Gl8E0iSxzFvyWxGiK)`S56Je-a9A)lPnKKo7reeZmfbaerP0%7#Bo zxHPB^gnNnV!7pK0`KU1!WI8~6=A-V7XqI70um_^*tau=tZXG@AbI_D~X8#$z0|N;@ z1$Bgoo{wYq%YKRNPS$}VpGD6h`K^#V?7`e2i_NI#;WqpG1SsW;s0&bn4h*t)5wznA zN8#{chn!(~W$YImvS!Ah?)KtlAgLAU^$T3ExVi8k7B|+pA&+91M(>Puq<=EL%5j#a zfyjb-4uJ40diWbXyvAL}r^uL0a~uxBOV@ECuna@^SU|@vB?gDiabn`e@oR^V}5TlRq4sQG4_HA{`Gc#UOw>&dr9d4EPWD4pV zya`@7c++J2b<%oGjHp_0G@E7?zmi!w7st893ai>Ep|r?E6yhz#W}2mYXJHL(&hDc< zIU8Fkn{mm*f4{=?eeu=byX|JsUBNB@X)94aKdO*EXAq8H4Z3HskF5hUu6`>ct#B>Z z>ssI}18E1d7rSSw%#B*$cqE~e5~~zWVycrTpOVzcA&!0JU$k8rkzu6m+gblB>cKAQ z88*AL?GSd!{z%wm7|oj9#$dl@1=QA8kVI+z3~%Y1@e}c*=U+Cf>^Kh&eDn-o z>SlPpAHh~LbaNglt|*Edo5om3T2xOR{@`(r{2Q&W}Z*?H+I>^F86Ea{ByyoI7$a1|xilcXQzKGe_GE zWKtawD(I}TwLju6r0&U9qYr{ZD+Ok9(Cv{dIOF?)zS2HJy%ls1LI>4~po}tsOw%mbXzFoYZ!1J>RfD=%^h3Ub~#2yA`?KL{ZD}$P)=r3?~r} z#^@kii$ZPlD`1R!FMeY3ctA+9g7g2%Fjq8^>k+8M!!GCXpqP0)>~dZ|eE(qn>xfR& zOJOcSxC*zYg9Vnr#0GCWN%aReqFt)y=D41Lv)z}!zwBdA`qw(dlDR?zL42uhD$zjx z^_Xjf7}&&(!T`cPXtfXGmxK0aHa{!%q!Y!0Rc`tCaLSh1U;o=%W?>>a_zy6-wTYni zpSW+giC}J<+C&h$6*q?Yj7T%=b}a|7Gye2CXS_MlKb;I`=u)>^=xd2YfKpI`p=RL{ zy}MCdZI}$o&!itMC)9|}@>-88BX!7kaMk!vFpFk9upc7}ui;zM0C81S;@oAJaS+!g zywndID0Mw!ampIEUCLIxEiH{Z=JnYM zsk_u&0Me5!XXvF{eWh=qf4!Qvqko^5)MFHY>re!t>j52fx8SKL{4En2Bq0y41GYh6 zTTo8|b|qkug&~0kN&fDXe4hCuW2cya2T^*q++gesuv0Q|b5W%4Lnt9_Y{c^rf;8Ur z_J7j=;>hshBd!NHH&x;ulc6vCNm2MvVc_G_3=Dfp2Yx%}ZD#hz*5ajN-u0%#Dfo<= zk{?^{^R&Cy>E}M9Mq<@r7F3|!y`doU&4NYGxJLlU6aHu5(-s}Cv>;QS5Nr611PU0? 
zM*1JtJG?gx?-@9(r2ppjd4Bf-Px#%y4h+<#vdiDVK&(W;+T9Ia`1J2ym-q6bV)uOe zjQz0@;~~Lm&`miaI4?lyUhHSQyWWelmhW!JYghznB~>n!Lk_$+a)>f*2GaDvN0pqO zDGwWW+}0 z*)Wb0!bLpk>EWUoc4-`er6@2u4prbNR~@a|2r$OM&d_ShaSO!*g73H`^oI{$A#%}Q zI?i0Plk(1M-b7dPP4vLuLk!~XnU?0yLh$Y_Ha}wHHd;>!q4wM0C09567~Q1(-K729 zrZgRzT;H^J@}{Ppleag0Jo!BcBUg9)7~KJuQ`{Y3IVIge=V&GAyo!akping2n-Aj% z?|Bf-{%t&e5}8|>fK9#ZOo#q79Yk%I1#8FwZ7-MMoWcX$DGSKHHrHy^+-N2Jci{2l zS?IDJJvF$)GroWz#d&J@Ed-||G)B@O(~G#deuHbteiM+3(vhU5x&i%6-z z2^X}JQd)IYQnDkzr9JpDYQU)3#Wi5m?2;O(2Q5SkhzneZ3tYY*n*N-ErVir~{lEC} zc1Xi`)M{&iFbqSg2v|X37*1P)sTF*k7=Iv7wkwUlMp_;(#c9M)c1fB-N(S?Cj11=G z6lX9mrzArL9(NZ?X)QzqBDV9lX+h4!&cNHZuDkzCd-SRC4$5Y`DoI~Y2&cZuaEq(q zrd)msuMR#_;Q8RGbg8#_!BeJs;ssAx>WLRT@$@Cs!wa5r)e|px>Z6``!Bb!LWDl^i zMTqHKkmk<;&MH#mg~H!*_7an^;4vkixO1iR*GMPMvWwG+v+R=eDNT@J>G(kgevyt} zBvXcQEFgM(vB)qRl_Iii83t5h%YIKKjwA=wvl*;C$)3jUU=ToqP&YH8|=^A99%4k_qm0pInkc) zDHJ`_WTEJ*tLllFjiQA$_<6fk7Zub4D*15Jl=@UwSvaX>f?zq}{E@t5~gL48E(-#fWPDxE6T&6Mh-Sxi2Z!{kFbNVeyJO(_Bn z5jhUMG52K!z(iRA3pZa-u$_@1%lN=|nOfjuG>(D>#tvxrc)%iVCD-`aQ-k(^&-xIF zc5m$I+@^iwBbaKQhDpK5R|_rwkCBw^dYZPgQH>dCv)42oytuvtSlK&+>jU5AX@Tn? zWWmwc9)lYS1-3y}20Bz_N{HC=$g$TiXwRZ|xi&chwpo$q>AsB1XN$5)fFRCUXiKze z#rljit(yDI5-nq{=V=!k9btB$8?#Z_k5HK^)1_cC33IK$ND+Y9|BellsbI1QQzS5l z(E*n8x&xD?U|^9Sm~pzm9JFAbbYQX-OfF%*TgUe7vtUdICRf4W&K-VvCuESPH? zm_7;ykt_oLlpXuTG*q{pzG*=+m!GD(S}IPBlA6)a)E4WA5pO@yEHRxoi{HkS#ze zzMP%);QCk%Ns$<@9re+QY>y}r*-DAX zR!T%Rl!&H7SD{gRuVSNiUd2Xzd=(l+`KewxlHSo=wwz;&4c1|Y%zq0X;|05?=pBO{ zy`#hY2+gDE;8j@0G+?MXTF58%JejKWKcCt^nIuJCXB#2Rm5E|7X{mV=Wvb~jgy|D$ zDH=)$vnbM1^pp@LQ>1Y+b+nZb=2WDm=qw>jtw>AJTtb*(k=9Os(L%*wEX)xQdyCy^ zU7yV}9jb^SNTAlTS za4j3=KQ+ORjSaJJKPnGB@5iAAi#&&0D<&6DC}ePzLC8$om<=`~;qWiTzrBU#y=AXyiQii70| ziR4`*Di)R}B$9cNsCZbOkVx)DqGDosLL%80iHeKm35n!iBq}xz+WMjv@(CNa%`fK( zg>A5}@q6gQAGGXiu$I5C>}xmDzIG$+Yd6xqcB8Vdc}tsp5qFRWC))ZEY--uA6^Qx6 z?i9`)hv3+iJuJJ*J}yfCQmiuZ-zNAc*{5K88W0wD7F1TPmLCWZO(Ow+RDZ&PS5y57 zSO44o1oX}I7Mu!2s6W+_P!?i{L_F9|zi;nQD1HHoB>G(}`y^652~cupf#;(73fq$~ z5F2Eh4wgi?s%zR^3>^pg5Of=x4#KgPEt=23ZCcHC_lnMy=*uq?qEJXsZv)6;@wDYm2ubE8gFpLT!0p$$|o3<6DTaE5B}QTL~Al z@}_#U=T?YY6~~LGSI+B;a}7Tq2z4&~d?9DI%$59 zrz|5r0Z{N&LKc31KP9kQhsIm}%C&D;{z_E=+>~l97@=`fH}7qc%d=6rtR*>$;0r^4 zXak@_GSrGRpK|TdAz4qK1`R9pUQ#X_mfJ7k?pq>-sj~wCtfoiDb6AOYlRSru0NlIq;F;N zcutJuYV1CRewX8VLe4_{lH++N^_oi5AHR>Xa}g0~RfQ-TKmDKfj~c7%i9|0A1il=hf2qz-Hb$#`0Kbq> z*uHiP^hs=b$bwoLAND=v)t2T|dG#M18H^Jz^-}-6;9%#_;5$TAg*iAX1(W6F7{eKL z_>vrDQyoI>L9HV=J2{l2e|n7SO`BG`P&$Y`AQnF*`(JJtXFh8AUuMcqMjR#%0g8h?W05HOsM+eka=F#irn-(Y+oN!( zba#J$@$&URJ&F0XVJ?OE?unQpa4S?y?Wxa1G zJCp%Kx+>66wkV?}%SjM>lu?uIBv1uG8M#h^*rkk`K2Czzri_}tPC`Q8xB>bG2nB>f z(iH&tg<;t-F@GXXuGu|tQ&V)}_NJDJ?>YU2EO*!7XQtSD{WU;hc16vU#vNDIz2CUw zvbw)F?(o%ZZro8+$K#aYRC{~&t4$wIf@5P692}GC8$X(NQbT%E>!gOJZ!56fzM>w3 zf)#u6cWaT~lcM+IEP4)}{o8eXBL}YDz^C2)C)xG{&Y6unZeE4YR=}9Lx$Fx)aIpLA z8MeNIaRqW5geBSqsK+-p?znc<%Xo%!0z`Pc0k^=WWJzjvA~n}eEw1T<*B;*{0DTFt z4t;?mqM>Y=Afiuj#k~9r9B({2jy?JNwaDW{ZfSF8*zN1km&}xkVAxZE;F~P` zTls)W!7!6;6f1sII9BNj$tqo8nfErD6-+9g@3-+hyBp0F@iUvCrMqVlt^c(V)-BY= zFc*bf3;PtWoZ0CcqcmxPsr52^h8c?fuP{2VBJCHuX8-~dDn_faV2KhHICK^~2M7sa zr)q;0}o(Q$XJ4&qiU3dj$=7jPbO3%NnJrPkE=vOWDt5)=@R`jbr>KOUorC)XO z0O^E&XdlpDoisv(2Gwea@ZP9SqF;3q{i>7bSFMJLdiUt`JB^g9+>jU$7R{1r_4t#R z4q@*ljydUJeVZ{))c2eaH4X;e=%sJ@N;eBpB&3g%j?lx20AvdNuma-;u~8gWymCWQ z+9_`X1`IRR&T!&l`XB|y%0al`e6?2rW*=&TGfmnyA#E$tio&~n5KS;{NSO63D0 zVAM%Oh(b-wefCR;UWw`bw-JVGgp?u?P6x|}C)^PDP@;5pqeKMv79o;`cc%f}0 z61G|iv+$N4(WkKb-BMaTc5kVe6uv;J+WFMeA!Q|h2GV_zQBzc0%rdEau?p$I+fhJ5 zsnBY#K{d*6h4Zpxx{jAAf0f{zxW}uh_6?5ABirYW;;>p*hOcrcD$!x3bOks7Utqf( 
zEdh1j&jfB**o|ZUsoAhGDe_h9aS4ajPDfdoeKYhIh_(=!R_t}Scj4Y+IjCr1!-H~A z(X@sKQ3zm(kOxr)V1bYaQ3POlkOxr$U~!NKTLCCr0{qQS^E>#PVd0N>v;5ug@5zB% z2t^?B5R1SIf)Q{zx{arS@hF|3?8#)5(Uoy3#|iO9e-re7%zsISoh1B}r{oK+FwXkCQ2qzC*ra@aA_Vy3!V_i zIo;Nhn%o!7sSNm3(T&|_D|{j!ST6x4*WLU;E?V^KRh%4<22B*NK44&Gz*8~s z35a6M4U(6w@&up7z^CL1)0F_9c&`LAgh2L1nJ7ir4xxC!XY0V8e}5b{rJCNS0gqoz zUTTSPgiX?Xi^agw#w!8lfr2=vkvr{VILGe1J0v;5rQ!3y=aXO^} z9+K_o-u(KqB}f%U7pE)uqyT73n?H@l^h(^Nk)O#TWy_Pj(hv(|VJ1>YDCaa2!O%>k z$WT6NCQea_B0%7=wwe7=5Bz_uPm4T-nsjn7S#>v(upF#*(m>m6h<-;tp*?672N5n) zQ70}l?OM`U8Qs&k&^?XI_i|k~lszgRdsLx(jH9#agMN(u-}ZQl7n@A|G1uo6XeF#Y zI4VfldvFLoj#zf$BcJ)0_S+-bd-T#KZ=@td{nX}<_V$nw*p6@8Lp0+Hlco>D{Ik_b2p^9MyjN(sQZykW2nE`z`j$E={tAEEa2s zsqop}Ci3W?luhJO*&5ctUMt3s`+tLP0PULZ2bySDOZgsPvOVO|i_8^b4>1sbkK-8U zW%DPGKaPEkf`TRqe6UdB@3F5!%pdM=l#5tg<&3`nv?zUV{E;O|R*!UmU|k1YKv_5J zp@XaPL2>1{Sj@mp7-EysR*5ksaxmD797dUDb8ys5#4m=$ zN?fo`;dy2<4{e`+Rq8zCjE^61{`pFaB<7#qXv+Uz<{ui_IjvwK;yS-)!5=fyZoL_I zORXT^fm^kj3B3fs-aVoWj#`Zu$DS7Rs=3%vX|=#P*lZZP zmpcwqkTCm3N1iH4nq3TMoMW!IUW~17Wv>=7sEs>tIAcd2Sb{q9>VnIsbd>;s% zQThmk%UmcF8$q+#-DtUiWdSSGPoL+?)LLSgmQ_KGKU!{Vv{=;oReNf+qt_2%zX6$O zzY#L!2_}=~qa=gaCY)E6(GJtMq_j`$JHYM_X1^i2?){PG{l%*h|dF0KB1mZgd4% zJ~t9_C7>4!Kw%O=TCxt@a@D%4_f+&L&iDmNN5*0|Z2(eY%||ETZf;I)6(lZg0crCu z0EF0p8XL+7uDdlCx%cKFr#Zy$ee*B=BUA3_k7F8y1Ygx6zd>OT99%D>kv);VjOn$f zu-T=dL3n_^2xI*w>WQ=X7|Fc@Dy}^@Xtf(4vy^#!J!;a!ZyNo-ur|VI)k@)fi%y^= z$Tj*m^QdeX3EC?8h<#?TN8inq$Pl{z)AX*-$if^S(F10dph5~7&q%o#BlGxFJhFh7 zl95wzDRB)$q(q4^3|a8^4?`CG{llg-9m?@sunfP(1@-tXzMuuaeJ{`hUm{-pK-uG* zHd~~7hou=t{+6<=*vUwNvxe=`aWv-sw~X{H!r?>N1uh7sS-67PgKTh`jO}N6DRKP` zGS>AJj>Yn}Z7lmVo@oQ?fwu0l3ec)$YDeA61(zf4T+;r1&+{LvB(8T#Jb!bcOwaRC zl}7a%X|m{key2*KmW?!7dq2NhrBT^Nnk>Pe->cH7b0bYw;?K9LG^*Z6lLh(nhg2Fh zaHMfv?s{HF6r2}^@X%$L8z@WrNxO&CDw4kh1OQP+e|*GINUBw@K#~`>sQxX?O!HQ? zR^noj`Zu`MBO@$QuaP~0bb;Emu9CGqez~^Cg=>3UxVEnXsa=g#AhiowAhiowAT`21 z9>G$m`!ZgP?uZkFV&A=mm%iP-2(R*C^#8YnSE1qVSs=558X@nCio7o=@~#o`zNpCik|OUKA@7Tdye}#8t`YLSsL1=0 zBJUa@?~97OFDde_5%RvM2bw|NRxNZn$cqk8zoQ{bMdPZWh@n}_(bB4Dcc+|Rm82bj zekuKZ1WTS8QUeYHQUeYHQX_jrP2(S~tSec#tnm+*)fF$SZ~TL=&RE#e_=lo8*FrDs z3qH7J8j`gHt3h#u6HXK!$wu=!QyF9j7OikvWG8-wgd-B%I-=Gdy)_`U|_Tgz;4Oet8(^APOHjk#X~U$Cf+H2>JT1WXzH~Ops6x0_mtg* z1_;sG=EKCW)FM*u9mxuT7=fJgM4Jv}&wL7kG28zX1Y>r^Q!PyxbWhP$_6c2O|D>zz zEt-8`T-^YF*+1bgOEUU^8X*1=Og}WF3HHxpWBRG#AXwsHqK$GEI+6PuV&r9ha;a@g z*%wp~kl~?jk-*?gBokOzrsfs@L&s+(leIPG4H_UBIOBj~RCnk5($M zs8UPr0uF!u_`873x{LX*;Jduw{B32+I0Lt!aU9!K=ChooN!8^@%HJQ}qZwP;>K}kM zxgjih;d?nLinaGv+`xcC>$GLCtZ>hW&!WK+IwvxfJ_%27uWRn3mN0W-Ul=ZDPMiwi zhRaL{H(ZJ#+!`RPy6|^u7i1w`y$iA+$GX54p*APfu~0Q(!R3vL^!&qG@M~19!qJQ*O=GduH~T*sJ9?YzCBvYO%a+Dchk34r8Acto~Xx`;oBE zVfW}Me*-vHaIdRZ@JkiVPOuDe#c&NN(n8Pj8A&dc7%L zv7P^7Xr)A!(RLw;J_{%@BCZ;H>LQoSQugbn5)%S|T47af?4Lcj)`unv@k_0sL^^xJ3|MsDtX zGQs1avEjO_a+gSw;`yETWY9Q4At`;JAfG>r*2H&{1ZFi4^shu#8f4!O;45(*mO2KH?b;6$FN_ThKtYPP|@nzfEt-M9k{UbT#unXBq~@vmy( zWyGqcLpjL7FEa3pbo`E3#kOJYFZ-*3&<0?L?zl9ooj6GooFRiM_`)GJca0^@T{RYn zEnk`rX7^nJ!ad&iZcsR<@7)mV6s(`cs_`Pd++w+fSHeS^EeM2NL6w48x$sGFDi=Np zHs#`wU7PAg9m9oCH>cf5^R^*>Nl{NS$50Ij* zrbrgVuc~~2)W`4fA-;L5gB-uwA31k2?5*gk_%FD&+{sgNt0`ebElwN<>NgRzd`rvOy@%UMymXwNIru6$-?l3g7rU^KQmB8XY$8T@~4>c=e7s3Fa)7+ zDf#1}{DH=${_!l? 
zz(NBjsR5$}40fYQ3zXE*CIXs;jhw}^Tr^6ss4+o{N(CYD0ZlNFL|B(qYSC(IEmYfw zKDAX_FL>=P>_P&95H5m-%sG2WLh#$y^L_pP`SBuq=JJ_2pP4iB zxy{UHn1&$>L0}bTF5p0Kn1=DoG|c0q6g0MMCFmokN$2$D@lNQ?^F$#E6-jDzIzs_D zZ10o!;diQ_H=ub450-Bgzf(U*BMQ?#$m3(i2Soyv5BSS0JeY+CvJ9Go^X(mIj#9lv zr!=$hnN@HyeLNDKD10Eof!@sfYoeexi4l79oG30$7}$yh)8}$jv`(xZK~lRJc9CVm z_)Y2U*@6aG0pHyRr_>)W5W1~2riS|1K3<48e2slvg{P48W`Z;tn*bNG=N5b}l{C+|U5W{Aj~cU=f8s*7N$H3$;FJ?Aktg?Aks>feXpzUii^^Avw9A z7m|~w_CkjRu@~^OGE^jFz835S^oh4Js;}~&RbA(}9Z;$MF%YGoC=rtG97Oj;y&p;h z_M=5s`>|h*XYHW_&wdE>8T4GwN65|-^<3;1VWF4ubUo6AKFX8z2n#)wr|JpizDZ2vJsTqsY{5;>ZZ3KBV* zlBOmWaZs5R{%Q)dk_!}OB~MlKep?UvJpvU?0<~i#&{hm>@6m6b52KyphK* z5Yxq6tVeUVdnxvwd`)Zc&ga2`J$>)SVhvKTzxB!&+uABC2E>)7^LT+50}LCK3Hc8R zIR=nOp*On2zvYHX`i`S>w}|gJK*P~f>`)1h1f}W>h(>TS4(HiEuEsP7p2p;|eO$?4 zPz3Q#Y*6Et{K-sw1V3;s_Lo)NF#vOLK;kth55V~yJm86L&wKDh7l8R*T+Ue;hMF&- zo|RC4Ce#Oj!Y(#6=EN)T`10lNV*PQ)+;5a&b8WJDZn*{w*a23ZV&x^Q+yvxsU2qS! z!bo+ExY0v-<}C1ax&pqI1iR}5uZm1_fkfD?sc>wU%s^7sZo%(7W3H*$tvJ>I68n-( zUTjJBYNl3)XXMLv_$3^I%C&gj6oQeOiuqzw-kos5wZ3Yf;%MVeYhTcez($dN@)GH% zE|Grv66t3yk=}HP^yW*Xw_GCq>?P9AT_XMbCDJckBK_he(pxW)-qv}#(n+$TYTftM zwpyiz7YwQKQ>|(ANR}I-QfpC`3`J__Hb==<_>A25%$4j|eq!_FUz1;R#HKhnIPl3m zW-n*H1$Hl+xEJSSV-5R_8Cy z%Gv1mWj%VrTs-GvOZuk6fq6T|cSCu5kFPHXWOUI?`QqOJHODsJ=dv*lb2 z_#SOG7oS2Mt%n0=cg*hv6VdmjiDEUjmq47wDhe>e5?v2-Ag5q+z&jZ?B+eb$qCAiN z)z8%xPrAvKec*{`B>J26&_e}DMViv#c+IugNm)J#KQWtqCUF4;9 zk=MVAytFRztXi@af7)QuA{)etB>#n#zJdblUzT$gYf^Q*2F(5|1{(rzVXzM@ z{r~xEM4tHnhQFr#Z}ZnmgTF5LuKX3l1ZKxh7;Ijg;IGrcd362?rkapi3BC#Dnu4cb zvwiz81;>VvnqL2;WV1s)Mu6<~eYbdy^5Gf$0&F%~-}H~JNL|k#FV;y?df;bj575TJ zQ#bo<*;DHMHtW=#4DAjKF5FJ0TQ8Vyy}vj!XGg$VxQA)GMcQtE@t~Zq{Jueto&qEO zyCwZ=GU9v8sIvf!m^FhcMc3?Q&0f^Ju^l_#1cn`8S`^z~>-Xh8dK@*sV@Y4fnr}%2 zdrk?n=P{UB(d_xgyEg}Jyw~Tq{q$}$H^o+g+FL12Zh4;itY@A%In%1bMJ>rsj*imsW z%27LRDzFg=st8!Y3FEJNSR@fed~U6GH#l35J%8$7UDwyxLv+otvW+~Er**tRs>{NZNxqpE2w6k#?UPQ76eI4J6Ln7-Cb z&DsbkvE?`q0aYhTlRL;k3gd1`_B7)$&YRa z63M$8iNq2A=)PkQ7vG+ck$$w*Kc*u0s0n$Yqkh}GS!eKT1n&?*%;iVAfeG{rGXeM- zBojdIJ)TtF1DGUQ3cP+>(&QtY?nlFFS|3+ZQ}K%V3!tp+HCqa5i{F|bx4I6BTA!Xt z&7!QOc*SFJ_g*qPvFq%luCr6R&Q9$*yMNc&XpFX4*VzSKXBT#zJ++Bg_XV2<7yR_@K-4gMV=!O@NN+*p2Ya0aqc!N|*W zMvf@clB&+a!m5#vE69-+Ru>M*gNsQ{4zR8``O;Ya?Q4h!|ITa2yj|-X)1!EIjJ^uq zo#)mzng!z~fB1jQuT#OV%dn*>hF6n|#_;Ov$g4HAWLGq=7WTk){F?U0Jk?VgpvfBT z93>gGs4$vQ<4{K|qef4$ByEn-S6wa@{5e^-H&(4pqRo*yk$H~6qFZN!%fj`4ccf4! zD&9NalGOVnq>@<=`EO=JD|&Ulc#irTpJ38!_ym*QP%55%_si#i^YWQ~MH!g%M0_TG zo``(qUtN)UP%`auvYm3UojkuSaq5{s;uLUh^VExA;b)>)c)&d60yy}^C=Px}=6wrH z8Q+z23tlbWpH{rLJR`~8BVN89?<<~Eo;LMJySE0GO*w$QIo||Ye{nzt`=YLXJ(`R36;fSVkj}FNI11 zN^en~`4Iqw#s;skTu58v@Z=?s%%{>t+;y?@pSC<~?QN#&0iA{wcz%WoI?zEZX8ZOg zsGh+#hbKGZR?oPjjnyX8T44}d-#C_BfPE!;O|CmRbI+8oZny3P@^2J3+d8a1RiYMYA3F##Li`g?@Pea9tQ#!XI?Bm z<;+OG&;tLe&3tteU)?0=^t)FB5Onm~Fdh9C!zgebB(yqETW}n+#@u=)u>Rg=G`YOT zl7P3MB^+BC6I&J&dod>Va!hP_OzgFo*vgpL>oKu6V`6W|#8$<`K8lHXV`Bc8SZz$~ z)0i06lA zLF^#P?w{B9rnrFb801z-=N`mfg}^>gxgrMI-FGeNUBT^i7TDHDS(FXah34AeaP5HS zQcqteq#d=yyihf5Zfmf?&g?8{@tEZSl@}eRwX}nHvY*0c3e9z{apn`aq5LF-E48E! 
zC!wCfu7L5lQ_Svyhp-k)c^bk_*B1YZ`7_PW`CqLVV4jS7>#@HDUxhW_RWJO(u6SGk&(nd;5t#D*`M7Gra}k3XiTSQI`I%dtxaPdM-eFqpsM_D7 zbO83RtiX!v)8qr*IRC5j`kE)iIof=Vwhd+9IN&EqS;!Mt4o|`OKB`B123s`&Z+9Jt z6tIsw87C7PqZ3c-iHD;ToAgA^ApL#zaV>h{>FC6BdSXj-;srhNLUdxQNHn`5Y36b0 zH6OfhV_9GLA2oULRf3Ph$lv6>KAfYo9zMa{#4)$$VBYqi|7O3kf^3%#Qn_fGy4 z4fEitN1hJAmLOk?1uXJmZ-FtBFY#%9yW1Sc;yei1p*Xj8_!`WYsp7SZvWhlvnhsSw z=zWv3P$wssK;ox~2?B}zo!3x*`5KmI$QsslR>Q;IM=pcFTZhdsvWp?H7AIcd{6Uv4 zAirQMOp7s>NMfx1XI>wCRc%>ohi^sU1Kd%Zrdz%H&ERQPEv0UiMSbr|qj3AkJW z`Usdyz#Apt^f2H967aty;NJ*%GXaw&;LTycQ4(;x1bm)=`2=i4Z{gMb!hk6fFkJ#Z zNx)kPD2_0?6OR1aXiR9PXv_sX(3l4acpCx5*+qAPKMc590&bOnc?7%%fFbNcPk1E^ z^|FNeyM(%$P}2xC4p4K$P>)HdMH1>X?A`A_L?|nuZVN+Alu-9cs7-__1Qf2YUHf0y zD)-i$*0yN*e!<~Y95b$MklnMfCGX(egt*4TWw`qQgra&)J5?ae$7J2py!hkHWnwuy=iK^ z?$Oq^tljJ4`C@h57;eA<@Dt|1*~#9(+2k=v5lpdzwDC${s#dj_m6{T4as;3ylFt>R zJOZ73%mBIxK$_>_#0U^>xg^MEVUQoN9#?>O3Rd#nuBlj-@r+5Lnp0?V=VPN^Lp*3} zWhcFXJAE4iXh4U)_y()c>UJwGlAXUBqF-&(Tqm7B-SD5&U zAg>TakA&CzL}@yok18z&yePd1+X#Wbfnaa}07VJfJ)=qhE&Y8jhf8!5@0(zh;045@ z#AtMKDm$6IoyI^^r(k_AjU9IfsxkU|+Nsp24!;#2o*kaX4p(Xp8|A~D9{dnVK(ci9 z_(+GaNxVK9x38l9y^Uu;8}x>ijl-r%a}*8$1H{${1RFBmGvib$zXXg?Etc7t_))6= zh+3<>&G0`s=b_-_o_K9mTP)z5&-2~)oAJq(JqY@#ykUi7(u0k^1Z8lz@-4)2sCcu) zUVgGE^351De6cwG7WKVw$umEAcZ^el=iq{44ZSu|Ou}4*IsO;+<$mP`pl5U64O{Tn zD%NTnk7KOzwYWvR^aDDuZ_-?FjRUCA4-9)ZS^$5-s0`x8nj}Z=+Br*!Wuw!5zXjrb zf7?29e-wywth4uTKUIv6_o8SXCyUK(77hb>lE*VwpE9mThz5#_DHCbGTW-*YV!;t` zJ1P-Vr$zKaFMYig_|e${kq+KY(FgN6&mE^d+>mn#(lhQ?5z_M`lHsACFb{!Qga!-J z!(Pl#UAEM!d6u}cTOF>$!FVyv`rb_-(xV?D;P;*SkswFc-Yv*c0n$K@9>W9VsODjk zqnN5n-&qB>5g?Uz>%^=&VPSYB`Trmn1C=)v%LD;!*qsik)~S)MBa6LHBwKkO_HWk0Yq zdR`rr<9lAaO0iQs05z`4Z+7HvpVMchscb#mXCIRNQ;n1Ef}+=A3KPSO=o-&94rXP7 z)%`aCTyW4;K5&?XVNa2bStN`_si_h$euwFZ({$LFKKc++tMH+6G~7fF-*5}3asM<1 zbcVVvKP?Rvx$_f0YXm172)c;~{eYd~ zg^^dNnOhyEFGo_ZX#c{B;r6RjU~I~T$LO=CeSx#HYL6D5ylBaTOQ5s!y<^?wS7(ebN&FC%YCUV& zo>FXc@v1L5n$NN=KMc+UpC$rfegxYAwz=T_*u_#a3Aqz+!$fPO0*GDcIRh$4KOA=U zj_4ev=4*Vm;1T>pTIj4fpj10Bqakx;SME2_JE*UrI~JNM+hEK{bivNrT{WGk-mNP6 zwJJ_eFGu54_%mFIC;u&$4u^!}8$>yyak^(F4pbo)IQOL43KgL7xka340f);l% zy}PVjb>|NwbP;R~)*=X1QaZ#J1*h;frY*zRrR@1K=?xseyI0jB3*OM=00t$LGiEI2 zu-F$fM*!}m_=RmSRfJYu`8l?=`N<|RjH`0u%B~5H#~VP6@aEVorJC+;D12t9?lEx2 zR53g+CD8gLFmz8$a?gTDC-DS!Y|@jz0fM*#9k8l<7LEXXycutl(rx_o*0!}ZT;-&X z*X+2mG$H`3_>lr5eE;TN`Nm3ga1>4=SlEH2_A9+W_lRLEMfW+26qcI ze*2K9H27aY4S+X?0c{fS6jUOB{TpE1EtS@IKi`f}MhHhP6CgeShn!p8+ST}|!;591 zPedWMYf-q|5R?ny>Z*hb!s90k{HqX- zIthaMmW1njT8Q_3e~$+g%;~81{aYPEi;F|ujiLLAdBW5%{9O|MVF{l@cvZr0#^-4K z5b#m-1h>E7Bqd&Nn{2z&c89ItcGY!Wbw8OTGFBs_>WRcSrTSk8(dlBU!~LYC@mq`& zh(Yc#iI9Mls^^fYxt~ay;IY1YtGoOlC~RA4LZcT@W8C(GDG&zBQkjv)jI|RyGY+OS z{)uA^no(6t&$54oGygilQ+_bjR&}$b>?NCftQjog*S3!=;_XL5SMa`W80xD05WO}k zRWtE@9qtL1aqf|WSoPjUP1HKp9NK|-@9HWvW|J(Q%lx^ zAD97av9!aF<(ZE~67yo@RpO(y!@oa1O1M4ByKK;<-*?&k_byZZGWnuEqwoRV;R>Vs z(`f(Y@Dc5A-yZi^(WTM#rSf||TnAQQw+w3!4}7BYJA98-WD9E|-}`?%p5+x7h4F14 zZ~wmb`TyJT%e_w=oSy%H%>{-$*_)qL7wyW3`+HWDCt05s?xmhi6Yiy+&f}x}=_2}< z>hBWnrCe#ky_74D?xpN5_*?N|5!x5)UrIWb>ge168*=3>@Gq(*;H_Rb26LEN9Imqt zIKwX;6g!_V$lwlw{-YLP0hpe_jyT{MZ1!V@KN02wNZf7!=L8Gr`$$`0=HTT02JkZ*^iryJ7bT|7A1p`YVRM{1tFGk0~D+leCc!>+v7e>5+VOZ**! 
zHvfXzQxbm{IC)oH;C!J^{{&y)6U0leDqyjhti{mxQ7BQCU(BE^znJ9_)kb$?)hW!< zYzRLzC1o+|4IH{_y~u|PI_5J?z##t6l*CB+?os6tjxI0otMZz0->1AkK5+gnZ-?)t zE;vjfOl;%oq0m2wuQ6f4Iri0@9@tV~!%ksdsL-N$7SS({?~u7_A2xlb)MHIN;U5#A za9^C+do7-jcCYGN<3z#*I6hboSj;AL>eV~D%e+U8yh`MWMgMrDFfgQ5)63r>VG688 zqPl@bG*3itHlk@Fnj@nrRuTKDk=Y=kKQ^LX5w#l8H0lgOq5-G%z<^VFVE9P{9#}e7dGBS*qYxut*?8o5^XUlHWc4h zk@=kY1AkRkTDhghf`P**(cw3rg`D=V2Fz0xJKRyeQ56yfQST-gE-kEa(iJ6Fe z-CaIU^x*#T`oNIgUk1`!acrsl7rXtokvQ0d*B1Y@BJ-kl? zahW=@W;5#(!sE!u$SL*MVmA%L#g={Nm*Ju*uLSzv`85mG%R;O#EC0q-jLtv4S8K}$R~P3HtP-9r#IyEWg9ZX+VG#XVw%2qgHE}ln(?&WjFeSyy2E{KyT^+mXp-heMUZ@?3=4e;m<_@mx{xAX?A(HpQ`Z@@vl0cXEM z1NMheiE6<9%QYaHKZquO*?9xXV;k^{-hkzL1OBNuz^^x8hu(ljy#eRGLj&3~2Z7Ev zmuo=u@E0Y%>bwCJu?=`uZ@>z@0sqn)5YQX&x!!=F-hlJpp#kk@fIz1!{#KmaehwsG zdJfEwZNO5!0e{jP@V4H7wR!_~>J2!gH{il$8bDM!%mJd)XBJ0+tbG$2d-P`Dceq zu|J|?-}W7_?=bp_OIM>mkR3C00`|@a&Q#07D9o4XnEyq`e3g#*dL8pGbK*>}ba7JFyIBqC}XlJ#mJy(vs+&CdkwYkt&{ZFJoUF81N$!aE+hH$~3N6W&J&G5MOLNhE(IDeDW1A8|i z5Nq#H-n$XXNY-uL0!Mc541CkND{ujN0yc$PKM8zEWg&UW^1u;l2}fGDAg>;&^+=sW zEimi0UeU#j0^ZIFBxB1Td<((<9h~XAU{j}BEKD0jC~VIP{{G#zX!|#-`S(0Oaib6^s3eE0rwdhm8b`x zh3Th8>e}sinPr#(t~8=~qQ)E}nkJ%IGOC*aer#lN09@7AYDB#vI>3l77twx3RG0yV zD2NJsKp#CYsy71C9`HRQ)Vrq<>f6Hz4M;FT>G4Krh{*^IZ$rC8Eu$_nBWiaBiI8%{soi#uUn`( z!Xz-sKf~gJNr2Bkz$C!uTQCXm8AuQXOZ0-L{0kiZw=L9h^n$x!D{%NrEpAl6=O6V7 zj`SeKkZJ!yWz1Wb02FVy7}#>`h)me67lxssxZ;MjD6zS?9#)0H9|Y=-H5ETs@uTOm z7Yb$lejS1HG&6i;p=7UH96a5mGo&iPY^G~4Ye7*~c&gXhibUwcVMdwo* z8|r}fHfbpkc+=pKM*~L~jg(h(!xl(8L&p}pCaeqpX(Reuzk3^|$F_mOz21hUdK+HT+ptP+!>4*1X!2njq=}(p z8>U$s{?i6bwR*pM3%EcYHI69V>n-@T-hw~tEr3N&V7yUp0j)i3fwV4k)`I2#c?&AP zdkY?lZ2`r5y#>$dE%=Mx0=Q8Y7}M-V%xUJ~QyLaJwm|N(`Og}W(qSSCJ7dR*%rgK{ zAK=Ld`oGc9he=hSDNUZ5h2}P*P4f<)(x%WJeONF$ox&ZZ+NddP?tt~@os*v#(O7Gi zO1;OI=_s$%QKq4jDAKUTCrvqgN<%_Bl!clXZOM>yr#Ne5XZDN;)}kEbGu^bC_<|_0 zrE|o86^r-+9dVei1!8~I5vOUBh|-kCC+#_i&&Jd5*+M*Eeiqwl9-`*baOAK)CgNQ;L z3kCuqTJ?z-jmdn{c*!R%W_;3Q!>2SDbcT4e!WM;iR|8xi9?fCJP`ShypB0OEwT?KA z`9zGyV?JrF{JR&Jgcpj1!G6$2bv>8RIN*i7_r$v2(Q~Jox#)lM#Bhs(`PN<)(dOEYFGo7L$MRqfX57Lz^h1svgpF!Z`)Y|PQI^O1C9)Z24k$B>-zsVGqmH; z-2!KJZ-y?BiuJ!g!36LWRNi{tI+4{og!e9yWB)P!Og3Z-nBX zLdE|tA&c0`qTi>1i_;qmagWx#ge~$4^>h8U1Ww{A^(Zp{?Ga)lz*QzH_QHQOwz+zG zPlf|^s`oelM7Pu2Hbh!8#-lMnp>RMh5*SDyDRYJt$mR~8A(|WMKeV?8zum|MP|FI( zcJmjH%C9A7IAMmC13hb0K73hLWZm1ncusmo`i7Rk@YYZLzVz8!;c0S6M*6N6*tUc+ zR2Rse@>!rfIk7vr^?E&~b~r#A8Nw5D8I3L;C;G- zS(BgBVhYTA#C~J9;@iqHJl?>l(CWab(ly1mf1Hu-Rf^{X;5Xp_EJwNi(W5N2{?+*w zTvs;S+!hF&xrn&|huh9o>vYReKMtUp?_xQU{(mg(#>@3=5XOt5kBZ*c{#Z76AQ~mc zOHY}QJ_HGX9Fi|uYmZ}lF(@!VvKO=)D(0x65*a3zK>F_e#dC&dr0;FvFxqP7_xsXc zoWJrb437R7Mi@19ExJUPPc1%KA0(f$nt&J}N!W+5TU4`4RHF|NS#Aw$TZ?jbIXZli zIEP2reNN5I#_))Y_u%eWd!e8g+%GG~h6OYOM;y#g#?Xv|Z%djnL5_t1zvOezWAyGb zWysa&ItGScCHrvHUk69gOF3RlIHWH&6@i{0oQMIF^}g*w;PIK-jkr1@^HJJ4hUb7N z%zLEZMsx9Pzsm4nD@JH*U`XjUbj5afb#kNEtrX)QcXhtR zNgna1|3iIzSFPh}v8&@KV}pd*6x(u3`1`ZGB4&^xofQiXn{oe~a?o)LvhT zU2VT(xF^Ec)%H8~c|AQ{Ozrg*AJ$&{ozWBbN8WEa#qh|Uw26a=I?cNSXU<@0fC~k` zTDenH3Bx+(gshU`DP~b6q=~yNewffLoLpu<3SuB(SfSt=waVKKyA17)TrW=gqJg;b z&{R4ZX9+QDIYeP29SMix2|q_Ocs)zvDkPY2Kl2n^K?Q{C0iHpfNJ#s1)PR7?T%&(H6L))gN%Se&)B0YV~0N;an2NL#r36 zBTgpyZMjHg5Z*vV>Rimi53opcF8Q7dfjtlJ!B{>R_@?Abbj8nhV=NEESOy8 zXdgxc ztg??s$!Fg)Khzo8X+4mBJ#vCc_&}1raTqk|{);p?a;49qPlXpFNF#4|@M6@)Yc3ik z3}cF{@5w=@)e%Ydl=5(3cU0#UO9kp-~gC_*vlNR=A+87{m+p1~nnp6bQ67 z0X(Rsp$9JikVe@L4P==a5Z=1ide zUO<9sEuoJjgvxs?6=`0?jJ#zc&xpMkhJ3jLA}KYr~MAc7WU*1Gy~>xw8Z0mobnHVaNj=ARA*K4~HRTKSuZG=@`hCFyw`H zb;5HZKMFg({+=K|DBeH02`-S1lI~xJ% z)WQFruewh9_nmKoTa(5_^+E5XH0Xf16)DsAK~~}Xi#RVB#V>J3)Eby7af^LZ=>#pe 
zsdOCgR$HqoZ=6(pM~6Jv#L}6^RWRvoIJWfX@y4;KS0M#KUU(Z1P~;g?aBOMRP2!l- zRl{*?$(+R_eoadCMoe99VZ>p_Z)HT%~I)4)^%EI7NwGii>OWy5d!w5&DJgo;B{<&3Zi#gzHH` zJ#AQNmh}{|p4-i$B6I-uc%%Fn97LBDpgZq$8r@j{K?dDG)k$4QivgN+&z9`TV2)JQOcqZ%}DU{ zec^h%wIuOr)wHE;uj+miO$6NP!eD>}^tgMe_&_dSVepf&ohJYCmz^djJ<)0MS)8Qn zr0!x!rJW>i_@1N7whTH#Yv` z+DJUAXHIX8BomE-&tv1;jz!{8#fISMrt!9@agEjExVT5*wcy6_09AS#d1b_8GD9GfUdV{}LPj z^Xrj#RJEVH(P{F*y^&-#;`-yU@&7(;igT+4RcE^?Rb@DWjI*{ibvEUBuhDY-6-K{c zW%kad+#)Nyl9HskQ8hBpFq8svJw=)8X+d zybh@0jzOT+HnD(jNO*6oL7v}%j1PLb77vi;vDF~A+j9lC`!VjIYwY(UL7o?d;cF!P z3le_(ZGb;>qkum@1n}Qx3i!!k_&X&00}_7Y2*AH6;WrTeT?v1E7(QLX|4hRFf$*%$DZop4ILaUFg2TMSS9NVw-Q$PZ-F@IY;gf3<;wHHJZ-Puf2SV`4w@aycU5twH zIPZ8%6663ol0AL1A#YD8F+UCNp!J03xkG8CiFB+0$-q~d7c*&LJ#D%03NdI1ypq?zQEe%X>cfjbs#1L%tPL)0O-jw}H*;P- z0UsVX?+@nzszZ5W9gYmAsqWk1Aw{#|B>yt280X3x>rr*Bh9+z7J5$jH0Nn{7yEP5s z5dAeEn1l+BYjl3D!EA!(EEHPJm(N9DE%)Em^ubGwDYO%)(1BAPj*h}RlY1`uRL!GH z1|i_V~c6CJ-61TIC0%dwRM&Fl&IPcmoh9%Y^H1QX@X`7zgco$h*EPDwZVBH zdhGy0&fIfK&29wL5vB0xTeF$LD%>6)r#ur+(E;H!gqxwbjaVjLqj(M>f-hAL3`!_gwoF`eU9~r!T{9h{3LUtEH=!>Fga(b@^|qC=8IoJs zVWO_)UHocEumsc~3q!fuT8ZblsJIt#_!4dcNf$jIx;u7%BVUn*Y2bjEi{Np9^P*&) zzaw7g&mvyxn=0a;@di~ITXEA0p{ZuVN0o+4XJ;wZ=Rnvspup;gipa%tC+h_Iq9C3v zBCgko{x4+f#S1Zl=4RE0)EIeunYES$aFYz)D8`A&j`0P|CaNZS1~XO5Dosn|t8kkP zhQkj*(OI!5jLsLGhYPCNadD*&@N*Z^Eydid-Sn@Ns^JDrj7xdyC}KZ#xIbnM8n#q& z%z{43=(iefN1bpT$X441EqrmsAbU07Y!Q%DWV_^Ai9d<}GEU}CMd@%xOjs;Ys0nrtDbzfP@CrFs_;!2cO z<6pLV@pM%AFDSzP9b6AyuDMIXR4xxh${$VTKte#V&>=dByAz-WiF+)=m_$z3W>U5^ zlwJ+*xg5X^d?BVK{x++*Ta=n&uo#^M_r;PO*-6;F!rfF$c`Yl(%q^hBb~K)x0N;%W zBMdyM#poab!}xm`#O?_|j{y)|6(Il!LF{GS*3YKkIpF6wwlc*KkXWYB^{@#OguBLPZ_v7YU0_zZwC_ zX>*)U5-FX2NqPqQwM-{kf|^LG2Kprlu~PkWD94;xl2lbovWIUlmy|@(vt@E_yFSuM zmUL>y;UV6M=W=3OFJ?S@KQ%Pz()7$L@RU?uCuuq}1XbHCGBvy%Jf#v5hwF$^-HRV5 zOxW-R_@#CtH?Fjp*JCe5h_iY*p709=!jly9e!*C!BO+6?KEpA+=*98LK{`FdOm*Ns zEsVH4(lfp!^9#`!zKowXUIQ)*hY}Z1l2u57-zwGfxS(SA-adTe&&ZuX#rV;cH*jsc zAYqS3NEkVto{@&K5&8vwA~QZRh#XjCwL!?Xka*8!TSX6HmRiE=^{PR?J_pHmxX!2% z61EsfY9{!^+t>I)8cCR->}tGxhoJB}xhu8K(gA-J4G10|3pHJb8XF6x#zKL3iI)xn zl;%3h?gagM+xjN)!P_~y;f&9qUtfTJk=I0I&DvDTny^Yx)`a_#LzH$2fr!8CN86jGlR1*3&jrMd?ui!P{wB*}R2Z@3!u))Cojm70&i#4)%yo>bh3KmpOy zcP#3GpRa|{2;8LCb0V0Fkm^ZTcqMd!5rg5x`X@NYui^e^(p~JgM`5*OB(Ya}o|D)w z#V&q&JZla zo0OWfXE;5Miz@Oa#7NUh^_)Sn@h5|ih>(!c*$p*+#l=AE^|jG=eQ~JFrg%CH z-mVStw2)y=M|g7W$y!r&mt>rpsTvJ@4QRiYbIl8sC~00SG0H?dAtPd zOfqPPtJdlI6bs#muS5;}T11$VPhltr@*tgYG6#&KOLw!Kd3$d5QU3*!8y+sir zCxi>%sjKCn`PB44e(RiUGm-))r)wAb zWTV4Xj|Y8On%1+f9K={-ypD>Haj?FcI7suPfBqY+uP&JPhW?Q2)Kw^Pd1Vs~#LIqWAYC@B3xq@02yrZeFGgyjXpHU;s1w|<9-f#B+!nxDIeWDW z-1Kf0xG6Q=VmkIDWVJAkXIp z3hz$p`}8vPah#LLKgs=#2&W-D2jMVfdqB@1WxI>v5{!0nwb4mXHpuMDr1n=LjTXX% zAQLOuw9L;z)1mT7fNU5BJ zj1~dBh$Kqbbwa~q<`Z?TLOP*@wSXjwCWN`TID^KrLLvYXred6FP%KGPow?@~&(C8> z7`PawyZ;i>Bb-gEbfpQ>_y<(;BC1Cz96EG?MBxO5=WEl1IyEOx*8Nf5+xHU7rCRcyC{ghal+ZoUX4C#%P6I z#~Clge5dK011@!Gjw?bd`v35OF|Phs>Jv4oOeJR{*6<#!V_cRg=4NNuhOWZkuEx8` zU3Q1QCtD%b7h^iwwE^9>2kuJ6dnfXOLl?ZzzVVg7%n|cVZp8(;D;x8QM{2CbN#%6Zo3KM3 zM*~Q}TSRw)C@qn86USd1hGUJLD=rNz62STv4vA^vqS@u)*DM=r8;Vt^+rE4|O@ z+C+O~IgN!gtkWM@>Ttiu*FlAp0;Ojpr0{)>+moS7n!bVY(JA%xiDlb~QYB#3%w6mv zu!BO_9Z{9C@D{v1lqUF>4%8k(+9O{VM9h)7QT0rcayrJPuY*e!KR10F z1(l~)1)HbrjdcKt%1>fGMhawfTywFnaV4_J*NadM{_{?B6<-FPyPUxxE>G&UaeIYet8|z zJ!YIS=sob-?iiTUd=_e?JzJ#5Z|Z)Gzoa%}9IgotXPhp*xd}lm)I7u*Fi0BEqhh3a z@l>99DTFbE*(mRbwU64GA8 zCc^UAL2*%q)&j|ut38E~1~wBCDvKE^%jSs6f*~SpP}pKldS0J(L{Of3-Tipfc#!j@ zkvKr(ft_kIf~pA%94#7HUISuE^&7Z5iX#7X5KcLlDVXm>9;eK5JlPOY5>HMYgCEi@ zwq$=y;nWG`MUO|maWTt7j)Mpw#w?z!0vNCvA{X*`C6bhCjob#8)X(J=3U-zgqI||G 
znW*-^DEppKYfRZBMkAoO*ZUvPAbH&A#2dtJ_I(7Ch1 z1O))Zieg$}XfSj@pda0Tpm@TN-U1C#8}YXIoJw^HLVQxf zqQacge`6~SCM;`!4lxV;i|;8eRqLVQ8VTkkIb4k*9So~Qb5RJqNOQai-SfG;woJek ziTl@j!1THr?N@4e%HQY!rDhCbMkgpWMW#sC{Tr=g*L{w*iQWrd4_IAa5c-ZcG$6uX z>mAqzs{vW8c&=5de+*csYX^jzGY->NQoRsXiR!vCFnBrQI-nHXFSt5U3GR#pX+MmN7Y)MRQuH^z5J#97LCS}< zcvk#0c$9oTW_%fM>oC5RB|gENBV!!QULWJ@ImRKUb)Z-Wzm1_-I}z0>)<%&dDVC4n z5?CBG2 zg`_@oqxfF%`c2`X5!Hna*QQhKk`}(fbX;?-g*w2A)qn)0+6v?}*D0X%vMl8?IGwSQ zDJ@qdH|#V)DYIY(Gvox!I;u@*7Yt*97)8%#TI%^R;Ky5E&6M;_bDh^fjF_c`iP6m% z82q5@7fw0PO+qn&g`aI9EYQT7uyLP(UKJ?`0Yo1`&zT;whv|_-4HRWMF#w&wA{2xr z2lzEgyvQm|8=#xXg>qoB7)eU?{UA5sf+lJ!1Y+Z6NqY;NuClqdCa+LPQcG6|tx-^v zVbn)fvQqae0$oU-B)vlK5UL24+KkDw7UnB_?|?IY5n*9!MC+i0xD&4ms~}@siNg%~ zlO-mR5aaU%3QEn22-$%t=Jz57;|u)czmFP8glJ?F94pw2DFk_o0^=!2eLvLYUs6r6 zh(a|)HLU^Ha^!AMs<{VKbH6JvlNyNX+71M9RgY%6=yZJ!)Gvx96D?jWsUkJ3fsA!$ zch7fa|3so*cB3vp=Mi4MEo^^L%d&J4KO@ROAaM>2f<3_~5lW$a7oktMJ{bJ!KSnX% zojovuqEd4T-O~x0KhX(>S~ld8Xa;>K8>a8|!G9XWsxJ87G8lXuL`!~&;LOX@FHEG0 z^dRZc(oS9q@q?3N$qIvB$ss}IL{L(-6!Z$piRd5kX@xOH>T_Mx_u^&hla}XIye5j4 z=Y#7b-`TM-W1XV&T&w9kCNFs4qbamPjpt_OjwhI^I8Vwx`Am&n~8 zoTrd*!Pgv_n=oe}%n1`TZJF)`Vs=%vT9n_2Wm*d^ljH?qYi()R+;m2*`lbaJxnz{D zcAcu)4@n>L0cD?+Vp6>E8Hw3)gf(hVSOip?aluT=^GJcPQh?j3>3zBPuqLxO=%L~X zE9qPF#R1wW17@8zV;ml_F9vN6UB@+7fem_J=Rhiix#O%$nyP&bXgFxNfKuLACi6h& zmW%f-)1@=Xy;RrwEOZle1u@52TPwvY zbg7SPk?|ktuj67RkP?MxVP*~NpU}`~`YrB5;=n=|=BTh{qaPVU%Zik0I%LSN1~Y(N z0z0w9yS4}_Hl48Qcu(R=Tvi032mQGZf5GCD+cWZx7oXgdk%zlv&-R0#5?r-&j>jHk z@I2!1pX)z~>*CYx1wK`A1&Wjc_k*xfhk1 zJ4tF?K{Z#f9^+fsaWyKJ6dN(ALp=?73AgmsaeDyTo5VSg7gst;bu~k(L}x=g9q;MC z5>sEN56XL25`XJ5{LcTOqiwwkVXkV8Sztl> z@t6UvPUxt3?r?bKQ9W%%coSucoQOO$20ipWo|VEIRy>|;U1>wYh)BY;m1crm5uIXT z3Sb0UE9T9gwlWdAqgjI#th&2`3B^$mRJ%_^;1eaV$zv*F+Xn{oaAxaQWdwoKsJ^wPmY!U znGSv<=W7}V=ES(t5t?f?Ub90rogD8OxfI(Gu@gtm@4(qdZsb(G+0j<3!p4fL7O|tK ztkDTsJrf(JS}EP&%o7ldjyp_EvOMS6O`M4%m)`10fnT=AlSX!DynLyd zc_WkX^<*3ehhyT2?cz!8;wkOoshq(hll+*$f{6AxOE_E`m%WTpHYk7 z7B^XVzZZ}r`-roHVaUXOsB^pijuqu`xFMF6E<~l~fqq-sWa+`4FX=xNl^&6=FJ4g^ zH@PkhnmE*NOPYKnTRC*SA{kG_IrMT|}?`13&uiWk0NK+o(-7fH+6C zIE+Oasb1_Bhb*MZZOhk-d?DsS^a$C-{c&Qu$myM|O>P&F);gu80cnxiq{R-4BuEuf znUC47FFNej3*PVq_QPYiQ(PNHAcWHpwjvxB*RIqvD6V~v;TU3#O#@+ZO_1v4n9gc@ zaV1Panj z$fcqLHrIxRlIdcfg)XE~Wi_(jmUcDM4(fLpeRl}r{IU=tVGd&58@1f+N)3&TTtCVUiSM9PH$yWH-VZ__CGj4vm(fb}IjH3x zM0JU`UgSvXx|ZQ6BC630NCE?icMC)g%tvrwN686CW)r^SNyGL>#tccVcMqCJ$+%|@{L-N}|YXI!^u-eT) z$Dk!)A@wFq2c4>kB{AgFQZSKi%RO95ZBpgs47buVIc5Up-b2 znhgFW>@-Wo_(Ql*Ejf;xzo~Q@Ow3Vo){)r?!hG5ow6;uGQ;uU(Bte$577%_|T3AWc zR(WyOhvQACuwX%Cl=?RLPB%tkkEz@kiP-|nD>g=6%fNK8D{?o#eQGG1y3}S?DwoU~ z8N~3$@fl(EovVt)ay4xC9{d@Hja~+|%{A85xL33gmQ0w?Zh8Q^z$u==kV6zIjlLf4 zMfEgo%Q*tmw!Z46Z)#Dh>0^@xR}v2>#%v?_tt`PQIIDD;&`~&?Ihl2NJF;P2^^Cll ztGnw@Uc|RS8#U8@M;lgLj%m3V-f}&h-3XP#;rfa%VMoK*g({VW9510X*iWTi)(ONQ zKybI?a+DHF;*W|Y@nQHZ$EmboN&HbRiHiu9#2=;2K}5uoIGev1McJ65@OU)#qnkGI zNQ>Tt9%ur@f*%^0TnT43#RtHovYGo&T;wu%6iA*9l4gK-0TPuD&8d}Z-5GE#z#3MF zY#<|gORj}$;L}(OKPA_~??sD)IJzl>Ukbw4Z=Z#kggKP3|Mk!XWq{6Z6#Jtbrcb%Z z3t_VeJx9^M)h}o>XNGS6$^-20V9kdG@nm}1g{~)kQo6|R|Ey&p*1g`M@38yFf z_gEbX+sFK?D{ipgv6&C;js~i4bp0f(bt#KP6b|=9)cmXjI?l(`JK*RCusbIo5Si9z zMfCXOIuT9$qu*DaT=2qjKI|{R#Z7tgv=9E{y5)@ zKu>YsF?AA@$-}1ZEbmgrCmK6|R8u4OofJ8EV|9RBHkIS0{rMXZG~@h@=E5w8ykcwH zMuP)ZE%#tQn4@iD*$%#bBKA{}v!`;~kr#tQ#Zje7Jj)g_8w1nJVU|aOdZV7*fTvPZ zz^OdF4&C6|EEHxNrYVN~tTC=8O z=LycTYT^VJ5I0T-Y!+(~PS+`C=2kE~EEc|OtpqHrJVFL@X70fLaa4zvQd8l+&CSfO z`lcasGdQ3Z5oGAQ=WUGXky9H*Gb9W@d?BgR+=`WwY$(tC8DkJ~ 
z+963m^1ss?BPrPv&MJl`wJ)l&VaQdJR7Bg38f+QTn*fP4|1c@&S!oha4s1;$svE>ChW4t;8wus^D~v>7o1`z`HwkXIePYX)n3GAOV^K0stop9AwS8K)SIz`uhv=VzFHz}nC%n?VJ4Ra$ zyxqKfzzofz@89u&V}$#Y;fSC(4a3FT`28an+Zr!6l0_#x5{924;VUG(@4OkeqYo7D z`wjzsqlCXH3_nuBJ0<+D34hl$0{)+b|AU0@6NXQe@B<|L_9WnQn}lCT_+Lr*uM!OW zzr@KI;C}=U;QwdB-zxFHjqsx+{6E6*uS@ukC43p-|3}t;E#Z4h`1xV@atZH|@cD$_ zBinm21o%A%MgNQs!)p@$ZVBI)@Y`hls|o*i2|q9l-(SKHk?`Lrfd7}Q|2e`}06yk> z$M}Qbu}{0ZANE=IQ5M}dfCFX6s!uEE74+53sw*45Fvu(@P%Bj{5=dcgNpg(1B?-&p zS98|hljsgozI^mrn(=?aEy2iyQ1qD8(~A4e-moN6u`eDLh}Io zH<9}&;8rk1ZWDFFmXX3KM8Pt~HQ1iVc(M0`3NT(km-t3GW*Av9j8T8*eZHyPWHSrRn6 zG7h^HZv?Bc^cTeYybb5pwG!z#WVAS*Y->L2T8-q*rp>v>73bRA)rv=o4(Lkv_#Ad; z5dwJ(6e0i&aIF7UPvUc!N_$&ypwOKtSP)Ef+9ogs!HgzgLc_eIf_GM}l|Cpu=C?@3 z($tbexLz5dj2h_M6dFOgp=vQ8O~kv!n(jor8#1BvEFp=m3-(sCG!dYg zf{}R4{gAf6U_@01uOa!Sph-UEGm2&fqL#zeZe%p&P2{E3~-4zlC*u2Ue!s^*qH3v5;A)4;F19O4)SUk48YOxF)p*HI^q zo_g#>oja`G##fvMV27ybDeRz0>PIMjp%FGC+so* z4Cxv+{&?&?ig3sTpN=1z{0IIb?TeS=S6l4GZ^7aQ{PtfA$+^(J1mbjQp|%9#^o+uS zB@m~l74~03p;~0Y4=}SJcQXsVf|qbTMa|t*_CGWeK$`X9B~WEnfyo&5NtNT9c$@Dor_UezECTy%cf~MAB$-v&De^yg8e}p~7{^kL2+(P+C#zUkv$}~QZ z)+o{r%QQZac37kxmuY+;jeZyHZ=RNEd?4+#NP`<>zJm{>wTQF}GK~+UT|k=oO(u!o zWb>OK2NoKu#Qucf0$3&UYwDHifo9rQ&S6um2WMq%?(RG-I|1Zd_;>j$(n!Yzt(C<5 zslFtY1~H#2Vg@mPNW{ixI7f^Jk)Dbe2dza?bSEP+&HRQa3uJt)VMg%b39q zUO{6g3nDoe0YE{wMgSmK>EI;7qQU4KA#fuAwHG!p1Yk>`)}kLY(zhYG|IHbumTZ!w z8S@x((C*rSv%n4*Y$-Jg;_dL{!TL)245E#LzGFp9Ad#nfMvE92-2nA2D2t#N%TcRg zG74dztWU)Xxu6mvM>jty)oWTvJZlZ&*$ZJwJbSomCAFmS?j-2r=gi6kt6#14{6acl zilvKA*BMYUm;j*P2_jdhmF#o6_9IbPQ@OYLBWooPlT=got<%*20I3xq0A>U*>_R{o zXUD`BGOMnUR`vqyDUycHv0x4dzeZJDIO2YRR2XF^ct&2Q#bf#3VOr-j?csJOC>Gd3 zq_4+gt4g`;9A-x1hYXHA2PgZ9AHo8f2uI4WgZ>}t&ILTG>gxL$2@-8|5*szPsH2TG zv}mK!N(yZg3_62n6fM=D6eCcjt#3bsq@p4Pk|^UiG+wG`twn1sty;x9)z)wc1P}#8 zLGcn5H8H4w2m!f#zyI3jOfmu6+V_2)@BQ*TnX}J6>+G}7K6|hGTAM`5wO1bM4c?Ju zkXl1NlXF! zLaz@=O&W}7gw+fj4Xck%6Cqbze2;=6S=;hJ>4Q{==+!&-7=5A*3q@~px*Xu#9lS?lx>yTB!! z+pLHRxu~M;I6)J{wQZfF9-aMhfY4KPv&uJvuogtBReFg%)EGS_TLtvgv;w9a3y35$ z#Ay*@p%?VU9m(}Du_NEHhBg~rOuUaCffH4Zr3OPmlYFx>Sctz{VuaRU6&#jkA->!d zzo+?!Z1@MP_}=J_XksmWLWkSh$u~&n#wZ=d=hF)Mq)i^|-LUAmJPl!hMMv3LfJKMd zS%5`P=d24BJyjPCi=JR5VbQNyNm%pe)Y?KUAVrC;=heW ziSHQ)ys&7U9^zxsiT1$%zp!YoaO&PzbiW^cF)VtG!iWDq#iC38Z7kXae=eLlzS6sH zO9t-P8+SS=d^Njc*bKbspmy#J)fHz7DEGR(VY=YU9|!ny*uTV=Q@$v^G*meyb@EIBeFEv>_n)rXkIHoEOIfT|B|>jjjlA-B1^wtY!= zon9PIPDUf&7+@ssYBaj6yAEqx2~P<6qcBJ+zzz+vYY4G}>{1!5TM4Y&ZZyuJ z=8Dor@)I`3YIkSL3TzZG&D{hJ$WA^Iw=NvdDD7x27K4J`4 z5>lD`n=7KS@RHMla!xt13iy!DRR0QV-#(gzHDdl6^{(Q7oKOS{R;99p9hfsKw+bY6 zx|Lg0MpQw(j!@TIun$w`r-$s%Jyy5Y;5q`f&gxbe9NbI$08>!->>IuGhX7%3mSE6V z&(!^#H`uv91a%uMjXi0cJs{9wxf(tOsOHA;V2$FyDR{ z%(qx#O_#|_H8*JTCSGhm+s*dPL@$(jd)30vN^NmZkS-}Xu3#ZFd2 zx{SQ)0|Q>79b#_GDCUPo;||L_oR%bj!&5&`0h=b=fa4nF=c>raDH;&55S0MP5kL&a z*z($4tj2*w;o2XEPz{_%8x7wmR!fZafP|1oy! 
z+G5RLedJ~+4{U6LlCWE3-`LHi8Yzo|4284)1$G0YT_#d$1{{SY<0s&-Tr{)-72Xu* z+{%Z!OqMKGm;9O#qI(yG>kivbi^}68<1884Bz;+G;8&$a)G~_^_ZfQg>{M^GE-s6x z7g`nzts9c+{arb4JY_H+d!f&Crmr5C%>9BEW9cj9;hPqK_BdeQekFX^ZP`#slu^XA?; z=ATY=-XaWx3fu8*{C43BI4Ab59P{j~g{K`?C{M+L$`Q}9!B4+R?^#?i`&Je_yRaPRgr{fe zFyd)0fwP|G(xm)?r`ath?T{WV-5#!A4kC?Vt|ohFjxcDr?q}ev_8vrBN&4Ql#vE1S z_3NuI^AfMif*pQ{NOzH@qoScUlnisFWFQ)VSRd>XP{WnX4=V3;oDTKHMk-Ho@s?1h zfyssQ22~D#VvzK34`l5VeB*20LxNAw1Ub@$r@cwmMWMvdConoVD0&hjTb!J6!iBw#nm;G27%}E3mzVo-^Ct_%!@D2S3%pe^2ngh#35P zF95%CnZXZA!1C-`y)|417CVFy3Y!9OMV{to_l!RI^pX=(T=4*o_5f0y6~IQXs4 z1OFD8i?I!#n}#3i;4gOYR|_^A$l^ZUX3 zZfvsm4LSI43H}GJ{YwOYj)R|{hHrH6D>*PeM+&~o!GA~a$2j<#((nxq{%!|<(Hp>* zJNW$tziElR@2E6~mP+f&W;g%S@MRAE90&i2`V(^SPYC{Q z2Y*N!KF`4)_=_F<5rTiuwZBO4 zbAWHiu-}__n|QGzc~p7ov>mk_2S_r_F-a?2D_3PN`HMVnz)#U!E?4rNH3)QhqyFDG;EnCgBpCU?N&?Izp3+`}(>(`Mt&FP-$X^fuM(wz2S@CKYiPiyS# z7xeU}^z>3Y8%a-x)6?Oc`JJ5Xit6M9S5zm*x}rMyV~T-(#8P&`VWA&EArh{CE01yl=ZS8`oRYJ3-eXHbeJpLU-ZDXSu}+^H{yA~@H! z934%x>nn|m_E~_fs7fq-1EU^Im0Nkb%INKRDDG3`rh=Gab!a=J8cGeK4mW3WSd#-M zJ;8?Ec0`Y0Fr1ggZ~tLl7O#fuhnO14`d>~*Wmc2chuF*OF0xCP6O*<%eBCsi47fZR zzHT<9z@=hsxONGr6{San>o*F5samYAl++GDzjF}ZN6G@*=;HhwH17;FJaq-Wd2Dcz zrwuN4m~(~gaA6uGxmWxexr_v{GW#>`XW6);cXBQkWr2$Yf<11fsh*S9^d$XOE*hL0 ziZmLrq_>f0;h7svUQrlJT_f*`?PLr7hd26FH{9a}*MF!n8XBH)WiJg)tv*T9j=Zd< zjy`V82PZzNNVM`z;^Z3QJ80}?_Ly)7y(yES=1(AUId*J0r7ZYfJzO2+SsCss^=cn!W6^XUdAd-5X z0Wp7N6WODqTk&t;<(BH6KFu261GdW}LS7?d*^W)wx!QX+$kSBDj^gM{Z}bOvq1k7M zI_sV+Ex3A0DCzHfBBCzWM=a@lB^k*o9-?n2DJFzZC?4GYOF3KXNsZ-%U{5&Q(w^$JFK>a-`-?0%U3l%b5!F?NA=G9 z-nHx?itFy;8e;GX#C3`@zEjtSgzFOUaq015^U%udI#cY_;awWTM7E*rQXm5V3c?4- zrpMqPC+;<|(L12k)Nfc-Nof|_wyV9D{Bz5gf@W5gkVIUbjeZpdSQGI68HYrgpgYAaF6+bQ8_xWN1x%7TZR6OFJIuy{d=doqBdE_v<*3$5MUa zw=YvtQ!>khR>o7gb7gzYBOFNjDEuiZbN?)c*?Um2B=OSLnszVrF?pFaDX~5abMVtY zR{I%hu;)wwX-5wf54|rVUMlxlaro+z+)qL)qN(hsq^f7<6Y?#iZ1NhU>GE#mm$GzR z-i`cHmUhd#QJOBVL3%CkM(Mb`25Gds8>QXy8l=neZj@fjYmgSpyHOgg+y;udH{wp2 z6C;zt?QQ+&mlk9*67gY5dSS>C`8JiEt0X;IvvDPJQ})s>>vQ4V)s@50FL4-weu={v z^h+H2O23ppY^EuZM$9xN(wLd1MCv=!DUsII;BEb;Oliax)Jq-K_8{mg*+c#q#VEAR zALEn*g;h?^6E+ojh6I)zVB{GRD0Tm_c6J^lP%8fdI}3>Q{+#(F@SCnE5*Tqsk-$S; zQ6%s{iUA3%l#75!pt)m|sSD1K2o)rIS#XZtUBxslG(!Qjuzw~kbl4bL7*eymZy}1Y zPf5rfO(umc!%yau9B^vs{=%EtH~K{KDkFlCX!0tsiXwoMSAl1YEaI2zCC}*Zds&=O zra}odC{Lsq@f|FDA~{nd2T=vJmM2=vEiCTcogyt!sGNCG*aL>9JuKvmD@X5~_Ae_u zi1p3mI38|hoyz9H&bqUP-xW8aDjtvV* zRq}4Qz8dDovhv^0H20)w`iw*;qt$8L=Q>#T=s2si?xV`^m-gSofnq4@wP0>W=NDXg zXu%KhZJw_br&^~YT{SCMViQx;% z%g?CPatKwd$DcJ*p;r}&5B4}xC5kN4{sArq6Q$gS8nZ=6K#dD9=o84L?#+!^7gHns*i8sRa%1;U{-r5#1t}+8XT#hT3d^H?; z7;R?(diar@1?b@n&V2N6nJWrCTvK_q3z;HVxf$z11DCC))5FmudS-hYv)Pi8Kt1}V8!~h|r*UP_! z4@S`>nxn#(dqO#-i975uljOh70WQDSb4bpo9BZ1j8=gUOz5Eue<}_Cgo9t~;QzD?_}@B7zOUd9ckt8G@Yg!{+Z_C7>d&1He&^G` zzyGB5r!ozHo`b*C!Iuj@=KAxT;GcBx#cBBC9sH>dzC!SxgTGVo*E#r()xrC2eZ=0k zmxDiA@Nox!nc&ZN@XcxXMGk%q2gYZI-uEmAKTz;rcknaQ@V7enKRNi91V7Zl=L&w? 
z6ZXDi)9{x#_$wUz0>Pi-;9vV2@QWRM|1|t}9Q+v$zNg^Nb?^@e{x%1nlZM~l!5{A6 zw=Dra{!$0O_Qha)UY=v)^F9aO_e{0FpM&=Vf2xC@orb^9!9VHXZ&Uj(xbNFr z@Vy-T#5DXb9sG3;zI`e1D;)fWxxlY^+{P!GhCkiGpYPzq(j?yI`tyw7|K#8c)9{Bm z_^&(oH>FvtbMQ9^{t5@*x+-|z_a1ihp9Al^wHf$7yZ($2{231ZsWki(4t}wNUnls# zIQYJTKit7jPs3m9;BRyAJv2TAu0K0jDy{Fc8wKmJG7W#8gTK_lf28&gbMVgz{z(U4 zoQ6N%!Jq2jUlIJ_4*pKTU+3UERtE38^&vO^9sC5rzwP>enc&ZN@XcxXMGk%q2gYZl z;MY6&fr9_KgP)m(ztzG2$-&12|E`1275uhGcLnlfhW>6g97pZg5cxs{tyAVCtI?mWowL}{;ozVUOeI8KJ zM5}k`9`f}jc1GD~4??pDZRgJ)Y33N>xLi3R*}kjM(#1F;jpjDruVXVc;rer_ z&^CSWOxv;g5n5vp{CN~gMf4f_>6Pe7wUebxJ5ELI066cx{h~y{)E8LAdAb{$AwIA@ zsNl|(vC{LR;rdU-fs8&4w-xyjjXteXg)#~wROnBkDw@bHs=azlR`ven$?T%`T>VOf zIxv)#^=AaCJvG{mP=lGNbBvAlin0eI7qEx>`FZi=MR>QK9f>7>RUAv6QKIH^1}Bdn zlB|B0c(oW_FfbbE1V13Ieb5SeNZjTKHbks#4|(B1YmHDNfS}ilCz@lmt*ULLmhQ4M zPOP>~6^f|!M2zQoIDN#GCgU{RQhykwUTvPHXw%c!zN?3hks;jP#HTzPS;u9s#Te(M z4r;nTJB#ghiNHa;uNF*DGH)vX92YwYRGq^5wFWIn43DQbChVL{N1gWPVRNzg?zr z#cPat7hf#r3)1Mz11t|7a({~PN!4LiHB_ZQY1*ubfSZOit7u_$A0?cK zIV)6n&lDctet3iL3o%`tLEa``(mBjn`x9&1}s?Lv8A_U3p3c><{pM9J%YEx(l&*KrB|o$I)S|IT$>!hh$wg?Hn>!(AM77YE(Nq4%{hRDeqP zbYC1zCG%u#R?Wd}zht7g@sc5&SIryThaDhyN4JshNrJY+TV}r61Lx`})|{k9;krFdC#W zLp}DPef&4PKD%R~9VFSq;p}gE+C;*b@RM5WCQhi5!mOgfMI+tB302aYRW!I@q?^n|>^7;-DjHlY(oLKIB`sP-g9}Bv$(+P) zlQONM!9^n7WL{#oNvBrP-~y3uGB>f?sL5`ycfLt#wrJ_g;rfvv}N+>bc$+)Ht4n!B9l+J^u& zHx2cqgKBnAYXtSYpl~x3cv(+<2v#bGGuKYAM zwY%`>b*fe37Qp6E=^4tf=HqK+n8A621OsQ>}j6-`m694#*a+Tm;I>aaH+_pRXn5fwKJ|Bvu)7XBYG>}KKr5xsBbQhr4Pm%#rGTmt_$a0&e1fDaG& z|7GF-PlW%+3IA)qn*jf_MKJikqR$)Jn`+yON{J@?k5g`rS{wEhBwn3w$!zaz5$V#rYhAqqjjAByyGzTE-F zDjGF=$qhyj+PyyOw5vJ;^c;qnJC%@eibhp2yRGmlGtFWm*Q^{10>e@34i(BQTD2Q} zkpJ+AAY4^$#Lj`3Dn}pWM@$tYfhtEI8GjT`X@KTehWLyMpfpZN6QcC45}3}2Q(&BqMAC_BvHQrtNv*2fG& z32tGV0+r!NXc3R4FhhkZG443uSLEX|%j_bvqx@x~wYz+OS-FqHM)}UN8yQ&37}{h> z$b1J`V}g9+d5eY~)B^%3H@8HQ#p&SUmm|H`*tq0otc*yyw?KOsFRS#SvL^lfJTlTiH2C~o$Im!VOk#QZ%%jo@p-`6r0>URY-mJJoPK@*8?F`JKrnJQ2rzA)O<9P!_IVtm#3k5!l=ud8lhLkxOuhZ)TAam1}ktc}tnDu3hVqJajk zVC>H9yu#jL=DmvyGMb!NIcZ2 zWb053C`PM-Ii~P^6*!W|wgU48jp#3OFz^u!j$|!}m2Rq8PYgx3Za6>L4v?Wt7kTkM zTVkPYI8Ece@}#_Cg@=^g9n4SK!-7W?J0esROHGzOVHbvZMYL#w;~)@Eou};qUV?%t z7j9^2JT(~|#Y4N|p^uey3l{=*m109Z76nT~J3PriJLwLv{h$#8_s4`j=`NxCNq2zk zsp(}k!uXT!0M}EqpVbKA54@vkzJnNe7rVRTNBd?x^lDtcZ^TMptGTc37~%V5&r$yf znD44PjrqQu1I%|HX$j94i!SDSU>fSX4(dz?H3Sv^+@k~~=KHWTRDpv!&Owz5>S&-~ zzOTO*=DTK=G;qkv8Tz+khST`()kN%(Gns&XoY0is=4L9Q$zK%4lEVuXIDCM4`B$Sd zXmzntgzOcx{TEOQAok<@Mslj?zUuO>L^Mun&?{;o+gYb9GOR2A}{n7`ErDY$v}!g z^fFNiQ35FXYzzFTrY$jWIdZUlf&jLX=JW4VA^~lm!XZ3uHoxHL1KqwAVli@y;A8R? 
zZ822a^QCgLZ37&hh1N^ox6;WYCjXcw%k*E8e@v4l{ny=-hD-l-clB@&{ny=>4Ig$L zqS2V)zW!?}m!ODKxrF>Pl}pG!Q>Fj%r2i6Y-QOu4YrfQW3v!0^i)>SrSkwo>`eVqP z$c1E|pQ?WvhEy)$h(?AafP3IvZC=&moku75zE1r>8F=M?RSaH1$`4!W3pt99f>OiFqRBmvUI#8(tl^b29 z4pizurNTyNQwJ(7h@@hM;zp>{fl7-ashA=2Ih8t4sY4#g|ESBpURBxC$;+POmA(+J zKTGWva3&jc!xI_}&rf5{gMEH5jpnp3_MJYO zdnL~9hk=rw%|0nRNvtcTbLp|VVmd#enxv5^S!1L_5+b5uhw&_8umzc`(!T6iiCa}s zA_X`oyb&(QRu^8L&93K?V_|)6<6T5;tBDI!{8Y&eak*bxUP@Hs#E>>tnHJU;!4E>i`IUO+hqxd zW&4i7_v<2mk=hQW8&ioLpsRlwC)JL*NfJy>0^kBZ;wmrnYScsuBg zYA%jNDwX}P$yvHutRJ#xuMgy{smNQikWGfWU6$@)!xA!ad!=vHtoL=Dg=pT0MjRz6 z3!Q48%^o?HjLh^#pt{pAp_^lnq&$0IE)Tq+$YI3uwDlS(0!B)r(=F?;?v0-5T8D}50PiI6M1gC4UuQs&E{nB$-@EsrWppmpM(D!yKm3^ zn}fep@aH=C1!?$s4t^B}+Fz;mCp!331%IrAzc~$mql3T4!9OARD;<0x90}<0)6Awe`fz!rO1L#`-h{8&GtH-_7Bgt zo_!!NdhKoha58B*DW4S??+44Xnm^C}5mhG1&|0&9kkxxZI*G+|JXy#9Rut<!VWkLBkX`f@0sXb z$fx;w&0k`P55@kErAK_+nuFSo<71ls1Ge3Sor*r>4Q(eTBp&+AWPln&vvi97LFSFg z$EemDtMrRRfRnhyu(|Q)!nuW*KmIE7M-)=q#r^>j%YG8{H8T5A5E@BD*hL1)U-KS$ zHnp*glkYX<(4CSyML{FiArHU;#rw2Nqs9oy2=ahXuFYc^LF7J;AmYzrKPg@zIxr*y z)ca|DThk7#5tuwi?rr~|s=NJzs_ym=s=C`hsOoP2psKt5gQ{-!kC9#NAF`CVfnWCFvh-+IwDNe~? z`9c<#Ydc(!3|K;@4l}62?m0ULvlRDYo9wn5;6W$;%EH{27>E-zMt={NnIBu|p zGpM?J{b~CJsn3nqp3NA<5+4BObOoLzDD*S)XWB1Z>Rv-bPjMHw`}T`xil%VRP>Ll1 zxuUScYA2o=#~w*gygr)Qej!+CTxoP#BJ6|x2~X|peAX8@Ml61+!16$iLoFK%eIO(< z5n0>J7FaXTM0^5woudY1#fYJLpeVCNACsA%ie}6N^bFlzq4{U1mw{cv>>A-&Aq`ZE2EnG0Tup`v;+{kTBJM;TV zHJsjBmpmdh5BWNK-c)1wcQ8TwH@onQfxbSo@16LDP&#rv32o%W>(^EP7y9{LWIy2( z%(1uN{1B-$Koz~I!G9;7=t%49TYY_fGp3n9A9Gb+(aj6zT`)2t9<1nAZCMV_Nf6n3 zgr3Z9kogHlQvNQIdvZ|MlM4w2JWoem&qfNcvqDe8Q?QnV8%R;lIwwNrg{T~aIu-Tw zX$|TQ=7MHKYGeqoH%ZVU`*dg-SFrCUOp!rdN7Jh# zi`f47s{Apg#-IxP1lWGA&h!>V*9)qbz3L3q7O;-GFuFF$uB+@PuLFG@rG*g^fmK}{3X zD#tUvh-JL>AJbj>`p)m&MSowrq0pGeVkB5!!F@=yc41*R&QUyK+ApTQaq?>f1?LHk z&ZOu8N8W zvODjvZDuPT$my)faWJ`^HMy=Pud^o4)f9Br6u6qg&YD73)4Q{#ceJ*N0L<2K{TOM& z-h>M=^>^p@R5Dxs(*6^0KNi@VXMw$W!)lEms%_diys>G^@Fh(fhOcT`KYX3|9XSAU z@UC@C5NcB~LD#w%-~~00li{*m@zdx-a%|>Z)FVCf@R?m0?VNe|&N9KG$#TuTa`|MH7c=*; zo>+A(kGZ!zvBdHEh0tE8A|xfToUNv7-!irzZRZ|EBU#G(^!PBpJoa!Ytp2rTNhF#a zimTG>X6B8}nrJf0be?JFFo1cz{1j0}hCuui!XVT(tr(x)`ZJOo{yh)~E#oz(De-XUeHm{!?Wl}v880{OYZ=!v-m8%J3YyIB@`>w@ z)QTbf0h~Gl;Z*3HCRwp23B2dS1g7e|Bsgc^7)_hvuIst=Gf;T?qd%@aCB4#5!*%^= zOtdTeQH|~RwZZc%0+5=w+VbpyJx`hyEZA>Nwgp?pfd$*~7Z&VDw_yLsT5WBZwrkHm z=)mX$EdL6}bUT_FYpSPa2@+GN+xBEVt@Ce$Z6 z^C^thc^3-9o?MncZQA`kA0)zn_$by2?Q%9n|T9suq;&_vfXdhB>I8IH-YwssRdIytIx_ z`}mZ-?f2m0mH+weyCX4_BHZ_QCVJYwpzZj61H4xCzH9d zhvXidR1k)5rsB(Vh0lcg=Pd9mR_^5KAb&7))On`GQk7&@Alu@5y$CMt@KTd|>p|?2 z^_@(5rq4k_M6&BjVqhT^%ljRsn;d;ap2Hi4>pnFVyx^p17sf88p)Z@f&aFzChjz4d zY4sy9((xyDoz$qknCqAvAl%O?=V{Zl(DFE&pq6%2Ki{gcX+P_>F9ez%b;>kQ(P%4P2Amf$9L+L3v>v-94p zt}ScF;y9ZB`m33gab>_CMB(hzh~H%?puKZ`PrWLy;15BYC~xDX@6~+Jb|}DZ1%~Th z)mrQFWy|=o_OZevzb`i;XxLsiCO_Htg;I*P*`n9?rEda~oZEi5^leNHrCSL# zD`_kfqNMtM>@j2p;Ul&mf|va~;?7CK$S01Ke#ZBOf-+?KzT+=%Ab+=a>1Jgy@lx83 zz`GP#pz~yF*Ap}romX~rJwY(&yt2FVB==Ik9IIW(=WD5Huumt3_N>&HpWw!*{SQ6M z_Wc`B5z%1JlRxDQt{cuLAwi=hBxvM|f&KigZWg|C<)L7Jf~LU179T#Nu0a1{#qV5iJ$P804yoIfG(cpfj-l?czXlvL@fi zWSo3MMRFuKfGpX5_&V5x@O3A7kO@nz70Zc)c7+&b+;5Y2B=9bu(NGGYAu)5w86pnx z)M(Jq>*a8bC~`X3lNH8oN&62v*OTlf{bSmM)9x}OADbpCH-82)S8k>~woJ-@^L z(%Q87f+fOtV27o4)U^#(tLqyLYv}rCmGkYsE$KT1*^zO^_8+viBd_2uU7K|BN_)-p zc>Ra$nZrCL*8Z<>H37aA((@kA3YvnQiI;v_)7GYZ4N+&A$3Pz{3*WM+Eb=^FH?i7x zT1?f}m{?HzPA~S|jct45tkl3JKA*V|zxs2hQ`^kZbZW=^%(tsQ_0w!2M^&CsKxI?& z`CE1Jz1ujE=y}W~4PZ$7dS0?Go)f#g0b}$z?Mr?|dWpol$R0Qa;^x!0h|hMEk*Ci2 zK{9J)4}c}omzz^o2WDPt_-Z>hVkRalXgu*suy4HlRbOOd^mQE9bKJo3Q;xevz0D5t 
zXs}Oj9<}+JWB$-DH0tfgyH$*Tu()i6K|FFzpFi}=A2pstt(g_%Z_TXuP4|ja_lo+= z3i`8{{=9%Lqs94ODB&!Gr|uBGQ2Tx-Er}(T#I~)Bu_GD!qT|>*n~A;>orls9m1UJL zh8VMU_uzO!_K}xsK8p5Pi9Xobv9IS8-6MN@{+5|7x->fY3JE1{~v#ngdI6F^u z_-X!UMM6pEb>J`Q%d02c(YDOCH1_$^% z2Y90Zw*qLK;WueDUF_}Cq~FW!H~J!!=5cn=V}<2_0SC(ut_>u~#JMW~J5 z=@swuS~NAz5c_u;I`O$U9Qa6_Hc;>iKih`p!snoJu^3}oG>;mlL^9tef z`Jac+wTA<(h2{&N7kA?G9EZZd1SM2dalFi#4>`@w^u(XhV&$G z`mSE%uWS$|pDlboV)kC#r(HAV)2?Umo^~zGdfHi<^RzQ#JSp{_-IeWh_=gcgpvX81u*aVqpKdTIEbuj%mna^ZKQ)O^{YY5d+6 z;P=LT;P*!1cXoMV()#$_&;ZUf!tXh0{Jt6-k3I5))$cYPxB5elTRH9?_11&Ms~3-Y z>yO2&Ul{c^7<^?K#~0*}dK(PxD#nBKvq9-#@VC-9zTg;3!|f_g2I*&00S3>60E1^j zkgYot;?tnQwXv}Lgw-n-mLIp8?Er=4qc+9C@^6Oga)sq{JJ)+BzTYl<|F!P;9(g){ zzfNpVN-rPVFEDJsy%XCzeg_scBy0~Kke?mW-Pw6y`Y%#&`2C3hzn}Z-02>eeFT=(c za{wD3aR-=e2>2aLwx|Y7Hs|uRK62O(jkCQCG1cd1bH&S!EWhZ-#>t}95Q1ta?`R1s zj=VG$LIj>-xQC!oC*=%IG{#d=xXAfvx_x1GWN$1#JMwzIQ-vGf@fq?X{0sDQ)1XA( zlcjmUc#M^+bV2nLc%+j)B-LWGBbL&}i9Sk2H|$YUi;Z{`4(_6*=zEq9PHd|P58hP~ z9ZMH4Wkq3!czD zf*+jNLdV}N51+XK%o5x4Ld~0Pe*=>E6Y+}Ej?lNL-Hv<_R*A%AwePiHbL-i3vEhw_ zUgs#5@6cR6g$hUh;i7Ek-1(MHPuR(6(~^%JU-&D4qKR#Mur$4Es#!h1>6+C`Iljno z9mjV$emH9Ur^RJkf|YuAzmrFe-_^YWj&5Nm#D1g3!_j3{fDbY&zz3NX;DgKxIJ&MC zO`ooQp=sCZ#bmPg`86dCsXI}sKNpGCn^@(w$=hj;nx^A6mEO4 zoGrl`TJw?D=Vj+*{DBNteKvyTHbq05*)w#xAiMFuKD(D1c;Mn}BC4~p7H600K={lT zE^Ws2&{6YX+vUz;c4abH*jK>FlX_)32Nrf2@qcqCx`q8AYr1vqe@VGjr^jZ-r%dPF zY@r`SE*Pi6>_bVh2Slnw;W0$S{hb>RG4Grh`!w1nRB~LAYfiaUgzKN-9D4+WJUhIgU6n0z}sq)xuYm&$KI**Ez* z;=CqLBg-#3KvpyIy3*7Af}(}G5J=Cv!_X1`p8P50i;>uG7lY;{h#PPb?8wD=3>e&B z6%M}gu#npTNpw_{+co)tEryjM+qR&dMTFSmLI{bgk#R9XPF7h2RYv8A!qTDSqX#Dz z#;UzBd&0w?{cQV)=KMaIQdo z33rN?F+UvOimmB(oz8Xfg_TlqmDxikvUj9d%}Ou%K+zPg$3plED=d>dkMI>$ z;kqAy)YOfMyw^sVzQ;y!gqsDdvzT+KL|q%F-9D%I)kAehv2z)X5YVppEbTQso9l$h zY2Om{(hCuO5cT8n)rE(t#P$$V^g0H8FY$>Si)IS66!A62r*m5|qeJ&QL1!AdPHIK< zOAi2%aGCEWkG&Quh=5h+)VMsO60a*x1All?yu`ZpZ#eodT`S`r!Gl#@(L9J_1|e`q zUp!nlkcZPWOqcT*@fgxpuIMIu3g4}V=r)DYGKhGP;65W=-yn@Qx;|}-q^}eWO@h1U zAuH_uI)A^ERz0^v4Jd6i{#V`r7G$_?lwLdhP`h>&)f2VVbq*hwy3dsGw#Dk6DJr8* zeNYqX-A0Zm2EOaR9}kzGD|!KYd&M0qG$4lmH_vsR-k(MXJY&;3T<2+>Fs+))zZ;2W9VI}BS_bjVnJG0%=BjMM? zHg>VZi{WY7pUZu}uJ9^X{5o4VS4_`#XF2XH$DQT6vs`zU=g#DO%zXv!tiYWWx-ka=6AM{*JqD}md<}a_8!7mb&lvj~bq*9)jQlwJeu#_T|@gKmJf-bQexL?lsSBt<;V7d9eKZLZ4WKr|N$NL^x>-=tIm}K&-RGd5 zbWqC#bqi2%>b-vkLGN|RzVp4Ub!7ZQ`{=y^X%gy$juqZu9oJtwShiRBCrC^Me& zivOIaW$GxiYQptGRNO{Rv#TKJ+-B+A2~kyKKlo3#9nyBaf7`ZltEst08jN^yP{eMT zXHSBNlYh!ra;&P(7ZE2RSWrup)Yy2yA4^-UkcUkjIize}@_!&@fy=8YQ zZV|H8tmB@erv)G6VsWdfL&tMqsQ-F3gE)^tY`YOYEauFr11YK-C8nFV9AE6AOr>H{ zs)RYm(zpIm7Iuppjxv=ST{)^-hsOMEDRbh<@nx~(&!XkY1L3(3QKlNh%m7s?u5=c6 zi5Lhu3wh48W3^}&`qXONg~Ii}R$VIl?;O^UVSRHfHz0MKv)mv5_*-Fikbz6DNVT-R*=ZXT^E4Ra#_@)8v@DI4sq? zxA!|~YwOI>yDaT{Wm_T2O2pdmj4%ZYE!*D;*fV!&uHgX!FSD7V9*#=L()5dOh|X)- zPs3AIk?4T8Rxuh+>?BKWhxOA-^=;BiilZQ;T?C_n)Aj+~_-ARE`kN8UAFxoH0{&(J z>-(SgkCwg~uKR`h03M!V7qB^i`oE2SBcSU>SKZiqA%%6&$x5#jV(Y$=D1h#*#!GEa zjz02q&L%|s-5wQ`RMxhPs=fLLiR}v1{d(d}g|KXHJ#Ex31h{tz>mNYgXIJ+v)e*)N zoFCooJlySk&&8SWA9p){GkwlZks@FkQunBZT#~X1&%0RZ2Ins&E;R~)S!>MIC-KB? 
zX3WP*{LAogN%)pPPK3$CtF5&(;DtVwjM?6HpP@+BO+5qXBBRAvHS#zuqM_0vjuZQd zI50*LHkZM~%_x)HqSlL$^5An3IT3M4AWVpG&dPif^l^A@WiMDK)0^*&JCR zZ=%Q{DhXEWWk>{!^D_MOVu|NO&owh*eOAX)6RR{mClcaVrTUH3pG15Cq}PuVqf!il z_~oZKw)by{5_O$N(WbNy#S|>0ij)cZW;s@TJvzX3KU{Z!-kFx{Hj9w=d&yIZ)fDC} zE=MS88R>r4Eh?YV+xUUr0(zKpAtpVK4c4(Y^SN@`f0X721YDvPWH;tZYO(d#Ihq(c zXE-b5xY#I1wXaT#)xkWZPS2t$TqhSJhVg@FVvU-#QKfU!tto2BfUY7FW2o3Wd!!~G zuf7SG*DM3QjEj$ z(bb_88DC>D-EDktXyq-&pUJtFem3Yf!YuQx0d(TxrSKxt+1ne zH2m+mo&4`C-6{SzSTT1N%~loC4pq(E=Y<|~TscRztA0NWhy){4=9O-I>FhQ=aXME# zG!U$S5=kUebPfT7Y~!S^mVH3Iic}T!u31H3^)_NiS8&u@=_wjnb%>EYQ8TH^2GdPp z$z@}48`hE@CzlY0u@TsC-LW=|K+~Xshi$q20V2g~XnA*F+=+^ccLhDyXX z@g`~)au{^s*N2h*R39@yGtU%F(THfVFm>ah?Vj5wH%fwXkKVL$HC$IgZ&mDKn+aI|{#S**ZcGf#A`B7>%l~lG;Hjv1I@?;{mlpV? z26tTAh!o%G36Jqgy?HFfrf}zjvPT zh-YztN8DeEo~+|R)J)HwN2H++cTmL+YSlQP8sQ|3wg2Rbpsh_mvbOefP@!J{wL(y0 z?U$#amN}@`IPk=c=;G(D1Pa!^d?c)W-wVtmJw5;TVNX07tvm@dP|=kn4~eFZ9R5R| zJ0SZ-@+QNakx7@VGUt`W;agdWjh<2>%ORJ1&Jm{Pi!Lp)ZRP45Qu=WbRTQN9tTL4O zx@&%%Ot|%5eo%4SB^LLO!1;5R5{^0WIkW`_LYSPeRu|Ai+4)I$EWCX*p>@dmq31wB}dA? za&4@~U52=mmun0*dOha2)5)L{lu*=lcy2E%xubQiUj*H}>_6eKbYx>z)AlBQSX1@d z`wQM#!Z6NiJMxDWVQx5oUhBvo%PlkS(6i_k%Wi(t2O}Hth9?bm(+49O7ry<|+qtW0 zGrsH{#dK0qboPDbd+H8Fk9<2r{DQ{rvZO!bpW{KTm$Us0H+CXy#Rd!{_-^7G6(n?2U`4<0N(}BR@|S` zP=9n#PdKPs@kg2ao}eU5PfbJp%0V?as7kfuu@=Bk>T5aZac%)C6S$GmhiD0hJ8en}=&oV+`XPM>M44-2R7)U40~q z95EA&A6$xr>mP8JAQp9wEAsDRHs7dAkfAbvN-OKQx{xXC>hWd%PJ;LA4shITo{=?q zg^GGy*5oEjuSBo%Bq>HFKkn|rn;#GUG6_nBG#pKqMb(cyDn7VMXHg+IK;=pM8V4+)rbp7c^}rOzX3ei{y00to&Zy;Owk=ga*gnG<}xutLnJ9 zlF1C!4$t^6sn7dt#B45QvDS*7Dc&hOL#e1@sjFdnCl!tL-Opl)Z75gCq})-FSVv4@ zJn!eAhHF~{y1I5I|9J56!DS} zpuPZ}z2>|t9cGn5HAIl31HGSBslnhyHVKB5c$cKoqo z@bu(l;>*YD-eczuRHBLRMa zKcw1B`!imj>CZTi_^ap5pRpPv3NG!@6#A;rJh{8UFVmCU8-s?BBVwg5)wBo5!qB4U zCo9mq_LvYhZ=NM>zqNNPWBBEI#7n9`cw;{wAb5X!Q~Y*DtPq);L5Tpa!0TM2fESN{ zjZ(L3`1JVKC}mtLxZb~1Fr(|0C=ra>d^{mvkJI(Kq%$hG-j^@*>N9Y(Mn#>bC-kpi zde9b)c0NTx%%#g}-e~$HjX4)DoHy~r#_?ZLuuA)nZ2UAgpJFUeSZBZw0u%&3n!!oaXL&x}wRM{Gu`GSAl;k-}jXp9Grh!chg&>iVI*7vFGJM$d zSxr0h^{X;Imy{J8H*7kwa+F&r^G-YS3#Kz3Bm`(1zoeq{^>At>gxIi1zs;mO2fL z!2pnd$)`3J+V^e;&T{3o>)G1tot^ByD*K{QLyf;}i<^ClFpaHLsX0V##pk82=&jFE z2w%8D$aIR}Q|IT#5=w~K5lfws%cW+mYc1ti%6qa(N;a2=j~Vm5=f4+xOup>JoEY_R z_ELx1%zowQhm;1wa@Z?H4h(521|-m=r0GXMy;z+h7D9OzEb^F93i?-(1>iXoPiyJmkEWx)+zZOYF@=M>u@# zM|(}i_mk2Pd$9FcEQ(RW#~dA@gOCAx0@I;IPvRwQTHs2^8);_P8@aV149s4ON~cAm z(hbbyo^YLUxjr&yZ(r%dbf@-f;0>%jZU712;J*%-Cm?3pQ8mGIG@{^kRB8WVm-(k2 zO;^t{NrJMhXw>0-)Ip;HT}VO|gCEd>aH_+|fZt9hB`ARs(9VQhp9~~;OhfelJ^qiD zF8A?2Y5sm~sK0YP?i2ruX@J017xn)I{c!l6DTxSQAPY|qGmURs5!{~h#TSxq8d zzuwH*)#dbU2J}D(Acygo{zd44NXx&V2Qa~!&4is@(VB&arae`Q29>>=jv@vpmIn9( zWHObLOkzOldLfNQ%~R0>(E;p5W)FyrzraX>!`co13BF$D@U`JZ@U_q-tL|-(Fp9Ts z`k;N{>n8!NNW6;1J(^f7{o&YXpEc1`_NiKhT3smO!K7nwPf3yeAEYaPWFMWzQhy;SrPfz+q*lovSr;SU_|3ZC7-zO^ufprT# zvZgzHc=2ZE_FSd}9r#!{Rd`_Lvltnp-;RHA?YGdF^rtX@=G-x*v+*h#S4wt=pcMaP zF)@}=PmK#y^asldHaC1G1OCVKnVsAv9+UX3&`+!@*VKw6VTO5FUqY+}fy%{;CY;Le zV$Gk%INAheOV+8KvxRXHdS;07v_>^#QoX8$G`A2-WglfJBE;YQH~57zy(Z({sFzFg z3zf{g6aQQM!V7NxX`&bh&E$Itl#27iCCXngJoj4T53Wt~2TyX<*n(V1rEZ$6Aw?_R z@z{KJ$NRj&D}Tp$g!Ig_X_h(ja5u@AbhE$Bq)X4YvF#@cDM$koOg1UeqyF?7YM(72 zxBG2${!YxSF%nRKV@{o{{mXACnxYr$oKj|8vsUhPi=%TYRj0Hn1n(u>MKx6=vrC@# zBq@VVUFJj=<3w!oL<#eaTyv{Gd7jFgJm1#j!Cy)9XUdsw{xtUMI)BD?pFc66K`@

zsQZ$=(ELa;25K^YF5IHI-;NcVUG7b>;#JowWZT<=JdLs9!$DI822X=-jTOrWyYW<5 z`Cw$(*Mk$EiWQxM$83xhyJOs)v0_imw-`vg@AG!YiqC!C?_SiMcIH&oUuP?;u+`rlBRN!_>p7SS8mbm@O|eNpSlONx={{jo=jge+!3AN z-sN(P+~$(CfHV<5)DiUn#)2+~QbHVdv>!hN9l+T!s~lo!ft1r7BhZBi{6^fYi5Hy{ zTAV27N3&bHsE7A@#}GUq{;nhXc};xo7sIEgp2cpKv?YyZe+Y{V^XnqN|+ZGsp4L{}6}E?e-eh)7Mz8 zbw`|_k9|Dk7_;4h@iLa35XU!-~ozUtNpLvI1Wau6B z0=x{FQ125@4vB5@iI<1C*ZailL%a|8#JfY#54H>$hWW*PLnf^8i8Vgz`Pmjv{OWdhyM^v{uZnW4pjW<=s63F4QGyoJo^`nIcH)izvAW$!r+3SD^vE(d zD*AEFF`)uR?f19lAjcTGJ5C?^g~KtIO2BSM+^-Il7WWNK2U)C7SYI>6F>#9L0jJ}r zFCllY(~Z*yoRE0Z>8^8%=bY{>PRC{<`t?&8cnTf#Q-g-A@uJ!5+en8`jK}*iqgQH<$zP&?^rscJ94)2`F+Jf?hvNi{%wKT0 zU!j8Le%Il6Gu)I&V_zEn&)9u{4bkR|if(s0CVVh3&e^!(ANI7vaVp}B*uLoy|8%%N zayZ_##QA?s$G~!^JErb-1fKMW8=OgB4;HJm5s%}hSxdZOu&8#QS{9A6UyX?0qGbV8Ze2p1?=R9$9 zAhCC@XbKGPm@B>sB;GYoJTZ3or*p;YV-s8EidEx=zcg1oI4=IRxnlRY;dl#H_qh0N z+2XbF!|%)%AB~Uyd5(A@dH4-;#Ouk?c(qJ>a{Le3;<^d3Z)J;@Cd5B8SG+qR_U5_b zrit+{%n@rR#=bL0d^9nB)g1BN#00#iWy7TS@3O`7lVab@7JDbf*UuFzgRwWx6|V;4 zAIcUV1Y_TtBW^t?{?TkveNe&!+2Zqq;#;#t`$4g9WQ)fRj{jkfcb1k;pUf6J4j=wtws}#U$o4|sQ&pncp`D8p@_xuM@dpQHj^;})`U3y84sk6NS9$F@ z(SL!s0QU>B-5X2g{*bq=<$nL$J*wAPLS&72#Nk+Uu0Sv0#)EjTI^6F#99u1Q?q0dM{i+qtdn$!c8yTKd!^6H`_ zmG+#%>a_~vhG8s;15LX;THa86-dhTbS-xPnSuW6|+{->zSi$lQezRQlm#nW%VS^z* z$}HE=t=xrM@f9pzJK8MA(G-H?-FN-}vy)SAQ}MoFvBHZPFK4ty_s`5PW4xAeCF9ME z6^wT=-plv^<2uGi7&kK3G8*CjWs!_%Y*`j6X2`$|wp{I${_{FpgzR zVVuS|kMT6d^BAvSoW;0|aXI5HjH?+pFuuZQ&1dU%^HOt9QQS$nxO&0&P7XPZZ>c5#cuJ>XFhYsUO zkGS(xe6ib29)3pLG07vy?n!)Yr0$nq{ZgXZxcgLA!hRP_$I-#PlL+7*mku@ijn{Kw zx`__jc0h*_9@bh+2YbVwJ{6+TOJnw39tkXM1x)#)*tm8Rs!BVqC&_72`_AyBXIrZerZZxRdc) z#(j)YCvo_U6B(y6&SPA}xPiE%6APR4H;_c2CcJxqqrIFWHG<2=Sij7u1=VqD31H{*K7O^jO^ zcQSsiE%6APR4H;_c2Cc{#}O8IFWHG<2=Sij7u1=VqD31H{*K7 zO^jO^cQSs2%slR%vy2|P-$lcYSI;wc z{7_N)h8-U-s*fI6E;KPr7~`6L?PP}pVT@J(v^bD*=^c^g__oCL;Z6($inRLfl@qvwRtsw;;DGA<4&zvSU?!ui^*|fDL5xKPO({4B{+{8fIb*b8wY^EI3QSdosa`}jC{ zP1dD@_LNh;)NYSA@oV~cA%$Gf&`14tw-XB5oaStlq(4WG5 z@k3_$Y0S63tau~bW0-GgP&`jVi+tt_URV6}94>u-OUAQB@mDcV-(@Dg;TOfzMhQA@ zWj?J_U7yT+4f9#uil^t4>3Es>+Fo`2DDyj*FXp`g+nA?4xD;+#KpBjumj$-DsQ64) zg2qP^5|KY7Up-FA%kNVns1)Xn`Fd;}mPZcr1#B2fCmr-YKhk5&pBvweE@Qr!=gUX4 zp0&&y^W(;Mn(M(+{i@*lWpw{febkmvOV zL(j>~8|x7NVLcZb^1L1~ob@ba-dLY7^jyunu}%@q@^>+BtXHgI{r50$tY4(D{3Fbl z@jAwzm~UX-SjWg`{(a_+bq(Visy{Ga9aQB_eybB<55hzZ*}3*Y#mnz%f)6lXe3Rnk z_c6gwV?OPE#rLY_Se$I|>lH7*R|)y0hW@7%FTXtr{(3|H6~)VMNP@qg`C@E*p+kNT z5=hG`6wj7zikBa}1OFcL74Iruey0)qzYIO^DL#O5Mu&#@lAekW6)(S?2!1T{wVx{f z6_%gQd~u87e=75Nt%{f5D+Eu|dZa(FNAdEzgWxNf7u*3-`$Y#0cS*juOUcXc z0D^zpkf#^d;E><*1AcGFAFlWou3z+^AL+@Pqj>p!J;)!ze0#3q<@fNwFJQjmRK?$+ zxE|{?2^he8r!|_F2Px*+q)q%X(fm_!7m-@4P`zGxIHfSA4eAE<_je zX;&*gi}^8_sFU_#UVf_$Zkfzi-=O5>H`l-yFduh^;wJ@^qQ5eqMO*rC$ZwzlS2Lft zUGef;X5i}#J=md0hx{fP@LfaChl-cq7X!bC`LaREFu6P|7>rviiht8^#mnz~L4G3h z1qUcze%lND9OeU)6feKi1^!&-<4#q){N5J$Ynd-ws(AU`Eb!}@FQ&cKIOI33fLoZ) zyIb+{dsX0Dna{dc@$%bK;C1HH@FWWz?eeb>qcMR?=?Xlfc%!{Nn)!C-<@cT7X9@Gw z4N6{qiwXRlhWvYqpQEaQ*u;F+4#mswAVL0P@OIM0na^c=zc zMc;jpMg9u3D->?UTS}hZ7e|M2e^h6YkCVe)Jg#No$1~q<;iod6Jj)#a1>j}A9H+vi z_mk350G`Ir)jaM^Q?$62c@N)b8QRt{-)xb8gZV1v4f$5)i!AckJu2f9M;$8;ddD3d ziQon9a|;(pCz$#3SfsU`h zlilhp@%e@MP_{X}9(kLCa%G9;x@A+^nFrRPX2g`-(cu`{E4`jaD!cSp- zyM>?2ys*q$o(7)A6@?{gT*2LuC}H_Hi+nlrK?}c@`Gpp~mieU?ehc$e7QPugwZq0f z1zDU>(w1V%SdM>qUqOW|h4_Pody+TyB^dJcl8+U}K7=}!FUB(|cH^tRftTg~VRb_w z-$w&Kt0Ks+Hsq%$yUF)uK|a#_=LX0-#89!;Wm)-gsbLFE|<#VosZNjYg?| z;C%jj9M8NTRXdDfq&(WB{$T^w3p$v$t}l2;Q~Y7wVsm*O2R@$0QC&(ePpb)f6`s+C zaF7yCiol=8da8XYo|8D3o0+c-DJ|PsSvB*;lUP2W9A0GJ*jL!1vPXO*dG;Iz7ZupY z^B2gEL%IssZiVb;CK^miZ!Jv_;W&i(#o$T4xIhs)kE4oNzF~vnuV(qbFdukQ5zClg 
zW$3?65%h!-9c#hMA&t81`&(=1`MaW&HSoY8c#2P2oGQ<=*vHp~{w77lvHo`E#U+YA zgXOzfPgAVYBfDw19T!k`2y9h?yggN%4xYko`c=iGhxMN~mgSA{x$-7T!P}+l0Ym<2 zC3p?XKgE2Rd>T>otv7CCzWO;Ocp1xoD0#jJgXD24KE-T!zO4~cB_Afpr%$qc%gc%| zPu?QX@N!}8mh&$?0(k1F$t4<*k+VbBSl?A9m|&e&U}CyPE1G0 zV=43E9(8Tl;Y#MKx!o|z`PJa5{KQRAIhD%=rE;R;MY7^!nSXB*rEB0ZHmKy&5|sQG zEV2kZdG(w7itNuDpl;F<{hU96|}JS8lL>p0y2ekjIK z#=5_Yb!}q#ik}tH&iqy>&lh1Z90gU{xm?-pL3Yb!UTjpv6P%A{f~S11{z=&(kLAmy ze3)WBeS+o10;T^W<_po0QMhSL1X%y&k{_t&K9(1^Dz|kk|9AxXUY2jN)YG^W6>i)F z)luKeK?T55x{A+L#B;1ao#oSBSG*iX;xvc(>L?|6GV3`<@_Z2nWrqC!*WQ64D9qATf|i2uT1_+;eLNf$HHVx#{UrRf(OJaQfE0r>buHPJM^!u7IS%dy0xc zLzqNgC?rfW*kFhfgx4zg;5{@A`tbx28z)c%3M8mF_1UWxZ+}AoR?G3{FB*Q2v@@#1zh6~+=!bGIPvM7DeEn5|+x-4{ z;1KnR-?x}P0XwH%Bki;LK6&v#neldCV3C5G<{F2fieu9isJHzi=ikGlilzztl zI0#1iOV^RD?&szNKOB<-&rp2z@8#Wh%f*=lPW4@<5A5J)^U)Js$!GmjLTG$)1>=H+ z|DRI&!`Dd()h+Ja1f238%1Qn=DPO%p@!V?#|CVO4*MO zD)c5HcYa3kb+v<}v^cZ?_ywn5l!#}1Pe%TiY>{@beO|^tAJ_cXRNnsKljOyJkde=K zlz#o=Lbs;<_|JylE%>ck{>O|-J3HT$H=3S&h2rh)QX`9(c)_@oGuM&~&eME$D!z7) z0Q(fbS@Au(4-V=U|LTe#`k>tJRlKjb^Q%&x$1DCy#oIbA48Ke9Ttf&Af4|~;-YK}U zp>vPX-z@d|jOPC@jO$J4@}m<{-_oxLX!^z^;~_%*v!wKgVtQIx@irZAg5Qg@U-l~Q z#OPn9_|X{t2FCSeq09S#^Z1>Xdc92Th>Iq*za9``lk=mDhceJVXO!MKOA2WH;wygW zun^ifJplZIWH!!6fKz!&FP8F{yjq>2@+5z28Tdyt@Xu!8Ujcp@#@#NBH?w*6A2R5# zc%HP+>XjnTjsE$HmsBnp{}dEI`XS*@!*5|cS+Ac``qeK9rFOgXR>g;YUhfcpY`WMF( z@bBI}-g|m79{79uUgtGZU!BIBvwtkToEHP9{GB&S{^nP_U-9+l3jPeO^EWc`nS6od zbM)th{>PR6^}t!r{|5nVTzrV>l_+#MqV%?(m(PjD^S@R6h|c>PG|fLLKJ>HlUh@lG z{X%(fTj_1=j48fB1@lvMKH8%AYD`bK2{@1UzmxX7OWVJv^oL^fKdbnjn4bL0iXVyb z^T#y*Lot8Xy3((oCGVwgozm|yFOquMzUh+6pGy>P?~np+(Fr35oXS5<0ZaHnUF6>m zr62mR+~2Oar}XU@{fg4B>HKTs>y3tgR5Gyn=q}A?UF!w0&%bl0)A{gH#-W$KUFdAQ zUk#kwe~a|5eJA)g6yI>U;8#jA=Rx2$))eB)1sl`xZHz-dQTxTx-eUL_A~AMrW-Y@v zpT>xW!#Z~Wr*a;AmXzmGrGLgI=`Tn97$)b(fD^s*9qE@Fv>&%A{XvzJe4a4!swzID z^6kZ%|9;Jk@w!H_{ECbK6pOE8_$LrSEA76 zh77!#fxiMc)pz{{DX001-ciu@8ItmRSj+!$;6&d(Tj(tw@9&hp{dy_Phm`(|YbBqf zy6zA0$iJs5-qrxy%as0V#+4{^d9Bg61^m>Ra(!zC{YRAk;GYPciWTQDa4P@m6CTSC zo}=%*=sGFq>T86)Cl}``#n)dczz_Akql_z2=yI*mHwAoxKD$xzq07%=gyN*>x2X8q zy>f5kbid&rj^cM%KH5iBxM(jWYU;Sw#t(geRld*;fEo$D@|@ZNpU-8 zg}-BfG|y4oiLJj(1E+pzUoDJzp|;OUm3~94zCESibEVMR{P?(SQl6u#SD0RRjpEMl zNx^O3!3z{$J0!UA$+e0ftO;)8Xa+cyb6x$_rpLZP>Dy|5ee7aM_in{Es9j9^k?8kZ zis$Z=`(KrdbHC!#$`5BN{wIvkKB#^+ZW z{mUgEtM8=ZPON_mif>SUu<^UgaP41{d$%e+9n1gaM*nB>-ml5Uc{Olu=Le-E*K51| zveK_Ngz$W&|DfV)9}>XY;gi70pJV&2l-&7WOmBCC>s33_$Mr78vA(J20r59AkgTQn z^k*aw+y8Pqa4P@wh_t|8Yx%#V_|YFoy(;?ZOLj_sIe#vIvXOI@;)lL0_Ky}zyjJ=M2;m9)Q#Ip+Xy)5$}{bUv%} zM_w=VU(|=*i=|)I{#AhU6>kHl{Ewa^^!IA}Tsk9o?r)@j7nQyWoaX(ZI|ZNvfC@i3+Qy=3$+5yI2X5qvp={%11q zuWCNIn4b1s#n*pZ@_o7%fldieYv&z|gAeZzLTl%82L1b$euJ(rKU?$vWCr~gl>U(V z87@-#ZvvWI*<>U*y-{B)tDeY5o24xH+BNax4Az|K!A z{o0!axJB_dE54^Gz@*~uwS3giK;MR@-}$gpDDEX5&u|HF_P?pZXgv8$#oKR@^eb|4 z#uYF9jsR+gIxk{ei9(m2(jTS`X!!BBNpbx?1mgz8OFvcj5%5F&*YKr0cD|pr=fyuk zc=Fc(PVIJBD{69M-%BK)L*J72w|2Xo@nk-4RQl4p<(bv@-5K=vDgElph5k*NdgY!k?e-d}hSOGjUB~I$&yxJ^1y23wd{gde|2h4BmvQ+! 
z{QqBM;16m(cHZSx%AZeuspP*$&pk9bb2V@(=bFxs5aayYrg&~l%1_^2rr%3Fp+B-y za9hv1bx(T#-VU7dpMG7G{=9i9&(OV;E50El7w1yNHz^XAC%xql41U=WBoYRms2fr$Vn6XB9Zn zADt3<^Vi*?<9GGDLVxb*a^0)Ao!`mdmc*mm72or6p|kORCvcJvaXtU-O7E!u++_GY zj7uKj|Gy5L%6aGw(Q>XYr1RU8q2Q&-nX5AJRmL$NeMLs}^X1~)sW^SVn0_jzop&pK z^e3e}e|(la|D56n)h>RFz|NNyKl~HYA)iwGdx{VJh5*L@XD(?!eqVC9P}_eMIPuAm zR|^jFpMURAd~H+;g687ihc%z|H%mDmula0&{Y?3+s=)i8(%-1~^m_y_`MIF@9!JXm zS4w|7<9ZXikbO@1AC0Y>+^zU}4F8zo(;De9s^$DbM*e@9f&YW%Go<>4wdXmvYJLBY zl>a-@9M01fxAR~5TZpK~nBoV&A(b-wXp`b=+Q6p&%qc$I68fFm7Inqf|3U!sZ>%t` zM4`(MET8uZsNLY4xmU`6NEZg_Jbd~+Me#M|x6dg4Y{QjdRn2kkoS=M?zbVku_!@ef zG>plC|58t1V{IX|uV(RT@sX6N6*jQl?goZ4qq*-Fw^d`;zss+G=vQ~cl;q@w2Ec#Gmk zJ}Vi#R@>n{iqkio>35A>oR2Gh^i$D%e&6!Pm3}&moIh9G&T;1N7vuSZimxgCxtt1Z zfcH^5OzXh9N%6~oQ+pnHoaB72mh){)uSB8CCo}LbYW`~)m-SRl^L5RCkFH-IQ2aSC zOw#!9TE;OhRB^EJxI2UX14_THcG=@K|K9*k^*XHU8hSi|^P?TD?{`I>e@^LN0i4=r zgSOif6kk-nwezqg70$^~`f12N`fZep^ETi^`gizpuhO?aE~rl1&Sw>0(|oLdzoK|f z{q1*YX5UeKNbO~vHgTYXmR~czTInxfJX{j_`&q!LeM&EsH~zl%`-0-rrwRTf#aA`| zlFnb3X_l{5`aR=9Z{zXpJ;}cv^Pl_)aBAnn8i$7Oi}LS#jBBQ$%b9(lFFi&IqGH9l zjPVem{+U<$!>-VORPoK0&z$`FuWb<}2MdWcmPoQ<(exu+U#97w27! z>rLqLea+|KgmjeoE1nJkLivq-ptv(C_}%BrOI`w;%6W81@*mcA zxJ~i)zer6dG@lEhn9+L=>bn1omi1|hAH7KEZ&Lak#`+P$2w#IKi zpac8&6yNhIDW~a0SKX%V|0T)i^P0a4oa!}nhR|=)4r?>L5``|W%fQz(|3e><3_hXl za}V$fLh0?#F94_aPHWrM*cgLevJ-*!tb#1b6N3KUB5PcU>P{K^FK*Rc4)u< zlhSWc`Mg2jd+tw4IS+kK8U~-c;@_pfDWAg{$NqYyuNwU`&tl}mioaZO`_99;kCO*) zRebgPa(}ndzn^iv30?jNaC+~Nf0Xi@{`p1VY5jZtE2R8K)LyoE=W@jl>HKwzzGK?( zj|l)a=HCo(dT(w<_|y1gF9rg)r}`^Q?mYn9WQAtIm$O!-JZ+tDidvwH84pv+-_Hl0 zRwYeQxQ=(-ww;?cZu7iuf8Hy4l#$07d7+HF-Lt)Ilb%;=)ta?lBA1Z_GL}Hb6No#S zfbwn<$|s@Z`|>GyO(YXe#;aRtH5#oZH&&(7>-PE!3&Rzs>UaFbTDRwSyk5hr)LTuz z>v&$ZY}8RC?mur~B=0S_IP_L7%W$oW;&-||uTrkpwHS+j zz3=Vo*D8CtG*Y&14ImXxZv6x8&>g3shg@P5;`)uVUU5fpE&^Oz+_7;RRfJ#P^!Ix` zzu9edJYs19%hhW1&JEvIDxo`K=t06p9~BC+sn(W5cU{!Y58wN}&|Rgy61wZO+F{a0 zx$Vu@n&pnrP!CB#=|QF6?X?;soI34ExacNxR$uH+2PC?3CL} z9&uy8=X<@Sj$f|I6T=o-o&Dub)!ElCH+!|)e6Q2m@9i7)a$d(@0K+#ce&U&%`E2YM z>BhY5bd!;E*IjqxwITlHZeR8*7(~56dj$a4r|Kt)uT7$ooX(v#F%i|G%#{YK^kVsjlM@i#U6ax*<$xwF_PA4q@J z@!NhmqlEGdQna!+JuwT(?8nVkC!;2vM%O$3(__4uf>j$ z#3Dp!cFWt-W0bdAD`)gJJwa8vdoe*Rq!(*Ia+}|2rPFj<3q53$(aZMq7$u~cAU$iI zBeHwBT(8a7(o^Cq0F8`sTxm6zJvXC^_+dWdVY}6;XQt$bSt%FlAbjPSH zgtXTs)errZQR4yYiYj4xJ?p66?%l3i^((Ea@AZ2NlclYd!g#&a+6&<*~PuqoVe*c8x+TTt?PXyjJASaQdu7`@hBzv)%W zy>flWl;&cr(J!ZQaw0U}Y+IvsrweDo(wY^r`+Z%nUy!YX*?(yHNm5sB73mYiC z;ADn2P^j6w6XxicC*<^i^C?>P-NQ@;r z{;gnFm?F(;4V>Y39|8-O_&V$bVFb_2ve8()@ zmb4L=pGpkMk)*{uLaedP9p7Hq?#&cl?9FbT**LfP`dkfgDqfh_3opTKWE}6l?xqwH zcR5UQTmsG`Z?7-hv~gQJefGm`<7jLsIGe^GC0rMh@wQ~0lg%)i<2pz4O>G8uIg#J! zcvi)8wL!(>tc}gg2`E{xF_Mtm@?*k>Qa3Y)yG!M^uN4iCfLp}@U3M7F!*AkuYBe6(Q*57Lu=}N@(gKT#hgzlvCcHX44r;k#%Fr^QmJn6k<8FSw z+^tnG-a57BVzGkR#jPx1&g*r`wO+Sap&6_KnT1ha+G-PHEN5sMzCKw6#{+x=^d(|4L%qOeB4L6Etx^keHBcgByBYcLm5sHRjeFa@PQUrkO7`$dj*}2z54B1E!wNoV_W1B~Q&tJwe99=wCs!fd zd~%?=`J@zOHO0n*3d$S=lPvZvjxXcrQE}3QR79#Fm!YMW^G^J_u9Qq(VQak z4tky60m*39UnuwMJr70%mTJYFHP-RFey>o1|3ZtW3MZ(9T5JKd#11#Nh$VEn-<*dg zz?BSE9oWl=p5X@7XS9Nd#aj4lyS*yh>(Qcf`oa1py<^~93FIC58p!juC3xj>4Qrw! 
z>gH|EufPK$X(zpzc^EprU!3qguKsLqv|X-sisYzk6sV!RSyaE$bGyCb?1%?wwC;Di z{0e##pwOGpI%H%b_t{>(TWoA=jN}X38sqs<{68`^LI1n>FF!UuF*-Ri?vCNcC1f-| zH99puk)I-j=a|jfC48?iZGJf4elVK538Q2PaPaTXv%;WChd z^=fe5a}10)Iq0^bD+AFbBMfP9B{+vV1a1iuc5pvTAf5v2MldH*dnbsiFh=3mRN8%S zsf>V=Wm~OMV7Y4mttWUsdCQ}6TG+KwgNq$LAgKhcqq4+tN2P4F1?|`rCMaxiQ_C8Z zsRKdYC`-hJ{mf)bCq)>>u*J~9q&L}lFWQqUY0d|E{*xrJqz(>V>;+-Y< z#E{!YFQ4Z=m5V#(Yg!ZUIZQDG={XB#^uJ}!n zoy-XfjhU^D|7;a*mAs2)QPHiM3aNsHMx(^tBwWLin>t+DQXB7NS!f)SHspGtksKNr zZSFX`YDo@3p(51?stktFOgto}09*f+^5`ErsJ`q$C)-_ppM5Cy9R<868 z2rEz4T3w!8Nga#G9x5Fa)mBGQRiQb@g*LQ6XbTH%L@#;8olqO9w5Hv~mQf}Mt#Ct| zm*Z_((7tcKdu{ItOElgfSjj&7sc}Frw$A(GA za08vF$G=3&6bNV(<~D&6mMEbb1f29_hmnretx(e=yqB2w=3$sgA9Bq)9?T44;BX%g z3<{V)ULFyZ;kj+TU#nMX>5{?)5kK69%1H+4fwDJ`u-GPy(q3gLtcU5iW37!F8VpCV zDqed@AF;9Jp{aY`|jPg3f%_3IjHa|M9FcRv1%ogOgMxwS-gOdcbujHQuW&Y@u_UZy+WG!+myErJPBsHNSB4q+ z9Ui%o7=~Sn=h^EGmaS)IVXPNsw|d1JNLt6mHHU8N*nrq*;y#TnNb1-e88XF5ROtG< zS?V2k6!H5AqpBk6n}VteHHwZvD~87pnz>kmEh#E_EuyUB4kN*4vl^M0wF7W)jLGZ? z^;~C9$hbv#1$btOTRYOpB+*z{()&+#LSelO?6}yZh?mifS%t&xwE-W7YVNegdN_@x z*z8Wti#VPLV~2cSkl<6BP0v=NX)L=V~+u@P!`xE*se zOtMOkLb7INYcq4P&YZ$|K@BiV8tWq(k;3ILb;F6;tbn!QUux5IO<{si+spM{4PM6b zJc8uXGO)-(p8Q4DA?^+)>vF9kyJosAn3&Bfe7%i+6GI~{m&7VPjx^yt3?AG%(9WW1 zMCOG9nkkxGW=)M}RQJsU)ugiwn@Ao9vJUJ|J2&7fJJp#^jdetEM72xeKj@C4zy>bY zDBHo|Xwm`hOQub?*Rz?9o}}~K39uZSnJjiDRk4NF!;}%7D>~TZBXY(bc>6|p`hg~0 zp6}{Z*Yy0xd}w;H%`hS~2AUmS(CnL^h;{|Q@Eb$SIhX`twr~cv{q8Z`iJ2au0y2UJ z5aFC+bkv&g=G0(#bQb#*zM3vNHHe>B2Ai7-Gc{!}UFry?Sn>>E;*dF4_1m!1Xi}c= zd@oy-S_=ypYR+=E$vnLfBVr^nn8q$enJBjH2F(w=B!^1C2q0oLFOsGW4*k-chv) zF&MG=rQ7f5dL219BjZ`rV>VK_t6HQkh#O00nO1U=5#djK31<(@D1JyA-n$I5v9g54 z84l~Nv|u<3NAYF}*(n#;0L*>7-GzmEpxd03M+D}0)bTKk;13>v2qza|0Sf}I`D3V9 zW!h0_%+59m)k`$}Zj0ix%|VK| zlc*cE{+Rs0F5$%jrbwZMmg-LFk`D%TK$SYWGey#|RMRo4Cxv9t0* zIp%{Im)vNymJ(VN59TIp)%9rcA+KIO4h=Y-Z%-$$y}s_6p*a~Gj!oTMWV0Kaev8*N z5X*}c*gDcC2bA2A(cG|ofaYc)pTe@P0$C0-Xn>-foRyZMND`Jy73EN)&T#uu2=$G%y0WW*BTCD@w z73LS1y0AHe)nQsx;6$YBN6cKrd2$ob-c2q^_DcD`<0^lFqnf!q?zsASuvtQvmLL{V zqRk71iK-86thFM-ad~Fe^-6Hfb`jSboirt54H+PY&C*H|el=|@nARrux%G77WIv{M zEwdE1l*2HxF1D4re|vQ%%4`GOhWMlEYlBMI5j#b60CK1`T)JB3SgbXVt#EZY9~nHF zflco$GNhIN)0Pjdf{Hy3xrEp%UQHOsY69jta#KNZ$K8%unDnFG4h-1AlnQlykjXqv z%@Y(553yAz&lINkX0JHUW}B==hy>(_s)$!naOq~U%i<11xHsWk?Lrf)dXUrH!GobQ zVUAZ!OtqM~Z?jg{ucr>S|73$f4<;7FBNLXWz>82Mt2q9HoOh;z*vdq`yn@Iz)dAr_ zqA@K&X}XlEyTr-vga?q;HRKHg9e`ciJeFHnU4`6&HLNca&kFWXZYSN5w)(KEg}5#p zO$QH#MKs|_B8ZM6drH?XFGmf#Al!)s1e-JBC2xd$IPQc>S=)lmjc`(G0Mro_YMkBz zOB!oL9j^{um9;Tm!^3~$s)v!Ebg&Pc?ywT)(7N4#^C{+$TZ)OivYT8IjD7QjnnRmXx<_eUcNq)w<+>`T_M$F3^G&4E0`sB4j>7u*$DG%`ywc02oMpvM+Li)fB%igpIOiCyF zP_=m@fx=`o;83YH=^7mD5Mp;)wHS;xDzhFTFlb4ff+H=-CKE}sW-Fx^Y3YVWGv}tQ zEzymP1m*xdmmzb&ER@A|pV!MHLU;*+cd1YqDEZRH4KmY072-63Kl$I+gO#r3ZL&z_ z=;CrPcS!h$G?fjJQFYzI+>r;Bl?uXwmHWff!N3$un+?cW5e}*5)))UFsf>K`(12H2 zj)m<&CXb$R!eWf%@Iu%3V?Hq^htJ=5B%$We0A`}_SdmmD9a~|6jxGBGc~AT#vMPwP zHfHH|{SnEOix$aaaN-9E=Wu*Mc@`c49l1>o9pPfYfgTjz^X9g1#wq}B3edgzEzAgT zrh^u; zW#%&s#t``-1}3#}ki=9FPdrQ>u2J%((4r&<6hS+RXeDWpNLdUyn1Y}p*pVxPumd_i zW?`X@w?zfZ0tM(sA!)gU5|oUoBfFOO6mT!g`6D5g;E_2f7N0;w9>wU}IBm)SBr z$&QYiXk+_H6{ms4(j$-zZuzND)CA9(DxRr0dTNCT3W)}EM~SVK_YwV5m`PDG+&q^} ztw50yfp_)O;^(JG#7eo_i!z+NJbUwiRy$OyJ_T{oTgPZlT7(HQ zx`6PVBDN`bjd{_)dIcTK!CE3zt%%SW2!8IDu;EO7A`8oi3?Xj7&J*yTo*5HE z_B30!VBvh&6$0-Gb!hO`!Lle~#4;R^=wmE^kcHn}X;#3;w8JGNttEcYRA&khBQ;#q z5E_gKvNVf>SiXaEF2MM)gD=ASr(%0}NTkFfbhsgualI;VC zR|baV z?tEffDB5BCD!A{PT?j;Mp%-zJ$SzH<+s2wb@tCdq1`&mm_7_y% zu=F^=_Mx#I>oYh#*qg%6J!u52W6K}pi!p)0<-=N$u)f9{=W{qSrwV(G_xe}*9Xc6y zC1D$otaB%569Kxsiv0u#en;wfa6 
zCJW9hHNRes&Jwn&EU{2%wlq&~;DMy$K?WYkf#_s+5(*SD)oU=-At%eR_i>@+l;!_# zx%QZrdKR-PUquc%2xj(VQ$C7SA{x|Yz|ux`W*OS1OJUoIm4+ZBo|PCJtsx;|7z^|8 zRhSB{jHwC^FE^06J_uV64@8`&Kw;~*=}1xceV)R7AsHX~%>3w-v*9k`h6pxfj}~DE zCZc^wEA2*w4JQlSGbd~xRrzm~jsSFxQxK|jrO;NWq9Y z+3;6^)X>EBX!F3wCoa>i;3qCGz?`AY2hC7mNa9d8I%Nj_G(LNYW||W@9td*x*nTS* z&U)_haXVDzx5S|e$Id_D?bg>N*ad;nwErPGB`{x;ubbV3s=?C!mE<7@@u(y^;)s(+ zKB2*%uSwgi6VZpcrY2MCC~+O4jNIpNWYSs4wCGu*6ekz=THLE$VbZxB!YZNLGR;5dj*4P2UQJB1ssDTCO+eE;sYk8E8Wk`6E zA*R`EOd5@%RggT=v+;P;*laV3Z>ue^>`6``Su&v{+kk6hbl~M;tSHhwgOyqi8O1A4 z!GA2gN2+4deJr)YDJpibkcokma0~KWQe;)CmctP;C#2`VV?ZYu2EhPndLja9PfgD1 zP9Vf|bXOYlIE0D0(eziZI7h+D_B}-eI?>k@=jdycLTEu>X2QcAEFowVSU_j%NqZ41 zcW}}M*4@LA2G|aXr5nkKNN@@u9ZutTH#T9hRlOOGdxprzmoP_$)q=`i@edyt2!5t5 zj}~8yJ$K**oT9@UqR3G(u}DL)*TNz;SDD_gmK{x=52rAGG@b_+mcL}T8k_TXSB~+m zA>vfxdmPW1ve`?&>*G*#Y06BV44M+YqNVx}`IPjSEDb_H`++7-JT%x(84)-MMKwdp zkQyeZ78GVu#(&5ZBArZ10t*l9HKM1Dq?u4)MeHn<4{>OGA=_IJGFX_f4R)xJeCMem zUwx&`w#uCvYag`~Xl+i))Urpk9jP{(!nUosX8dgidg0$xw(POqDS-&$zB7y9AG<1=ID|t4<83&u- z3D4*3q%uoH+K0{>v=7QA5IN|CnFysum=2V=f9sQPGh>KwY-n^KN|VFMMB8rN(J045 zw-P`PDSAi~H7PjMZD~pX-a{YvK@0_}xwOA$p|o5;lb6CXA2c?0=dgDZE4PWmwpxqf zfjyyJAl@)L7mLnLK4y?=WD#gP4R>sNeaNS^Ji6B0b_Sc6c+~~b`Zz-;xk>_i6Q8jb zZ!=DoXLW>lCCX!=kS=7znpvt|rW%(G7<|!AJCMvDZ2k7DIKmRg_(jJ4$xQ26>MTw9 zC*Zcj$HoD1v zYhf6Yy#d2;lZ6&xAGYxqol<$;e6*aF#Dg)~FekR{=TuY=S@VVIm%i5r=Qd$}j)kA{ zxR;qkzjZ9EKiCP75>~=uIChe0oSk5lg@lK#{RXC+IZZ@xXwW8J&0nDl6w4D2hb=_j zAB6jn_-T53?Ssupj zd(JSgjtt|^;YFNNF-%`TbB5VY7{>RR>5P|Qe+fQX`iSgO6`9x_UWK!2WXlIMAhNGH zGaQ7~DX&N#_BZ82Z%39~(sH9#LHbsYUJQg^gXK$S7;MDx#?Hg@`w?Vw8vdQG*}Yf) zu!{LOy0c^HpO5>; zZ{bO*UvRnG63hZpgm6BNcvoDkfcHMq~66z&h|E<6u43+;TY7qSF z`a(@$<+u0Pbr)#=HI)9XS8#I2u4|f}%1{5S{6gh8i}-``w)`fqlqYu0Jz4JU{g&S9 zzfaR|5;A9r8Wcaf9z0j>DLti$r+*nRD&k}DkAJ1;mG~?CvvL@=j6cHZhcvxi+x9|D zupxH6BbNRkH7I^|wR#FVD8FU(ME7JvT-V(AZjTGHF~ z>J>{VS8F#z?~SEjd!bO+wf;n-lxzI`p8-rmREMpz!I$(KZj|fs`tJ{s_WV%#t2O=A znqG1Nf&T2?u1A4W``P<*nm(uL1BTc1@&5l7aN>|4eM552-6Gc^?N}PS^t1O`K0gI4 z{Qgo)C`z7O-^-~6|5rG3D6Wq+xTUk}gR%72_cUEqu2&CU&3{=sgCB#! 
z6K3^qYx=gPfA7sIKs15X+tQzd^i= +#include +#include +#endif + +#include "gguf-util.h" +#include "gguf-llama.h" + +#include "ggml.h" +#ifdef GGML_USE_CUBLAS +#include "ggml-cuda.h" +#elif defined(GGML_USE_CLBLAST) +#include "ggml-opencl.h" +#endif + +#ifdef GGML_USE_METAL +#include "ggml-metal.h" +#endif +#ifdef GGML_USE_MPI +#include "ggml-mpi.h" +#endif +#ifdef GGML_USE_K_QUANTS +#ifndef QK_K +#ifdef GGML_QKK_64 +#define QK_K 64 +#else +#define QK_K 256 +#endif +#endif +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#if defined(_MSC_VER) +#pragma warning(disable: 4244 4267) // possible loss of data +#endif + +#define LLAMA_USE_SCRATCH +#define LLAMA_MAX_SCRATCH_BUFFERS 16 + +// available llama models +enum e_model { + MODEL_UNKNOWN, + MODEL_3B, + MODEL_7B, + MODEL_13B, + MODEL_30B, + MODEL_65B, + MODEL_70B, +}; + +static const size_t kB = 1024; +static const size_t MB = 1024*1024; + +// computed for n_ctx == 2048 +// TODO: dynamically determine these sizes +// needs modifications in ggml + +typedef void (*offload_func_t)(struct ggml_tensor * tensor); + +void llama_nop(struct ggml_tensor * tensor) { // don't offload by default + (void) tensor; +} + +// +// ggml helpers +// + +static void ggml_graph_compute_helper(std::vector & buf, ggml_cgraph * graph, int n_threads) { + struct ggml_cplan plan = ggml_graph_plan(graph, n_threads); + + if (plan.work_size > 0) { + buf.resize(plan.work_size); + plan.work_data = buf.data(); + } + + ggml_graph_compute(graph, &plan); +} + +// +// memory sizes (calculated for n_batch == 512) +// + +static const std::map & MEM_REQ_SCRATCH0(int n_ctx) +{ + static std::map k_sizes = { + { MODEL_3B, ((size_t) n_ctx / 16ull + 92ull) * MB }, + { MODEL_7B, ((size_t) n_ctx / 16ull + 100ull) * MB }, + { MODEL_13B, ((size_t) n_ctx / 12ull + 120ull) * MB }, + { MODEL_30B, ((size_t) n_ctx / 9ull + 160ull) * MB }, + { MODEL_65B, ((size_t) n_ctx / 6ull + 256ull) * MB }, // guess + { MODEL_70B, ((size_t) n_ctx / 7ull + 164ull) * MB }, + }; + return k_sizes; +} + +static const std::map & MEM_REQ_SCRATCH1() +{ + static std::map k_sizes = { + { MODEL_3B, 128ull * MB }, + { MODEL_7B, 160ull * MB }, + { MODEL_13B, 192ull * MB }, + { MODEL_30B, 256ull * MB }, + { MODEL_65B, 384ull * MB }, // guess + { MODEL_70B, 304ull * MB }, + }; + return k_sizes; +} + +// used to store the compute graph tensors + non-scratch data +static const std::map & MEM_REQ_EVAL() +{ + static std::map k_sizes = { + { MODEL_3B, 8ull * MB }, + { MODEL_7B, 10ull * MB }, + { MODEL_13B, 12ull * MB }, + { MODEL_30B, 16ull * MB }, + { MODEL_65B, 24ull * MB }, // guess + { MODEL_70B, 24ull * MB }, + }; + return k_sizes; +} + +// amount of VRAM needed per batch size to hold temporary results +// the values for 3b and 65b are not derived from testing but instead chosen conservatively +static const std::map & VRAM_REQ_SCRATCH_BASE() +{ + static std::map k_sizes = { + { MODEL_3B, 512ull * kB }, + { MODEL_7B, 512ull * kB }, + { MODEL_13B, 640ull * kB }, + { MODEL_30B, 768ull * kB }, + { MODEL_65B, 1536ull * kB }, + { MODEL_70B, 1536ull * kB }, // TODO (likely can be reduced) + }; + return k_sizes; +} + +// amount of VRAM needed per batch size and context to hold temporary results +// the values for 3b and 65b are not derived from testing but instead chosen conservatively +static const std::map & VRAM_REQ_SCRATCH_PER_CONTEXT() +{ + static std::map k_sizes = { + { MODEL_3B, 
+
+// default hparams (LLaMA 7B)
+struct llama_hparams {
+    uint32_t n_vocab   = 32000;
+    uint32_t n_ctx     = 512;   // this is provided as user input?
+    uint32_t n_embd    = 4096;
+    uint32_t n_mult    = 256;
+    uint32_t n_head    = 32;
+    uint32_t n_head_kv = 32;
+    uint32_t n_layer   = 32;
+    uint32_t n_rot     = 64;
+
+    // LLaMAv2
+    // TODO: load from model data hparams
+    float f_ffn_mult = 1.0f;
+    float f_rms_norm_eps = LLAMA_DEFAULT_RMS_EPS;
+
+    float rope_freq_base  = 10000.0f;
+    float rope_freq_scale = 1.0f;
+
+    enum llama_ftype ftype = LLAMA_FTYPE_MOSTLY_F16;
+
+    bool operator!=(const llama_hparams & other) const {
+        return static_cast<bool>(memcmp(this, &other, sizeof(llama_hparams))); // NOLINT
+    }
+
+    uint32_t n_gqa() const {
+        return n_head/n_head_kv;
+    }
+
+    uint32_t n_embd_head() const {
+        return n_embd/n_head;
+    }
+
+    uint32_t n_embd_gqa() const {
+        return n_embd/n_gqa();
+    }
+
+    size_t kv_size() const {
+        size_t result = 2ull;
+        result *= (size_t) n_embd_gqa();
+        result *= (size_t) n_ctx;
+        result *= (size_t) n_layer;
+        result *= sizeof(ggml_fp16_t);
+        return result;
+    }
+};
+
+struct llama_layer {
+    // normalization
+    struct ggml_tensor * attention_norm;
+
+    // attention
+    struct ggml_tensor * wq;
+    struct ggml_tensor * wk;
+    struct ggml_tensor * wv;
+    struct ggml_tensor * wo;
+
+    // normalization
+    struct ggml_tensor * ffn_norm;
+
+    // ff
+    struct ggml_tensor * w1;
+    struct ggml_tensor * w2;
+    struct ggml_tensor * w3;
+};
+
+struct llama_kv_cache {
+    struct ggml_tensor * k = NULL;
+    struct ggml_tensor * v = NULL;
+
+    struct ggml_context * ctx = NULL;
+
+    gguf_ctx_buffer buf;
+
+    int n; // number of tokens currently in the cache
+
+    ~llama_kv_cache() {
+        if (ctx) {
+            ggml_free(ctx);
+        }
+
+#ifdef GGML_USE_CUBLAS
+        ggml_cuda_free_data(k);
+        ggml_cuda_free_data(v);
+#endif // GGML_USE_CUBLAS
+    }
+};
+
+struct llama_vocab {
+    using id    = int32_t;
+    using token = std::string;
+
+    struct token_score {
+        token tok;
+        float score;
+    };
+
+    std::unordered_map<token, id> token_to_id;
+    std::vector<token_score> id_to_token;
+};
+
+struct llama_model {
+    e_model type = MODEL_UNKNOWN;
+
+    llama_hparams hparams;
+
+    struct ggml_tensor * tok_embeddings;
+
+    struct ggml_tensor * norm;
+    struct ggml_tensor * output;
+
+    std::vector<llama_layer> layers;
+    int n_gpu_layers;
+
+    // context
+    struct ggml_context * ctx = NULL;
+
+    // the model memory buffer
+    gguf_ctx_buffer buf;
+
+    // model memory mapped file
+    std::unique_ptr<gguf_mmap> mapping;
+
+    // objects representing data potentially being locked in memory
+    gguf_mlock mlock_buf;
+    gguf_mlock mlock_mmap;
+
+    // for quantize-stats only
+    std::vector<std::pair<std::string, struct ggml_tensor *>> tensors_by_name;
+
+    int64_t t_load_us = 0;
+    int64_t t_start_us = 0;
+
+    llama_vocab vocab;
+
+    ~llama_model() {
+        if (ctx) {
+            ggml_free(ctx);
+        }
+
+#ifdef GGML_USE_CUBLAS
+        for (size_t i = 0; i < tensors_by_name.size(); ++i) {
+            ggml_cuda_free_data(tensors_by_name[i].second);
+        }
+        ggml_cuda_free_scratch();
+#elif defined(GGML_USE_CLBLAST)
+        for (size_t i = 0; i < tensors_by_name.size(); ++i) {
+            ggml_cl_free_data(tensors_by_name[i].second);
+        }
+#endif
+    }
+};
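kv_size() above is the entire KV-cache footprint: two tensors (K and V), each n_embd_gqa wide and n_ctx long per layer, stored as 16-bit floats. A standalone sketch of that product for the default (LLaMA 7B) hyperparameters above, illustrative only and not part of the patch:

// Standalone sketch: kv_size() evaluated for the default llama_hparams.
// n_embd_gqa == n_embd / (n_head / n_head_kv); the factor 2 covers K and V.
#include <cstddef>
#include <cstdio>

int main() {
    const size_t n_embd    = 4096;
    const size_t n_head    = 32;
    const size_t n_head_kv = 32;  // equal to n_head, i.e. no GQA for 7B
    const size_t n_ctx     = 512;
    const size_t n_layer   = 32;
    const size_t fp16_size = 2;   // sizeof(ggml_fp16_t)

    const size_t n_gqa      = n_head / n_head_kv;
    const size_t n_embd_gqa = n_embd / n_gqa;

    const size_t kv_size = 2ull * n_embd_gqa * n_ctx * n_layer * fp16_size;
    printf("kv cache: %zu MB\n", kv_size / (1024*1024)); // 256 MB for these values
    return 0;
}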
+
+struct llama_context {
+    llama_context(const llama_model & model) : model(model), t_load_us(model.t_load_us), t_start_us(model.t_start_us) {}
+#ifdef GGML_USE_METAL
+    ~llama_context() {
+        if (ctx_metal) {
+            ggml_metal_free(ctx_metal);
+        }
+    }
+#endif
+    std::mt19937 rng;
+
+    bool has_evaluated_once = false;
+
+    int64_t t_sample_us = 0;
+    int64_t t_eval_us   = 0;
+    int64_t t_p_eval_us = 0;
+
+    int32_t n_sample = 0; // number of tokens sampled
+    int32_t n_eval   = 0; // number of eval calls
+    int32_t n_p_eval = 0; // number of tokens in eval calls for the prompt (with batch size > 1)
+
+    const llama_model & model;
+
+    bool model_owner = false;
+
+    int64_t t_load_us;
+    int64_t t_start_us;
+
+    // key + value cache for the self attention
+    struct llama_kv_cache kv_self;
+
+    size_t mem_per_token = 0;
+
+    // decode output (2-dimensional array: [n_tokens][n_vocab])
+    std::vector<float> logits;
+    bool logits_all = false;
+
+    // input embedding (1-dimensional array: [n_embd])
+    std::vector<float> embedding;
+
+    // reusable buffer for `struct ggml_graph_plan.work_data`
+    std::vector<uint8_t> work_buffer;
+
+    // memory buffers used to evaluate the model
+    // TODO: move in llama_state
+    gguf_ctx_buffer buf_compute;
+    gguf_ctx_buffer buf_scratch[LLAMA_MAX_SCRATCH_BUFFERS];
+
+#ifdef GGML_USE_METAL
+    ggml_metal_context * ctx_metal = NULL;
+#endif
+
+#ifdef GGML_USE_MPI
+    ggml_mpi_context * ctx_mpi = NULL;
+#endif
+
+    int    buf_last = 0;
+    size_t buf_max_size[LLAMA_MAX_SCRATCH_BUFFERS] = { 0 };
+
+    void use_buf(struct ggml_context * ctx, int i) {
+#if defined(LLAMA_USE_SCRATCH)
+        size_t last_size = 0;
+
+        if (i == -1) {
+            last_size = ggml_set_scratch(ctx, { 0, 0, nullptr, });
+        } else {
+            auto & buf = buf_scratch[i];
+            last_size = ggml_set_scratch(ctx, { 0, buf.size, buf.addr, });
+        }
+
+        if (buf_last >= 0) {
+            buf_max_size[buf_last] = std::max(buf_max_size[buf_last], last_size);
+        }
+
+        buf_last = i;
+#else
+        (void) i;
+        (void) ctx;
+#endif
+    }
+
+    size_t get_buf_max_mem(int i) const {
+#if defined(LLAMA_USE_SCRATCH)
+        return buf_max_size[i];
+#else
+        (void) i;
+        return 0;
+#endif
+    }
+};
+
+template <typename T>
+static T checked_mul(T a, T b) {
+    T ret = a * b;
+    if (a != 0 && ret / a != b) {
+        throw std::runtime_error(format("overflow multiplying %llu * %llu",
+                (unsigned long long) a, (unsigned long long) b));
+    }
+    return ret;
+}
+
+static size_t checked_div(size_t a, size_t b) {
+    if (b == 0 || a % b != 0) {
+        throw std::runtime_error(format("error dividing %zu / %zu", a, b));
+    }
+    return a / b;
+}
+
+static std::string llama_format_tensor_shape(const std::vector<uint32_t> & ne) {
+    char buf[256];
+    snprintf(buf, sizeof(buf), "%5u", ne.at(0));
+    for (size_t i = 1; i < ne.size(); i++) {
+        snprintf(buf + strlen(buf), sizeof(buf) - strlen(buf), " x %5u", ne.at(i));
+    }
+    return buf;
+}
+
+static size_t llama_calc_tensor_size(const std::vector<uint32_t> & ne, enum ggml_type type) {
+    size_t size = ggml_type_size(type);
+    for (uint32_t dim : ne) {
+        size = checked_mul<size_t>(size, dim);
+    }
+    return size / ggml_blck_size(type);
+}
+
+struct llama_load_tensor {
+    std::string name;
+    enum ggml_type type = GGML_TYPE_F32;
+    std::vector<uint32_t> ne;
+    size_t file_off;
+    size_t size;
+    struct ggml_tensor * ggml_tensor = NULL;
+    uint8_t * data;
+};
+
+struct llama_load_tensors_map {
+    // tensors is kept in a separate vector to preserve file order
+    std::vector<llama_load_tensor> tensors;
+    std::unordered_map<std::string, size_t> name_to_idx;
+};
+
+enum gguf_file_version {
+    gguf_file_VERSION_GGML,
+    gguf_file_VERSION_GGMF_V1, // added version field and scores in vocab
+    gguf_file_VERSION_GGJT_V1, // added padding
+    gguf_file_VERSION_GGJT_V2, // changed quantization format
+    gguf_file_VERSION_GGJT_V3, // changed Q4 and Q8 quantization format
+};
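checked_mul() above guards llama_calc_tensor_size() against silent wraparound when multiplying tensor dimensions: if a is non-zero and (a*b)/a no longer equals b, the product overflowed. A standalone sketch of the same guard follows; it is illustrative only, and checked_mul_sketch is a hypothetical stand-in for the function above:

// Standalone sketch of the overflow check used by the tensor-size math above:
// after computing a*b, dividing the product by a must give back b.
#include <cstdint>
#include <cstdio>
#include <stdexcept>

template <typename T>
static T checked_mul_sketch(T a, T b) {
    T ret = a * b;
    if (a != 0 && ret / a != b) {
        throw std::runtime_error("overflow");
    }
    return ret;
}

int main() {
    try {
        // fine: element count of a 4096 x 32000 tensor
        const uint64_t n = checked_mul_sketch<uint64_t>(4096, 32000);
        printf("elements: %llu\n", (unsigned long long) n);

        // wraps in 32 bits (2^20 * 2^20 == 2^40), so the guard fires
        checked_mul_sketch<uint32_t>(1u << 20, 1u << 20);
    } catch (const std::runtime_error & e) {
        printf("caught: %s\n", e.what());
    }
    return 0;
}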
+
+struct gguf_file_loader {
+    gguf_file file;
+    gguf_context * gguf_ctx;
+    gguf_file_version file_version;
+    llama_hparams hparams;
+    llama_vocab vocab;
+    struct ggml_context * ctx_data = NULL;
+
+    gguf_file_loader(const char * fname, llama_load_tensors_map & tensors_map)
+        : file(fname, "rb") {
+        fprintf(stderr, "llama.cpp: loading model from %s\n", fname);
+
+        struct gguf_init_params params = {
+            /*.no_alloc = */ true,
+            /*.ctx      = */ &ctx_data,
+        };
+
+        gguf_ctx = gguf_init_from_file(fname, params);
+
+        read_tensor_metadata(tensors_map);
+    }
+
+    uint32_t read_u32(const char * key) {
+        int i = gguf_find_key(gguf_ctx, key);
+        if (i == -1) {
+            throw std::runtime_error(format("cannot find param with key %s\n", key));
+        }
+
+        return gguf_get_val_u32(gguf_ctx, i);
+    }
+
+    int read_n_vocab() {
+        int i = gguf_find_key(gguf_ctx, "tokenizer.ggml.tokens");
+        if (i == -1) {
+            throw std::runtime_error("cannot find token list in GGUF file\n");
+        }
+
+        return gguf_get_arr_n(gguf_ctx, i);
+    }
+
+    void read_hparams() {
+
+        // TODO: make keys constants in header
+        // TODO: read all hparams from file
+        hparams.n_vocab = read_n_vocab();
+        hparams.n_embd  = read_u32("llama.embedding_length");
+        //hparams.n_mult  = file.read_u32();
+        hparams.n_head  = read_u32("llama.attention.head_count");
+        hparams.n_layer = read_u32("llama.layer_count");
+        //hparams.n_rot   = file.read_u32();
+        //hparams.ftype   = (enum llama_ftype) file.read_u32();
+
+        // LLaMAv2
+        hparams.n_head_kv = read_u32("llama.attention.head_count_kv");
+    }
+
+    void read_vocab() {
+        vocab.id_to_token.resize(hparams.n_vocab);
+        int token_idx = gguf_find_key(gguf_ctx, "tokenizer.ggml.tokens");
+        if (token_idx == -1) {
+            throw std::runtime_error("cannot find token list in GGUF file\n");
+        }
+
+        int score_idx = gguf_find_key(gguf_ctx, "tokenizer.ggml.scores");
+        if (score_idx == -1) {
+            throw std::runtime_error("cannot find token scores list in GGUF file\n");
+        }
+
+        for (uint32_t i = 0; i < hparams.n_vocab; i++) {
+
+            std::string word = gguf_get_arr_str(gguf_ctx, token_idx, i);
+
+            vocab.token_to_id[word] = i;
+
+            auto & tok_score = vocab.id_to_token[i];
+            tok_score.tok = std::move(word);
+            tok_score.score = gguf_get_arr_f32(gguf_ctx, score_idx, i);
+        }
+    }
+
+    void read_tensor_metadata(llama_load_tensors_map & tensors_map) {
+        const int n_tensors = gguf_get_n_tensors(gguf_ctx);
+
+        for (int i = 0; i < n_tensors; ++i) {
+            llama_load_tensor tensor;
+            const char * name = gguf_get_tensor_name(gguf_ctx, i);
+
+            struct ggml_tensor * cur = ggml_get_tensor(ctx_data, name);
+            uint32_t n_dims = cur->n_dims;
+            tensor.type = cur->type;
+            tensor.ne.resize(n_dims);
+            memcpy(tensor.ne.data(), &cur->ne[0], sizeof(tensor.ne[0]) * n_dims);
+            if (n_dims < 1 || n_dims > 2) {
+                throw std::runtime_error(format("llama.cpp: tensor '%s' should not be %u-dimensional", name, n_dims));
+            }
+            switch (tensor.type) {
+                case GGML_TYPE_F32:
+                case GGML_TYPE_F16:
+                case GGML_TYPE_Q4_0:
+                case GGML_TYPE_Q4_1:
+                case GGML_TYPE_Q5_0:
+                case GGML_TYPE_Q5_1:
+                case GGML_TYPE_Q8_0:
+                case GGML_TYPE_Q2_K:
+                case GGML_TYPE_Q3_K:
+                case GGML_TYPE_Q4_K:
+                case GGML_TYPE_Q5_K:
+                case GGML_TYPE_Q6_K:
+                    break;
+                default: {
+                    throw std::runtime_error(format("unrecognized tensor type %u\n", tensor.type));
+                }
+            }
+
+            tensor.file_off = gguf_get_data_offset(gguf_ctx) + gguf_get_tensor_offset(gguf_ctx, i);
+
+            tensor.name = name;
+            tensor.size = llama_calc_tensor_size(tensor.ne, tensor.type);
+
+            tensors_map.tensors.push_back(tensor);
+            tensors_map.name_to_idx[name] = tensors_map.tensors.size() - 1;
+        }
+    }
+};
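gguf_file_loader above always reads metadata through the same two-step pattern: gguf_find_key() to locate a key, then a typed getter such as gguf_get_val_u32(). A minimal standalone sketch of that pattern follows; it is illustrative only, and it assumes the gguf API and the gguf-llama.h header introduced by this patch series are available on the include path:

// Standalone sketch: read one u32 hyperparameter from a GGUF file the same
// way read_u32() above does.
#include <cstdio>
#include "gguf-llama.h" // assumed to pull in the gguf declarations used above

int main(int argc, char ** argv) {
    if (argc < 2) {
        fprintf(stderr, "usage: %s model.gguf\n", argv[0]);
        return 1;
    }

    struct ggml_context * ctx_data = NULL;
    struct gguf_init_params params = {
        /*.no_alloc = */ true,
        /*.ctx      = */ &ctx_data,
    };

    struct gguf_context * gguf_ctx = gguf_init_from_file(argv[1], params);

    const int i = gguf_find_key(gguf_ctx, "llama.embedding_length");
    if (i == -1) {
        fprintf(stderr, "key not found\n");
        return 1;
    }

    printf("n_embd = %u\n", gguf_get_val_u32(gguf_ctx, i));
    return 0;
}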
+
+struct gguf_file_saver {
+    gguf_file file;
+    gguf_file_loader * any_file_loader;
+    gguf_file_saver(const char * fname, gguf_file_loader * any_file_loader, enum llama_ftype new_ftype)
+        : file(fname, "wb"), any_file_loader(any_file_loader) {
+        fprintf(stderr, "llama.cpp: saving model to %s\n", fname);
+        write_magic();
+        write_hparams(new_ftype);
+        write_vocab();
+    }
+    void write_magic() {
+    }
+    void write_hparams(enum llama_ftype new_ftype) {
+        const llama_hparams & hparams = any_file_loader->hparams;
+        GGML_UNUSED(hparams);
+        GGML_UNUSED(new_ftype);
+    }
+    void write_vocab() {
+        uint32_t n_vocab = any_file_loader->hparams.n_vocab;
+        GGML_UNUSED(n_vocab);
+    }
+    void write_tensor(llama_load_tensor & tensor, enum ggml_type new_type, const void * new_data, size_t new_size) {
+        switch (new_type) {
+            case GGML_TYPE_F32:
+            case GGML_TYPE_F16:
+            case GGML_TYPE_Q4_0:
+            case GGML_TYPE_Q4_1:
+            case GGML_TYPE_Q5_0:
+            case GGML_TYPE_Q5_1:
+            case GGML_TYPE_Q8_0:
+            case GGML_TYPE_Q2_K:
+            case GGML_TYPE_Q3_K:
+            case GGML_TYPE_Q4_K:
+            case GGML_TYPE_Q5_K:
+            case GGML_TYPE_Q6_K:
+                break;
+            default: GGML_ASSERT(false);
+        }
+    }
+};
+
+struct llama_model_loader {
+    std::unique_ptr<gguf_file_loader> file_loader;
+    llama_load_tensors_map tensors_map;
+    bool use_mmap;
+    size_t num_ggml_tensors_created = 0;
+    struct ggml_context * ggml_ctx = NULL;
+    std::unique_ptr<gguf_mmap> mapping;
+
+    llama_model_loader(const std::string & fname_base, bool use_mmap) {
+        file_loader = std::unique_ptr<gguf_file_loader>(new gguf_file_loader(fname_base.c_str(), tensors_map));
+        if (!gguf_mmap::SUPPORTED) {
+            use_mmap = false;
+        }
+        this->use_mmap = use_mmap;
+    }
+
+    void calc_sizes(size_t * ctx_size_p, size_t * mmapped_size_p) const {
+        *ctx_size_p = *mmapped_size_p = 0;
+        for (const llama_load_tensor & lt : tensors_map.tensors) {
+            *ctx_size_p += sizeof(struct ggml_tensor) + GGML_OBJECT_SIZE;
+            *(use_mmap ? mmapped_size_p : ctx_size_p) += lt.size + 16;
+        }
+    }
+
+    struct ggml_tensor * get_tensor(const std::string & name, const std::vector<uint32_t> & ne, ggml_backend backend) {
+        auto it = tensors_map.name_to_idx.find(name);
+        if (it == tensors_map.name_to_idx.end()) {
+            throw std::runtime_error(std::runtime_error(format("llama.cpp: tensor '%s' is missing from model", name.c_str())));
+        }
+        llama_load_tensor & lt = tensors_map.tensors.at(it->second);
+        if (lt.ne != ne) {
+            throw std::runtime_error(format("llama.cpp: tensor '%s' has wrong shape; expected %s, got %s",
+                    name.c_str(), llama_format_tensor_shape(ne).c_str(), llama_format_tensor_shape(lt.ne).c_str()));
+        }
+
+        return get_tensor_for(lt, backend);
+    }
+
+    struct ggml_tensor * get_tensor_for(llama_load_tensor & lt, ggml_backend backend) {
+        struct ggml_tensor * tensor;
+        if (backend != GGML_BACKEND_CPU) {
+            ggml_set_no_alloc(ggml_ctx, true);
+        }
+        if (lt.ne.size() == 2) {
+            tensor = ggml_new_tensor_2d(ggml_ctx, lt.type, lt.ne.at(0), lt.ne.at(1));
+        } else {
+            GGML_ASSERT(lt.ne.size() == 1);
+            tensor = ggml_new_tensor_1d(ggml_ctx, lt.type, lt.ne.at(0));
+        }
+        ggml_set_name(tensor, lt.name.c_str());
+        GGML_ASSERT(lt.ggml_tensor == NULL); // if this fails, we called get_tensor twice on the same tensor
+
+        if (backend != GGML_BACKEND_CPU) {
+            ggml_set_no_alloc(ggml_ctx, use_mmap);
+        }
+        tensor->backend = backend;
+        lt.ggml_tensor = tensor;
+        num_ggml_tensors_created++;
+        return tensor;
+    }
+
+    void done_getting_tensors() const {
+        if (num_ggml_tensors_created != tensors_map.tensors.size()) {
+            throw std::runtime_error(std::string("llama.cpp: file contained more tensors than expected"));
+        }
+    }
+
+    void load_all_data(llama_progress_callback progress_callback, void * progress_callback_user_data, gguf_mlock * lmlock) {
+        size_t data_size = 0;
+        size_t prefetch_size = 0;
+        size_t lock_size = 0;
+        for (const 
llama_load_tensor & lt : tensors_map.tensors) { + data_size += lt.size; + if (lt.ggml_tensor->backend == GGML_BACKEND_CPU) { + prefetch_size += lt.size; + } + } + + if (use_mmap) { + mapping.reset(new gguf_mmap(&file_loader->file, prefetch_size, ggml_is_numa())); + if (lmlock) { + lmlock->init(mapping->addr); + } + } + + size_t done_size = 0; + for (llama_load_tensor & lt : tensors_map.tensors) { + if (progress_callback) { + progress_callback((float) done_size / data_size, progress_callback_user_data); + } + GGML_ASSERT(lt.ggml_tensor); // unused tensors should have been caught by load_data already + lt.data = (uint8_t *) lt.ggml_tensor->data; + + // allocate temp buffer if not using mmap + if (!use_mmap && lt.data == NULL) { + GGML_ASSERT(lt.ggml_tensor->backend != GGML_BACKEND_CPU); + lt.data = (uint8_t*)malloc(ggml_nbytes(lt.ggml_tensor)); + } + + load_data_for(lt); + + switch(lt.ggml_tensor->backend) { + case GGML_BACKEND_CPU: + lt.ggml_tensor->data = lt.data; + if (use_mmap && lmlock) { + lock_size += lt.size; + lmlock->grow_to(lock_size); + } + break; +#if defined(GGML_USE_CUBLAS) + case GGML_BACKEND_GPU: + case GGML_BACKEND_GPU_SPLIT: + ggml_cuda_transform_tensor(lt.data, lt.ggml_tensor); + if (!use_mmap) { + free(lt.data); + } + break; +#elif defined(GGML_USE_CLBLAST) + case GGML_BACKEND_GPU: + ggml_cl_transform_tensor(lt.data, lt.ggml_tensor); + if (!use_mmap) { + free(lt.data); + } + break; +#endif + default: + continue; + } + + done_size += lt.size; + } + } + + void load_data_for(llama_load_tensor & lt) { + if (use_mmap) { + lt.data = (uint8_t *) mapping->addr + lt.file_off; + } else { + gguf_file & file = file_loader->file; + file.seek(lt.file_off, SEEK_SET); + // TODO + //file.read_raw(lt.data, lt.size); + } + + if (0) { + print_checksum(lt); + } + } + + static void print_checksum(llama_load_tensor & lt) { + uint32_t sum = 0; + for (size_t i = 0; i < lt.size; i++) { + uint8_t byte = lt.data[i]; + sum = byte + (sum << 6) + (sum << 16) - sum; // sdbm hash + } + fprintf(stderr, "%s checksum: %#08x (%s, size %zu)\n", lt.name.c_str(), sum, + llama_format_tensor_shape(lt.ne).c_str(), lt.size); + } + +}; + +// +// kv cache +// + +static bool kv_cache_init( + const struct llama_hparams & hparams, + struct llama_kv_cache & cache, + ggml_type wtype, + int n_ctx, + int n_gpu_layers) { + const int n_embd = hparams.n_embd_gqa(); + const int n_layer = hparams.n_layer; + + const int64_t n_mem = n_layer*n_ctx; + const int64_t n_elements = n_embd*n_mem; + + cache.buf.resize(2u*n_elements*ggml_type_size(wtype) + 2u*MB); + cache.n = 0; + + struct ggml_init_params params; + params.mem_size = cache.buf.size; + params.mem_buffer = cache.buf.addr; + params.no_alloc = false; + + cache.ctx = ggml_init(params); + + if (!cache.ctx) { + fprintf(stderr, "%s: failed to allocate memory for kv cache\n", __func__); + return false; + } + + cache.k = ggml_new_tensor_1d(cache.ctx, wtype, n_elements); + cache.v = ggml_new_tensor_1d(cache.ctx, wtype, n_elements); + ggml_set_name(cache.k, "cache_k"); + ggml_set_name(cache.v, "cache_v"); + + (void) n_gpu_layers; +#ifdef GGML_USE_CUBLAS + if (n_gpu_layers > n_layer + 1) { + ggml_cuda_assign_buffers_no_scratch(cache.v); + } + if (n_gpu_layers > n_layer + 2) { + ggml_cuda_assign_buffers_no_scratch(cache.k); + } +#endif // GGML_USE_CUBLAS + + return true; +} + +struct llama_context_params llama_context_default_params() { + struct llama_context_params result = { + /*.seed =*/ LLAMA_DEFAULT_SEED, + /*.n_ctx =*/ 512, + /*.n_batch =*/ 512, + /*.n_gqa =*/ 1, + 
/*.rms_norm_eps =*/ LLAMA_DEFAULT_RMS_EPS, + /*.gpu_layers =*/ 0, + /*.main_gpu =*/ 0, + /*.tensor_split =*/ nullptr, + /*.rope_freq_base =*/ 10000.0f, + /*.rope_freq_scale =*/ 1.0f, + /*.progress_callback =*/ nullptr, + /*.progress_callback_user_data =*/ nullptr, + /*.low_vram =*/ false, + /*.f16_kv =*/ true, + /*.logits_all =*/ false, + /*.vocab_only =*/ false, + /*.use_mmap =*/ true, + /*.use_mlock =*/ false, + /*.embedding =*/ false, + }; + + return result; +} + +struct llama_model_quantize_params llama_model_quantize_default_params() { + struct llama_model_quantize_params result = { + /*.nthread =*/ 0, + /*.ftype =*/ LLAMA_FTYPE_MOSTLY_Q5_1, + /*.allow_requantize =*/ false, + /*.quantize_output_tensor =*/ true, + }; + + return result; +} + +int llama_max_devices() { + return LLAMA_MAX_DEVICES; +} + +bool llama_mmap_supported() { + return gguf_mmap::SUPPORTED; +} + +bool llama_mlock_supported() { + return gguf_mlock::SUPPORTED; +} + +void llama_backend_init(bool numa) { + ggml_time_init(); + + // needed to initialize f16 tables + { + struct ggml_init_params params = { 0, NULL, false }; + struct ggml_context * ctx = ggml_init(params); + ggml_free(ctx); + } + + if (numa) { + ggml_numa_init(); + } + +#ifdef GGML_USE_MPI + ggml_mpi_backend_init(); +#endif +} + +void llama_backend_free() { +#ifdef GGML_USE_MPI + ggml_mpi_backend_free(); +#endif +} + +int64_t llama_time_us() { + return ggml_time_us(); +} + +// +// model loading +// + +static const char *gguf_file_version_name(gguf_file_version version) { + switch (version) { + case gguf_file_VERSION_GGML: return "'ggml' (old version with low tokenizer quality and no mmap support)"; + case gguf_file_VERSION_GGMF_V1: return "ggmf v1 (old version with no mmap support)"; + case gguf_file_VERSION_GGJT_V1: return "ggjt v1 (pre #1405)"; + case gguf_file_VERSION_GGJT_V2: return "ggjt v2 (pre #1508)"; + case gguf_file_VERSION_GGJT_V3: return "ggjt v3 (latest)"; + } + + return "unknown"; +} + +static const char *llama_ftype_name(enum llama_ftype ftype) { + switch (ftype) { + case LLAMA_FTYPE_ALL_F32: return "all F32"; + case LLAMA_FTYPE_MOSTLY_F16: return "mostly F16"; + case LLAMA_FTYPE_MOSTLY_Q4_0: return "mostly Q4_0"; + case LLAMA_FTYPE_MOSTLY_Q4_1: return "mostly Q4_1"; + case LLAMA_FTYPE_MOSTLY_Q4_1_SOME_F16: + return "mostly Q4_1, some F16"; + case LLAMA_FTYPE_MOSTLY_Q5_0: return "mostly Q5_0"; + case LLAMA_FTYPE_MOSTLY_Q5_1: return "mostly Q5_1"; + case LLAMA_FTYPE_MOSTLY_Q8_0: return "mostly Q8_0"; + // K-quants + case LLAMA_FTYPE_MOSTLY_Q2_K: return "mostly Q2_K"; + case LLAMA_FTYPE_MOSTLY_Q3_K_S: return "mostly Q3_K - Small"; + case LLAMA_FTYPE_MOSTLY_Q3_K_M: return "mostly Q3_K - Medium"; + case LLAMA_FTYPE_MOSTLY_Q3_K_L: return "mostly Q3_K - Large"; + case LLAMA_FTYPE_MOSTLY_Q4_K_S: return "mostly Q4_K - Small"; + case LLAMA_FTYPE_MOSTLY_Q4_K_M: return "mostly Q4_K - Medium"; + case LLAMA_FTYPE_MOSTLY_Q5_K_S: return "mostly Q5_K - Small"; + case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "mostly Q5_K - Medium"; + case LLAMA_FTYPE_MOSTLY_Q6_K: return "mostly Q6_K"; + default: return "unknown, may not work"; + } +} + +static const char *llama_model_type_name(e_model type) { + switch (type) { + case MODEL_3B: return "3B"; + case MODEL_7B: return "7B"; + case MODEL_13B: return "13B"; + case MODEL_30B: return "30B"; + case MODEL_65B: return "65B"; + case MODEL_70B: return "70B"; + default: GGML_ASSERT(false); + } +} + +static void llama_model_load_internal( + const std::string & fname, + llama_model & model, + llama_vocab & vocab, + int n_ctx, + int 
n_batch,
+        int n_gqa,
+        float rms_norm_eps,
+        int n_gpu_layers,
+        int main_gpu,
+        const float * tensor_split,
+        float rope_freq_base,
+        float rope_freq_scale,
+        bool low_vram,
+        ggml_type memory_type,
+        bool use_mmap,
+        bool use_mlock,
+        bool vocab_only,
+        llama_progress_callback progress_callback,
+        void * progress_callback_user_data) {
+
+    model.t_start_us = ggml_time_us();
+
+    std::unique_ptr<llama_model_loader> ml(new llama_model_loader(fname, use_mmap));
+
+    vocab = std::move(ml->file_loader->vocab);
+    model.hparams = ml->file_loader->hparams;
+    model.n_gpu_layers = n_gpu_layers;
+    gguf_file_version file_version = ml->file_loader->file_version;
+
+    auto & hparams = model.hparams;
+
+    // TODO: read from file
+    hparams.f_rms_norm_eps = rms_norm_eps;
+
+    {
+        switch (hparams.n_layer) {
+            case 26: model.type = e_model::MODEL_3B; break;
+            case 32: model.type = e_model::MODEL_7B; break;
+            case 40: model.type = e_model::MODEL_13B; break;
+            case 60: model.type = e_model::MODEL_30B; break;
+            case 80: model.type = e_model::MODEL_65B; break;
+            default:
+                {
+                    if (hparams.n_layer < 32) {
+                        model.type = e_model::MODEL_7B;
+                    }
+                } break;
+        }
+
+        hparams.n_ctx = n_ctx;
+
+        // LLaMAv2
+        hparams.n_head_kv = hparams.n_head / n_gqa;
+        if (model.type == e_model::MODEL_65B && n_gqa == 8) {
+            fprintf(stderr, "%s: warning: assuming 70B model based on GQA == %d\n", __func__, n_gqa);
+            model.type = e_model::MODEL_70B;
+            hparams.f_ffn_mult = 1.3f; // from the params.json of the 70B model
+        }
+
+        hparams.rope_freq_base  = rope_freq_base;
+        hparams.rope_freq_scale = rope_freq_scale;
+    }
+
+    // ref: https://github.com/facebookresearch/llama/blob/6c7fe276574e78057f917549435a2554000a876d/llama/model.py#L194-L199
+    const uint32_t n_ff_raw  = 2*(4*hparams.n_embd)/3;
+    const uint32_t n_ff_mult = hparams.f_ffn_mult*n_ff_raw;
+    const uint32_t n_ff = ((n_ff_mult + hparams.n_mult - 1)/hparams.n_mult)*hparams.n_mult;
+    //const uint32_t n_ff = 28672;
+
+    {
+        fprintf(stderr, "%s: format     = %s\n",  __func__, gguf_file_version_name(file_version));
+        fprintf(stderr, "%s: n_vocab    = %u\n",  __func__, hparams.n_vocab);
+        fprintf(stderr, "%s: n_ctx      = %u\n",  __func__, hparams.n_ctx);
+        fprintf(stderr, "%s: n_embd     = %u\n",  __func__, hparams.n_embd);
+        fprintf(stderr, "%s: n_mult     = %u\n",  __func__, hparams.n_mult);
+        fprintf(stderr, "%s: n_head     = %u\n",  __func__, hparams.n_head);
+        fprintf(stderr, "%s: n_head_kv  = %u\n",  __func__, hparams.n_head_kv);
+        fprintf(stderr, "%s: n_layer    = %u\n",  __func__, hparams.n_layer);
+        fprintf(stderr, "%s: n_rot      = %u\n",  __func__, hparams.n_rot); // a.k.a. 
n_embd_head, n_head_dim + fprintf(stderr, "%s: n_gqa = %u\n", __func__, hparams.n_gqa()); + fprintf(stderr, "%s: rnorm_eps = %.1e\n", __func__, hparams.f_rms_norm_eps); + fprintf(stderr, "%s: n_ff = %u\n", __func__, n_ff); + fprintf(stderr, "%s: freq_base = %.1f\n", __func__, hparams.rope_freq_base); + fprintf(stderr, "%s: freq_scale = %g\n", __func__, hparams.rope_freq_scale); + fprintf(stderr, "%s: ftype = %u (%s)\n", __func__, hparams.ftype, llama_ftype_name(hparams.ftype)); + fprintf(stderr, "%s: model size = %s\n", __func__, llama_model_type_name(model.type)); + } + + if (file_version < gguf_file_VERSION_GGJT_V2) { + if (hparams.ftype != LLAMA_FTYPE_ALL_F32 && + hparams.ftype != LLAMA_FTYPE_MOSTLY_F16 && + hparams.ftype != LLAMA_FTYPE_MOSTLY_Q8_0) { + throw std::runtime_error(format("this format is no longer supported (see https://github.com/ggerganov/llama.cpp/pull/1405)")); + } + } + + if (file_version < gguf_file_VERSION_GGJT_V3) { + if (hparams.ftype == LLAMA_FTYPE_MOSTLY_Q4_0 || + hparams.ftype == LLAMA_FTYPE_MOSTLY_Q4_1 || + hparams.ftype == LLAMA_FTYPE_MOSTLY_Q8_0) { + throw std::runtime_error(format("this format is no longer supported (see https://github.com/ggerganov/llama.cpp/pull/1508)")); + } + } + + if (vocab_only) { + return; + } + + auto & ctx = model.ctx; + + size_t ctx_size; + size_t mmapped_size; + ml->calc_sizes(&ctx_size, &mmapped_size); + fprintf(stderr, "%s: ggml ctx size = %7.2f MB\n", __func__, ctx_size/1024.0/1024.0); + + // create the ggml context + { + model.buf.resize(ctx_size); + if (use_mlock) { + model.mlock_buf.init (model.buf.addr); + model.mlock_buf.grow_to(model.buf.size); + } + + struct ggml_init_params params = { + /*.mem_size =*/ model.buf.size, + /*.mem_buffer =*/ model.buf.addr, + /*.no_alloc =*/ ml->use_mmap, + }; + + model.ctx = ggml_init(params); + if (!model.ctx) { + throw std::runtime_error(format("ggml_init() failed")); + } + } + + (void) main_gpu; +#if defined(GGML_USE_CUBLAS) + fprintf(stderr, "%s: using CUDA for GPU acceleration\n", __func__); + ggml_cuda_set_main_device(main_gpu); +#define LLAMA_BACKEND_OFFLOAD GGML_BACKEND_GPU +#define LLAMA_BACKEND_OFFLOAD_SPLIT GGML_BACKEND_GPU_SPLIT +#elif defined(GGML_USE_CLBLAST) + fprintf(stderr, "%s: using OpenCL for GPU acceleration\n", __func__); +#define LLAMA_BACKEND_OFFLOAD GGML_BACKEND_GPU +#define LLAMA_BACKEND_OFFLOAD_SPLIT GGML_BACKEND_GPU +#else +#define LLAMA_BACKEND_OFFLOAD GGML_BACKEND_CPU +#define LLAMA_BACKEND_OFFLOAD_SPLIT GGML_BACKEND_CPU +#endif + + // prepare memory for the weights + size_t vram_weights = 0; + size_t vram_scratch = 0; + { + const uint32_t n_embd = hparams.n_embd; + const uint32_t n_embd_gqa = hparams.n_embd_gqa(); + const uint32_t n_layer = hparams.n_layer; + const uint32_t n_vocab = hparams.n_vocab; + + ml->ggml_ctx = ctx; + + model.tok_embeddings = ml->get_tensor("tok_embeddings.weight", {n_embd, n_vocab}, GGML_BACKEND_CPU); + + // "output" tensor + { + ggml_backend backend_norm; + ggml_backend backend_output; + if (n_gpu_layers > int(n_layer)) { // NOLINT + // norm is not performance relevant on its own but keeping it in VRAM reduces data copying + // on Windows however this is detrimental unless everything is on the GPU +#ifndef _WIN32 + backend_norm = low_vram ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; +#else + backend_norm = low_vram || n_gpu_layers <= (int) n_layer + 2 ? 
GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; +#endif // _WIN32 + + backend_output = LLAMA_BACKEND_OFFLOAD_SPLIT; + } else { + backend_norm = GGML_BACKEND_CPU; + backend_output = GGML_BACKEND_CPU; + } + + model.norm = ml->get_tensor("norm.weight", {n_embd}, backend_norm); + model.output = ml->get_tensor("output.weight", {n_embd, n_vocab}, backend_output); + if (backend_norm == GGML_BACKEND_GPU) { + vram_weights += ggml_nbytes(model.norm); + } + if (backend_output == GGML_BACKEND_GPU_SPLIT) { + vram_weights += ggml_nbytes(model.output); + } + } + + const int i_gpu_start = n_layer - n_gpu_layers; + + model.layers.resize(n_layer); + for (uint32_t i = 0; i < n_layer; ++i) { + const ggml_backend backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; // NOLINT + const ggml_backend backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD_SPLIT; // NOLINT + + auto & layer = model.layers[i]; + + std::string layers_i = "layers." + std::to_string(i); + + layer.attention_norm = ml->get_tensor(layers_i + ".attention_norm.weight", {n_embd}, backend); + + layer.wq = ml->get_tensor(layers_i + ".attention.wq.weight", {n_embd, n_embd}, backend_split); + layer.wk = ml->get_tensor(layers_i + ".attention.wk.weight", {n_embd, n_embd_gqa}, backend_split); + layer.wv = ml->get_tensor(layers_i + ".attention.wv.weight", {n_embd, n_embd_gqa}, backend_split); + layer.wo = ml->get_tensor(layers_i + ".attention.wo.weight", {n_embd, n_embd}, backend_split); + + layer.ffn_norm = ml->get_tensor(layers_i + ".ffn_norm.weight", {n_embd}, backend); + + layer.w1 = ml->get_tensor(layers_i + ".feed_forward.w1.weight", {n_embd, n_ff}, backend_split); + layer.w2 = ml->get_tensor(layers_i + ".feed_forward.w2.weight", { n_ff, n_embd}, backend_split); + layer.w3 = ml->get_tensor(layers_i + ".feed_forward.w3.weight", {n_embd, n_ff}, backend_split); + + if (backend == GGML_BACKEND_GPU) { + vram_weights += + ggml_nbytes(layer.attention_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + + ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + + ggml_nbytes(layer.w1) + ggml_nbytes(layer.w2) + ggml_nbytes(layer.w3); + } + } + } + + ml->done_getting_tensors(); + + // print memory requirements + { + const size_t scale = memory_type == GGML_TYPE_F32 ? 
2 : 1; + + // this is the total memory required to run the inference + const size_t mem_required = + ctx_size + + mmapped_size - vram_weights + // weights in VRAM not in memory + MEM_REQ_SCRATCH0(hparams.n_ctx).at(model.type) + + MEM_REQ_SCRATCH1().at(model.type) + + MEM_REQ_EVAL().at(model.type); + + // this is the memory required by one llama_state + const size_t mem_required_state = + scale*hparams.kv_size(); + + fprintf(stderr, "%s: mem required = %7.2f MB (+ %7.2f MB per state)\n", __func__, + mem_required / 1024.0 / 1024.0, mem_required_state / 1024.0 / 1024.0); + + (void) vram_scratch; + (void) n_batch; +#ifdef GGML_USE_CUBLAS + if (low_vram) { + fprintf(stderr, "%s: not allocating a VRAM scratch buffer due to low VRAM option\n", __func__); + ggml_cuda_set_scratch_size(0); // disable scratch + } else { + const size_t vram_scratch_base = VRAM_REQ_SCRATCH_BASE().at(model.type); + const size_t vram_scratch_per_context = VRAM_REQ_SCRATCH_PER_CONTEXT().at(model.type); + vram_scratch = n_batch * (vram_scratch_base + n_ctx * vram_scratch_per_context); + ggml_cuda_set_scratch_size(vram_scratch); + if (n_gpu_layers > 0) { + fprintf(stderr, "%s: allocating batch_size x (%zd kB + n_ctx x %zd B) = %zd MB VRAM for the scratch buffer\n", + __func__, vram_scratch_base / kB, vram_scratch_per_context, + (vram_scratch + MB - 1) / MB); // round up + } + } +#endif // GGML_USE_CUBLAS + +#if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) + const int n_gpu = std::min(n_gpu_layers, int(hparams.n_layer)); + + fprintf(stderr, "%s: offloading %d repeating layers to GPU\n", __func__, n_gpu); + if (n_gpu_layers > (int) hparams.n_layer) { + fprintf(stderr, "%s: offloading non-repeating layers to GPU\n", __func__); + } + size_t vram_kv_cache = 0; + +#ifdef GGML_USE_CUBLAS + const int max_backend_supported_layers = hparams.n_layer + 3; + const int max_offloadable_layers = low_vram ? hparams.n_layer + 1 : hparams.n_layer + 3; + if (n_gpu_layers > (int) hparams.n_layer + 1) { + if (low_vram) { + fprintf(stderr, "%s: cannot offload v cache to GPU due to low VRAM option\n", __func__); + } else { + fprintf(stderr, "%s: offloading v cache to GPU\n", __func__); + vram_kv_cache += hparams.kv_size() / 2; + } + } + if (n_gpu_layers > (int) hparams.n_layer + 2) { + if (low_vram) { + fprintf(stderr, "%s: cannot offload k cache to GPU due to low VRAM option\n", __func__); + } else { + fprintf(stderr, "%s: offloading k cache to GPU\n", __func__); + vram_kv_cache += hparams.kv_size() / 2; + } + } +#elif defined(GGML_USE_CLBLAST) + const int max_backend_supported_layers = hparams.n_layer + 1; + const int max_offloadable_layers = hparams.n_layer + 1; +#endif // GGML_USE_CUBLAS + + fprintf(stderr, "%s: offloaded %d/%d layers to GPU\n", + __func__, std::min(n_gpu_layers, max_offloadable_layers), max_backend_supported_layers); + fprintf(stderr, "%s: total VRAM used: %zu MB\n", + __func__, (vram_weights + vram_scratch + vram_kv_cache + MB - 1) / MB); // round up +#else + (void) n_gpu_layers; +#endif // defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) + } + + // populate `tensors_by_name` + for (llama_load_tensor & lt : ml->tensors_map.tensors) { + model.tensors_by_name.emplace_back(lt.name, lt.ggml_tensor); + } + + (void) tensor_split; +#if defined(GGML_USE_CUBLAS) + { + ggml_cuda_set_tensor_split(tensor_split); + } +#endif + + ml->load_all_data(progress_callback, progress_callback_user_data, use_mlock ? 
&model.mlock_mmap : NULL); + + if (progress_callback) { + progress_callback(1.0f, progress_callback_user_data); + } + + model.mapping = std::move(ml->mapping); + + // loading time will be recalculate after the first eval, so + // we take page faults deferred by mmap() into consideration + model.t_load_us = ggml_time_us() - model.t_start_us; +} + +static bool llama_model_load( + const std::string & fname, + llama_model & model, + llama_vocab & vocab, + int n_ctx, + int n_batch, + int n_gqa, + float rms_norm_eps, + int n_gpu_layers, + int main_gpu, + const float * tensor_split, + float rope_freq_base, + float rope_freq_scale, + bool low_vram, + ggml_type memory_type, + bool use_mmap, + bool use_mlock, + bool vocab_only, + llama_progress_callback progress_callback, + void *progress_callback_user_data) { + try { + llama_model_load_internal(fname, model, vocab, n_ctx, n_batch, n_gqa, rms_norm_eps, n_gpu_layers, main_gpu, tensor_split, rope_freq_base, rope_freq_scale, low_vram, memory_type, + use_mmap, use_mlock, vocab_only, progress_callback, progress_callback_user_data); + return true; + } catch (const std::exception & err) { + fprintf(stderr, "error loading model: %s\n", err.what()); + return false; + } +} + +// evaluate the transformer +// +// - lctx: llama context +// - tokens: new batch of tokens to process +// - embd embeddings input +// - n_tokens number of tokens +// - n_past: the context size so far +// - n_threads: number of threads to use +// +static bool llama_eval_internal( + llama_context & lctx, + const llama_token * tokens, + const float * embd, + int n_tokens, + int n_past, + int n_threads, + const char * cgraph_fname) { + + GGML_ASSERT((!tokens && embd) || (tokens && !embd)); + +#ifdef GGML_USE_MPI + ggml_mpi_eval_init(lctx.ctx_mpi, &n_tokens, &n_past, &n_threads); +#endif + + const int64_t t_start_us = ggml_time_us(); + + const int N = n_tokens; + + const auto & model = lctx.model; + const auto & hparams = model.hparams; + + const auto & kv_self = lctx.kv_self; + + GGML_ASSERT(!!kv_self.ctx); + + const int64_t n_embd = hparams.n_embd; + const int64_t n_layer = hparams.n_layer; + const int64_t n_ctx = hparams.n_ctx; + const int64_t n_head = hparams.n_head; + const int64_t n_head_kv = hparams.n_head_kv; + const int64_t n_embd_head = hparams.n_embd_head(); + const int64_t n_vocab = hparams.n_vocab; + const int64_t n_embd_gqa = hparams.n_embd_gqa(); + + + GGML_ASSERT(n_embd_head == hparams.n_rot); + + const float freq_base = hparams.rope_freq_base; + const float freq_scale = hparams.rope_freq_scale; + const float rms_norm_eps = hparams.f_rms_norm_eps; + + const int n_gpu_layers = model.n_gpu_layers; + + auto & mem_per_token = lctx.mem_per_token; + auto & buf_compute = lctx.buf_compute; + + struct ggml_init_params params = { + /*.mem_size =*/ buf_compute.size, + /*.mem_buffer =*/ buf_compute.addr, + /*.no_alloc =*/ false, + }; + + struct ggml_context * ctx0 = ggml_init(params); + + ggml_cgraph * gf = ggml_new_graph(ctx0); + + // for big prompts, if BLAS is enabled, it is better to use only one thread + // otherwise, the threads are spin-lock waiting for the BLAS calls and are degrading the performance + n_threads = N >= 32 && ggml_cpu_has_blas() && !ggml_cpu_has_gpublas() ? 
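// --- editorial sketch, not part of this patch ---
// Illustration of the grouped-query attention sizes read from hparams above:
// the K and V projections are n_embd_gqa wide, which shrinks the KV cache
// whenever n_head_kv < n_head. The numbers are an assumed 70B-style config.
#include <cstdio>

int main() {
    const int n_embd      = 8192;
    const int n_head      = 64;
    const int n_head_kv   = 8;                        // n_gqa = 64 / 8 = 8
    const int n_embd_head = n_embd / n_head;          // 128
    const int n_embd_gqa  = n_embd_head * n_head_kv;  // 1024, vs n_embd = 8192

    printf("wq: %d x %d, wk/wv: %d x %d\n", n_embd, n_embd, n_embd, n_embd_gqa);
    return 0;
}
// --- end editorial sketch ---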
1 : n_threads; + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + if (tokens) { + struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, N); + memcpy(inp_tokens->data, tokens, N*ggml_element_size(inp_tokens)); + ggml_set_name(inp_tokens, "inp_tokens"); + + inpL = ggml_get_rows(ctx0, model.tok_embeddings, inp_tokens); + } else { +#ifdef GGML_USE_MPI + GGML_ASSERT(false && "not implemented"); +#endif + + inpL = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, N); + memcpy(inpL->data, embd, N * n_embd * ggml_element_size(inpL)); + } + + const int i_gpu_start = n_layer - n_gpu_layers; + (void) i_gpu_start; + + // offload functions set the tensor output backend to GPU + // tensors are GPU-accelerated if any input or the output has been offloaded + // + // with the low VRAM option VRAM scratch is disabled in llama_load_model_internal + // in that case ggml_cuda_assign_buffers has no effect + offload_func_t offload_func_nr = llama_nop; // nr = non-repeating + offload_func_t offload_func_kq = llama_nop; + offload_func_t offload_func_v = llama_nop; + +#ifdef GGML_USE_CUBLAS + if (n_gpu_layers > n_layer) { + offload_func_nr = ggml_cuda_assign_buffers; + } + if (n_gpu_layers > n_layer + 1) { + offload_func_v = ggml_cuda_assign_buffers; + } + if (n_gpu_layers > n_layer + 2) { + offload_func_kq = ggml_cuda_assign_buffers; + } +#endif // GGML_USE_CUBLAS + + for (int il = 0; il < n_layer; ++il) { + ggml_format_name(inpL, "layer_inp_%d", il); + + offload_func_t offload_func = llama_nop; + +#ifdef GGML_USE_CUBLAS + if (il >= i_gpu_start) { + offload_func = ggml_cuda_assign_buffers; + } +#endif // GGML_USE_CUBLAS + + struct ggml_tensor * inpSA = inpL; + + lctx.use_buf(ctx0, 0); + + // norm + { + cur = ggml_rms_norm(ctx0, inpL, rms_norm_eps); + offload_func(cur); + ggml_set_name(cur, "rms_norm_0"); + + // cur = cur*attention_norm(broadcasted) + cur = ggml_mul(ctx0, cur, model.layers[il].attention_norm); + offload_func(cur); + ggml_set_name(cur, "attention_norm_0"); + } + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * tmpk = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + offload_func_kq(tmpk); + ggml_set_name(tmpk, "tmpk"); + + struct ggml_tensor * tmpq = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + offload_func_kq(tmpq); + ggml_set_name(tmpq, "tmpq"); + + struct ggml_tensor * Kcur = ggml_rope_custom_inplace(ctx0, ggml_reshape_3d(ctx0, tmpk, n_embd_head, n_head_kv, N), n_past, n_embd_head, 0, 0, freq_base, freq_scale); + offload_func_kq(Kcur); + ggml_set_name(Kcur, "Kcur"); + + struct ggml_tensor * Qcur = ggml_rope_custom_inplace(ctx0, ggml_reshape_3d(ctx0, tmpq, n_embd_head, n_head, N), n_past, n_embd_head, 0, 0, freq_base, freq_scale); + offload_func_kq(Qcur); + ggml_set_name(Qcur, "Qcur"); + + // store key and value to memory + { + // compute the transposed [N, n_embd] V matrix + + struct ggml_tensor * tmpv = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + offload_func_v(tmpv); + ggml_set_name(tmpv, "tmpv"); + + struct ggml_tensor * Vcur = ggml_transpose(ctx0, ggml_reshape_2d(ctx0, tmpv, n_embd_gqa, N)); + offload_func_v(Vcur); + ggml_set_name(Vcur, "Vcur"); + + struct ggml_tensor * k = ggml_view_1d(ctx0, kv_self.k, N*n_embd_gqa, (ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + n_past)); + offload_func_kq(k); + ggml_set_name(k, "k"); + + struct ggml_tensor * v = ggml_view_2d(ctx0, kv_self.v, N, n_embd_gqa, + ( n_ctx)*ggml_element_size(kv_self.v), + (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + 
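// --- editorial sketch, not part of this patch ---
// The K-cache indexing behind the ggml_view_1d call above: one layer's keys
// occupy a contiguous [n_embd_gqa x n_ctx] block, so the write offset for
// the current batch is elem_size * n_embd_gqa * (il*n_ctx + n_past).
// All values below are assumptions.
#include <cstddef>
#include <cstdio>

int main() {
    const size_t elem_size  = 2;    // f16
    const size_t n_embd_gqa = 1024;
    const size_t n_ctx      = 2048;
    const size_t il = 3, n_past = 100;

    const size_t k_offset = elem_size * n_embd_gqa * (il * n_ctx + n_past);
    printf("layer %zu, position %zu -> K offset %zu bytes\n", il, n_past, k_offset);
    return 0;
}
// --- end editorial sketch ---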
n_past*ggml_element_size(kv_self.v)); + offload_func_v(v); + ggml_set_name(v, "v"); + + // important: storing RoPE-ed version of K in the KV cache! + ggml_build_forward_expand(gf, ggml_cpy(ctx0, Kcur, k)); + ggml_build_forward_expand(gf, ggml_cpy(ctx0, Vcur, v)); + } + + struct ggml_tensor * Q = + ggml_permute(ctx0, + Qcur, + 0, 2, 1, 3); + offload_func_kq(Q); + ggml_set_name(Q, "Q"); + + struct ggml_tensor * K = + ggml_permute(ctx0, + ggml_reshape_3d(ctx0, + ggml_view_1d(ctx0, kv_self.k, (n_past + N)*n_embd_gqa, il*n_ctx*ggml_element_size(kv_self.k)*n_embd_gqa), + n_embd_head, n_head_kv, n_past + N), + 0, 2, 1, 3); + offload_func_kq(K); + ggml_set_name(K, "K"); + + // K * Q + struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); + offload_func_kq(KQ); + ggml_set_name(KQ, "KQ"); + + // KQ_scaled = KQ / sqrt(n_embd_head) + struct ggml_tensor * KQ_scale = ggml_new_f32(ctx0, 1.0f/sqrtf(float(n_embd)/n_head)); + ggml_set_name(KQ_scale, "1/sqrt(n_embd_head)"); + + // KQ_scaled shape [n_past + N, N, n_head, 1] + struct ggml_tensor * KQ_scaled = ggml_scale_inplace(ctx0, KQ, KQ_scale); + offload_func_kq(KQ_scaled); + ggml_set_name(KQ_scaled, "KQ_scaled"); + + // KQ_masked = mask_past(KQ_scaled) + struct ggml_tensor * KQ_masked = ggml_diag_mask_inf_inplace(ctx0, KQ_scaled, n_past); + offload_func_kq(KQ_masked); + ggml_set_name(KQ_masked, "KQ_masked"); + + // KQ = soft_max(KQ_masked) + struct ggml_tensor * KQ_soft_max = ggml_soft_max_inplace(ctx0, KQ_masked); + offload_func_v(KQ_soft_max); + ggml_set_name(KQ_soft_max, "KQ_soft_max"); + + // split cached V into n_head heads + struct ggml_tensor * V = + ggml_view_3d(ctx0, kv_self.v, + n_past + N, n_embd_head, n_head_kv, + n_ctx*ggml_element_size(kv_self.v), + n_ctx*ggml_element_size(kv_self.v)*n_embd_head, + n_ctx*ggml_element_size(kv_self.v)*n_embd_gqa*il); + offload_func_v(V); + ggml_set_name(V, "V"); + +#if 1 + struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); + offload_func_v(KQV); + ggml_set_name(KQV, "KQV"); +#else + // make V contiguous in memory to speed up the matmul, however we waste time on the copy + // on M1 this is faster for the perplexity computation, but ~5% slower for the single-token generation + // is there a better way? 
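// --- editorial sketch, not part of this patch ---
// A scalar miniature of the attention step assembled above:
// scores = softmax(K.q / sqrt(n_embd_head)), single head, tiny sizes, no
// ggml. The causal mask is a no-op for the last position, so it is omitted.
#include <cmath>
#include <cstdio>

int main() {
    const int d = 4, n = 3;                       // head dim, cached positions
    const float q[4]    = {0.1f, 0.2f, 0.3f, 0.4f};
    const float k[3][4] = {{1,0,0,0}, {0,1,0,0}, {0,0,1,0}};
    float s[3];

    for (int i = 0; i < n; ++i) {                 // K * Q, then "KQ_scaled"
        s[i] = 0.0f;
        for (int j = 0; j < d; ++j) s[i] += k[i][j] * q[j];
        s[i] /= sqrtf((float) d);
    }

    float mx = s[0];                              // "KQ_soft_max"
    for (int i = 1; i < n; ++i) mx = fmaxf(mx, s[i]);
    float sum = 0.0f;
    for (int i = 0; i < n; ++i) { s[i] = expf(s[i] - mx); sum += s[i]; }
    for (int i = 0; i < n; ++i) printf("p[%d] = %.3f\n", i, s[i] / sum);
    return 0;
}
// --- end editorial sketch ---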
+ struct ggml_tensor * V_cont = ggml_cpy(ctx0, V, ggml_new_tensor_3d(ctx0, kv_self.v->type, n_past + N, n_embd_head, n_head)); + struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V_cont, KQ_soft_max); +#endif + + // KQV_merged = KQV.permute(0, 2, 1, 3) + struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); + offload_func_v(KQV_merged); + ggml_set_name(KQV_merged, "KQV_merged"); + + // cur = KQV_merged.contiguous().view(n_embd, N) + cur = ggml_cpy(ctx0, + KQV_merged, + ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, N)); + offload_func_v(cur); + ggml_set_name(cur, "KQV_merged_contiguous"); + + // projection (no bias) + cur = ggml_mul_mat(ctx0, + model.layers[il].wo, + cur); + offload_func(cur); + ggml_set_name(cur, "result_wo"); + } + + lctx.use_buf(ctx0, 1); + + struct ggml_tensor * inpFF = ggml_add(ctx0, cur, inpSA); + offload_func(inpFF); + ggml_set_name(inpFF, "inpFF"); + + // feed-forward network + { + // norm + { + cur = ggml_rms_norm(ctx0, inpFF, rms_norm_eps); + offload_func(cur); + ggml_set_name(cur, "rms_norm_1"); + + // cur = cur*ffn_norm(broadcasted) + cur = ggml_mul(ctx0, cur, model.layers[il].ffn_norm); + offload_func(cur); + ggml_set_name(cur, "ffn_norm"); + } + + struct ggml_tensor * tmp = ggml_mul_mat(ctx0, + model.layers[il].w3, + cur); + offload_func(tmp); + ggml_set_name(tmp, "result_w3"); + + cur = ggml_mul_mat(ctx0, + model.layers[il].w1, + cur); + offload_func(cur); + ggml_set_name(cur, "result_w1"); + + // SILU activation + cur = ggml_silu(ctx0, cur); + offload_func(cur); + ggml_set_name(cur, "silu"); + + cur = ggml_mul(ctx0, cur, tmp); + offload_func(cur); + ggml_set_name(cur, "silu_x_result_w3"); + + cur = ggml_mul_mat(ctx0, + model.layers[il].w2, + cur); + offload_func(cur); + ggml_set_name(cur, "result_w2"); + } + + cur = ggml_add(ctx0, cur, inpFF); + offload_func(cur); + ggml_set_name(cur, "inpFF_+_result_w2"); + + // input for next layer + inpL = cur; + } + + lctx.use_buf(ctx0, 0); + + // used at the end to optionally extract the embeddings + struct ggml_tensor * embeddings = NULL; + + // norm + { + cur = ggml_rms_norm(ctx0, inpL, rms_norm_eps); + offload_func_nr(cur); + ggml_set_name(cur, "rms_norm_2"); + + // cur = cur*norm(broadcasted) + cur = ggml_mul(ctx0, cur, model.norm); + // offload_func_nr(cur); // TODO CPU + GPU mirrored backend + ggml_set_name(cur, "result_norm"); + + embeddings = cur; + } + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + ggml_set_name(cur, "result_output"); + + lctx.use_buf(ctx0, -1); + + // logits -> probs + //cur = ggml_soft_max_inplace(ctx0, cur); + + // run the computation + ggml_build_forward_expand(gf, cur); + + // fprintf(stderr, "graph build time: %.3f ms (%d nodes, %d leafs)\n", (ggml_time_us() - t_start_us)/1000.0, gf.n_nodes, gf.n_leafs); + +#if GGML_USE_MPI + ggml_mpi_graph_compute_pre(lctx.ctx_mpi, gf, n_layer); +#endif + +#ifdef GGML_USE_METAL + if (lctx.ctx_metal && N == 1) { + if (!ggml_metal_if_optimized(lctx.ctx_metal)) { + ggml_metal_graph_find_concurrency(lctx.ctx_metal, gf); + } + ggml_metal_set_n_cb (lctx.ctx_metal, n_threads); + ggml_metal_graph_compute(lctx.ctx_metal, gf); + ggml_metal_get_tensor (lctx.ctx_metal, cur); + } else { + // IMPORTANT: + // Since we don't have efficient Matrix x Matrix Metal multiplication yet, we fallback to vanilla + // ggml_graph_compute(). It uses Apple's Accelerate CBLAS API which takes advantage of the ANE or the AMX + // coprocessor. + // + // When we implement Matrix x Matrix Metal multiplication, we can avoid this branch. 
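// --- editorial sketch, not part of this patch ---
// A scalar miniature of the SwiGLU feed-forward block built above:
// out = w2 * (silu(w1*x) * (w3*x)), matching the result_w1 / silu /
// result_w3 / result_w2 naming. Weights are single floats for illustration.
#include <cmath>
#include <cstdio>

static float silu(float x) { return x / (1.0f + expf(-x)); }

int main() {
    const float x = 0.5f, w1 = 1.2f, w2 = 0.8f, w3 = -0.7f;
    const float gate = silu(w1 * x);            // "result_w1" -> "silu"
    const float up   = w3 * x;                  // "result_w3"
    printf("ffn(x) = %f\n", w2 * (gate * up));  // "silu_x_result_w3" -> "result_w2"
    return 0;
}
// --- end editorial sketch ---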
+ // But for now, we have focused only on Matrix x Vector Metal multiplication. + // + // TODO: avoid these syncs via shared memory (ref #1696) + // + if (lctx.ctx_metal) { + // We need to sync the GPU KV cache with the CPU KV cache + ggml_metal_get_tensor(lctx.ctx_metal, kv_self.k); + ggml_metal_get_tensor(lctx.ctx_metal, kv_self.v); + } + + ggml_graph_compute_helper(lctx.work_buffer, gf, n_threads); + } +#else + ggml_graph_compute_helper(lctx.work_buffer, gf, n_threads); +#endif + +#if GGML_USE_MPI + ggml_mpi_graph_compute_post(lctx.ctx_mpi, gf, n_layer); +#endif + + // update kv token count + lctx.kv_self.n = n_past + N; + + struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; + + if (cgraph_fname) { + ggml_graph_export(gf, cgraph_fname); + } + +#ifdef GGML_PERF + // print timing information per ggml operation (for debugging purposes) + // requires GGML_PERF to be defined + ggml_graph_print(gf); +#endif + + // plot the computation graph in dot format (for debugging purposes) + //if (n_past%100 == 0) { + // ggml_graph_dump_dot(gf, NULL, "llama.dot"); + //} + + // extract logits + { + auto & logits_out = lctx.logits; + + if (lctx.logits_all) { + logits_out.resize(n_vocab * N); + memcpy(logits_out.data(), (float *) ggml_get_data(res), sizeof(float)*n_vocab*N); + } else { + // return result for just the last token + logits_out.resize(n_vocab); + memcpy(logits_out.data(), (float *) ggml_get_data(res) + (n_vocab*(N-1)), sizeof(float)*n_vocab); + } + } + + // extract embeddings + if (!lctx.embedding.empty()) { + auto & embedding_out = lctx.embedding; + + embedding_out.resize(n_embd); + memcpy(embedding_out.data(), (float *) ggml_get_data(embeddings) + (n_embd*(N - 1)), sizeof(float)*n_embd); + } + + if (mem_per_token == 0) { + mem_per_token = ggml_used_mem(ctx0)/N; + } + +#if 0 + printf("\n%s: used_mem: eval ctx %.3f MB, scratch %.3f MB %.3f MB, work buf %.3f MB, n_past = %d, N = %d\n", __func__, + ggml_used_mem(ctx0)/1024.0/1024.0, + lctx.get_buf_max_mem(0)/1024.0/1024.0, + lctx.get_buf_max_mem(1)/1024.0/1024.0, + lctx.work_buffer.size()/1024.0/1024.0, + n_past, N); +#endif + + ggml_free(ctx0); + + // measure the performance only for the single-token evals + if (N == 1) { + lctx.t_eval_us += ggml_time_us() - t_start_us; + lctx.n_eval++; + } + else if (N > 1) { + lctx.t_p_eval_us += ggml_time_us() - t_start_us; + lctx.n_p_eval += N; + } + + return true; +} + +// +// tokenizer +// + +static size_t utf8_len(char src) { + const size_t lookup[] = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 3, 4 }; + uint8_t highbits = static_cast(src) >> 4; + return lookup[highbits]; +} + +struct llama_sp_symbol { + using index = int; + index prev; + index next; + const char * text; + size_t n; +}; + +static_assert(std::is_trivially_copyable::value, "llama_sp_symbol is not trivially copyable"); + +struct llama_sp_bigram { + struct comparator { + bool operator()(llama_sp_bigram & l, llama_sp_bigram & r) { + return (l.score < r.score) || (l.score == r.score && l.left > r.left); + } + }; + using queue_storage = std::vector; + using queue = std::priority_queue; + llama_sp_symbol::index left; + llama_sp_symbol::index right; + float score; + size_t size; +}; + +// original implementation: +// https://github.com/ggerganov/llama.cpp/commit/074bea2eb1f1349a0118239c4152914aecaa1be4 +struct llama_tokenizer { + llama_tokenizer(const llama_vocab & vocab): vocab_(vocab) {} + + void tokenize(const std::string & text, std::vector & output) { + // split string into utf8 chars + int index = 0; + size_t offs = 0; + while (offs < 
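// --- editorial sketch, not part of this patch ---
// A quick check of the utf8_len() table defined above: the top four bits of
// a UTF-8 lead byte determine the sequence length (0xxx -> 1 byte,
// 1100/1101 -> 2, 1110 -> 3, 1111 -> 4).
#include <cstddef>
#include <cstdio>

int main() {
    const size_t lookup[16] = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 3, 4 };
    const unsigned char samples[4] = { 0x61, 0xC3, 0xE2, 0xF0 }; // 'a', 2-, 3-, 4-byte leads
    for (int i = 0; i < 4; ++i) {
        printf("lead 0x%02X -> %zu byte(s)\n", samples[i], lookup[samples[i] >> 4]);
    }
    return 0;
}
// --- end editorial sketch ---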
text.size()) { + llama_sp_symbol sym; + size_t char_len = std::min(text.size() - offs, utf8_len(text[offs])); + sym.text = text.c_str() + offs; + sym.n = char_len; + offs += char_len; + sym.prev = index - 1; + sym.next = offs == text.size() ? -1 : index + 1; + index++; + symbols_.emplace_back(sym); + } + + // seed the work queue with all possible 2-character tokens. + for (size_t i = 1; i < symbols_.size(); ++i) { + try_add_bigram(i - 1, i); + } + + // keep substituting the highest frequency pairs for as long as we can. + while (!work_queue_.empty()) { + auto bigram = work_queue_.top(); + work_queue_.pop(); + + auto & left_sym = symbols_[bigram.left]; + auto & right_sym = symbols_[bigram.right]; + + // if one of the symbols already got merged, skip it. + if (left_sym.n == 0 || right_sym.n == 0 || + left_sym.n + right_sym.n != bigram.size) { + continue; + } + + // merge the right sym into the left one + left_sym.n += right_sym.n; + right_sym.n = 0; + + //printf("left = '%*s' size = %zu\n", (int) left_sym.n, left_sym.text, bigram.size); + + // remove the right sym from the chain + left_sym.next = right_sym.next; + if (right_sym.next >= 0) { + symbols_[right_sym.next].prev = bigram.left; + } + + // find more substitutions + try_add_bigram(left_sym.prev, bigram.left); + try_add_bigram(bigram.left, left_sym.next); + } + + for (int i = 0; i != -1; i = symbols_[i].next) { + auto & symbol = symbols_[i]; + auto token = vocab_.token_to_id.find(std::string(symbol.text, symbol.n)); + + if (token == vocab_.token_to_id.end()) { + // output any symbols that did not form tokens as bytes. + for (int j = 0; j < (int) symbol.n; ++j) { + llama_vocab::id token_id = static_cast(symbol.text[j]) + 3; + output.push_back(token_id); + } + } else { + output.push_back((*token).second); + } + } + } + +private: + void try_add_bigram(int left, int right) { + if (left == -1 || right == -1) { + return; + } + + const std::string text = std::string(symbols_[left].text, symbols_[left].n + symbols_[right].n); + auto token = vocab_.token_to_id.find(text); + + if (token == vocab_.token_to_id.end()) { + return; + } + + if (static_cast((*token).second) >= vocab_.id_to_token.size()) { + return; + } + + const auto &tok_score = vocab_.id_to_token[(*token).second]; + + llama_sp_bigram bigram; + bigram.left = left; + bigram.right = right; + bigram.score = tok_score.score; + bigram.size = text.size(); + work_queue_.push(bigram); + } + + const llama_vocab & vocab_; + std::vector symbols_; + llama_sp_bigram::queue work_queue_; +}; + +static std::vector llama_tokenize(const llama_vocab & vocab, const std::string & text, bool bos) { + llama_tokenizer tokenizer(vocab); + std::vector output; + + if (text.empty()) { + return output; + } + + if (bos) { + output.push_back(llama_token_bos()); + } + + tokenizer.tokenize(text, output); + return output; +} + +// +// grammar - internal +// + +struct llama_grammar { + const std::vector> rules; + std::vector> stacks; +}; + +struct llama_grammar_candidate { + size_t index; + const uint32_t * code_points; +}; + +// NOTE: assumes valid utf8 (but checks for overrun) +// adds a terminating 0 for use as pointer +std::vector decode_utf8(const char * src) { + static const int lookup[] = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 3, 4 }; + const char * pos = src; + std::vector code_points; + while (*pos != 0) { + uint8_t first_byte = static_cast(*pos); + uint8_t highbits = first_byte >> 4; + int len = lookup[highbits]; + uint8_t mask = (1 << (8 - len)) - 1; + uint32_t value = first_byte & mask; + const char * 
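// --- editorial sketch, not part of this patch ---
// A toy version of the merge loop above: keep merging the best-scoring
// adjacent pair that exists in the vocab. The real tokenizer drives this
// with a priority queue over symbol indices and SentencePiece scores; this
// miniature simply rescans linearly.
#include <cstdio>
#include <map>
#include <string>
#include <vector>

int main() {
    const std::map<std::string, float> vocab = { {"ab", 1.0f}, {"abc", 2.0f} };
    std::vector<std::string> syms = { "a", "b", "c" }; // utf8 chars after splitting

    for (;;) {
        float best = 0.0f; size_t best_i = 0; bool found = false;
        for (size_t i = 0; i + 1 < syms.size(); ++i) {
            const auto it = vocab.find(syms[i] + syms[i + 1]);
            if (it != vocab.end() && (!found || it->second > best)) {
                best = it->second; best_i = i; found = true;
            }
        }
        if (!found) break;
        syms[best_i] += syms[best_i + 1];
        syms.erase(syms.begin() + best_i + 1);
    }
    for (const auto & s : syms) printf("'%s' ", s.c_str()); // prints: 'abc'
    printf("\n");
    return 0;
}
// --- end editorial sketch ---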
end = pos + len; // may overrun! + ++pos; + for ( ; pos < end && *pos != 0; ++pos) { + value = (value << 6) + (static_cast(*pos) & 0x3F); + } + code_points.push_back(value); + } + code_points.push_back(0); + return code_points; +} + +// returns true iff pos points to the end of one of the definitions of a rule +static bool llama_grammar_is_end_of_sequence(const llama_grammar_element * pos) { + switch (pos->type) { + case LLAMA_GRETYPE_END: return true; + case LLAMA_GRETYPE_ALT: return true; + default: return false; + } +} + +// returns true iff chr satisfies the char range at pos (regular or inverse range) +// asserts that pos is pointing to a char range element +static std::pair llama_grammar_match_char( + const llama_grammar_element * pos, + const uint32_t chr) { + + bool found = false; + bool is_positive_char = pos->type == LLAMA_GRETYPE_CHAR; + GGML_ASSERT(is_positive_char || pos->type == LLAMA_GRETYPE_CHAR_NOT); + + do { + if (pos[1].type == LLAMA_GRETYPE_CHAR_RNG_UPPER) { + // inclusive range, e.g. [a-z] + found = found || (pos->value <= chr && chr <= pos[1].value); + pos += 2; + } else { + // exact char match, e.g. [a] or "a" + found = found || pos->value == chr; + pos += 1; + } + } while (pos->type == LLAMA_GRETYPE_CHAR_ALT); + + return std::make_pair(found == is_positive_char, pos); +} + +// transforms a grammar pushdown stack into N possible stacks, all ending +// at a character range (terminal element) +static void llama_grammar_advance_stack( + const std::vector> & rules, + const std::vector & stack, + std::vector> & new_stacks) { + + if (stack.empty()) { + new_stacks.push_back(stack); + return; + } + + const llama_grammar_element * pos = stack.back(); + + switch (pos->type) { + case LLAMA_GRETYPE_RULE_REF: { + const size_t rule_id = static_cast(pos->value); + const llama_grammar_element * subpos = rules[rule_id].data(); + do { + // init new stack without the top (pos) + std::vector new_stack(stack.begin(), stack.end() - 1); + if (!llama_grammar_is_end_of_sequence(pos + 1)) { + // if this rule ref is followed by another element, add that to stack + new_stack.push_back(pos + 1); + } + if (!llama_grammar_is_end_of_sequence(subpos)) { + // if alternate is nonempty, add to stack + new_stack.push_back(subpos); + } + llama_grammar_advance_stack(rules, new_stack, new_stacks); + while (!llama_grammar_is_end_of_sequence(subpos)) { + // scan to end of alternate def + subpos++; + } + if (subpos->type == LLAMA_GRETYPE_ALT) { + // there's another alternate def of this rule to process + subpos++; + } else { + break; + } + } while (true); + break; + } + case LLAMA_GRETYPE_CHAR: + case LLAMA_GRETYPE_CHAR_NOT: + new_stacks.push_back(stack); + break; + default: + // end of alternate (LLAMA_GRETYPE_END, LLAMA_GRETYPE_ALT) or middle of char range + // (LLAMA_GRETYPE_CHAR_ALT, LLAMA_GRETYPE_CHAR_RNG_UPPER); stack should never be left on + // those + GGML_ASSERT(false); + } +} + +// takes a set of possible pushdown stacks on a grammar, which are required to +// be positioned at a character range (see `llama_grammar_advance_stack`), and +// produces the N possible stacks if the given char is accepted at those +// positions +static std::vector> llama_grammar_accept( + const std::vector> & rules, + const std::vector> & stacks, + const uint32_t chr) { + + std::vector> new_stacks; + + for (const auto & stack : stacks) { + if (stack.empty()) { + continue; + } + + auto match = llama_grammar_match_char(stack.back(), chr); + if (match.first) { + const llama_grammar_element * pos = match.second; + + // update 
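// --- editorial sketch, not part of this patch ---
// The char-range matching that llama_grammar_match_char implements above:
// a terminal like [a-zA-Z] is stored as CHAR 'a', RNG_UPPER 'z',
// CHAR_ALT 'A', RNG_UPPER 'Z'. The enum values below are stand-ins for the
// real LLAMA_GRETYPE_* constants.
#include <cstdint>
#include <cstdio>

struct Elem { int type; uint32_t value; }; // 0 = CHAR, 1 = RNG_UPPER, 2 = CHAR_ALT, -1 = end

static bool match_char(const Elem * pos, uint32_t chr) {
    bool found = false;
    do {
        if (pos[1].type == 1) {                // inclusive range, e.g. [a-z]
            found = found || (pos->value <= chr && chr <= pos[1].value);
            pos += 2;
        } else {                               // exact char match
            found = found || pos->value == chr;
            pos += 1;
        }
    } while (pos->type == 2);                  // continue through CHAR_ALT elements
    return found;
}

int main() {
    const Elem range[5] = { {0,'a'}, {1,'z'}, {2,'A'}, {1,'Z'}, {-1,0} };
    printf("'q' in [a-zA-Z]: %d\n", match_char(range, 'q')); // 1
    printf("'5' in [a-zA-Z]: %d\n", match_char(range, '5')); // 0
    return 0;
}
// --- end editorial sketch ---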
top of stack to next element, if any + std::vector new_stack(stack.begin(), stack.end() - 1); + if (!llama_grammar_is_end_of_sequence(pos)) { + new_stack.push_back(pos); + } + llama_grammar_advance_stack(rules, new_stack, new_stacks); + } + } + + return new_stacks; +} + +static std::vector llama_grammar_reject_candidates( + const std::vector> & rules, + const std::vector> & stacks, + const std::vector & candidates); + +static std::vector llama_grammar_reject_candidates_for_stack( + const std::vector> & rules, + const std::vector & stack, + const std::vector & candidates) { + + std::vector rejects; + + if (stack.empty()) { + // accept nothing; EOS is handled elsewhere + rejects.insert(rejects.end(), candidates.begin(), candidates.end()); + return rejects; + } + + const llama_grammar_element * stack_pos = stack.back(); + + std::vector next_candidates; + for (auto tok : candidates) { + if (llama_grammar_match_char(stack_pos, tok.code_points[0]).first) { + if (tok.code_points[1] != 0) { + next_candidates.push_back({ tok.index, tok.code_points + 1 }); + } + } else { + rejects.push_back(tok); + } + } + + auto stack_pos_after = llama_grammar_match_char(stack_pos, 0).second; + + // update top of stack to next element, if any + std::vector stack_after(stack.begin(), stack.end() - 1); + if (!llama_grammar_is_end_of_sequence(stack_pos_after)) { + stack_after.push_back(stack_pos_after); + } + std::vector> next_stacks; + llama_grammar_advance_stack(rules, stack_after, next_stacks); + + auto next_rejects = llama_grammar_reject_candidates(rules, next_stacks, next_candidates); + for (auto tok : next_rejects) { + rejects.push_back({ tok.index, tok.code_points - 1 }); + } + + return rejects; +} + +static std::vector llama_grammar_reject_candidates( + const std::vector> & rules, + const std::vector> & stacks, + const std::vector & candidates) { + GGML_ASSERT(!stacks.empty()); // REVIEW + + if (candidates.empty()) { + return std::vector(); + } + + auto rejects = llama_grammar_reject_candidates_for_stack(rules, stacks.front(), candidates); + + for (size_t i = 1, size = stacks.size(); i < size; ++i) { + rejects = llama_grammar_reject_candidates_for_stack(rules, stacks[i], rejects); + } + return rejects; +} + +// +// grammar - external +// + +struct llama_grammar * llama_grammar_init( + const llama_grammar_element ** rules, + size_t n_rules, + size_t start_rule_index) { + const llama_grammar_element * pos; + + // copy rule definitions into vectors + std::vector> vec_rules(n_rules); + for (size_t i = 0; i < n_rules; i++) { + for (pos = rules[i]; pos->type != LLAMA_GRETYPE_END; pos++) { + vec_rules[i].push_back(*pos); + } + vec_rules[i].push_back({LLAMA_GRETYPE_END, 0}); + } + + // loop over alternates of start rule to build initial stacks + std::vector> stacks; + pos = rules[start_rule_index]; + do { + std::vector stack; + if (!llama_grammar_is_end_of_sequence(pos)) { + // if alternate is nonempty, add to stack + stack.push_back(pos); + } + llama_grammar_advance_stack(vec_rules, stack, stacks); + while (!llama_grammar_is_end_of_sequence(pos)) { + // scan to end of alternate def + pos++; + } + if (pos->type == LLAMA_GRETYPE_ALT) { + // there's another alternate def of this rule to process + pos++; + } else { + break; + } + } while (true); + + return new llama_grammar{ std::move(vec_rules), std::move(stacks) }; +} + +void llama_grammar_free(struct llama_grammar * grammar) { + delete grammar; +} + +// +// sampling +// + +void llama_sample_softmax(struct llama_context * ctx, llama_token_data_array * candidates) { + 
assert(candidates->size > 0); + + const int64_t t_start_sample_us = ggml_time_us(); + + // Sort the logits in descending order + if (!candidates->sorted) { + std::sort(candidates->data, candidates->data + candidates->size, [](const llama_token_data & a, const llama_token_data & b) { + return a.logit > b.logit; + }); + candidates->sorted = true; + } + + float max_l = candidates->data[0].logit; + float cum_sum = 0.0f; + for (size_t i = 0; i < candidates->size; ++i) { + float p = expf(candidates->data[i].logit - max_l); + candidates->data[i].p = p; + cum_sum += p; + } + for (size_t i = 0; i < candidates->size; ++i) { + candidates->data[i].p /= cum_sum; + } + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } +} + +void llama_sample_top_k(struct llama_context * ctx, llama_token_data_array * candidates, int k, size_t min_keep) { + const int64_t t_start_sample_us = ggml_time_us(); + + k = std::max(k, (int) min_keep); + k = std::min(k, (int) candidates->size); + + // Sort scores in descending order + if (!candidates->sorted) { + auto comp = [](const llama_token_data & a, const llama_token_data & b) { + return a.logit > b.logit; + }; + if (k == (int) candidates->size) { + std::sort(candidates->data, candidates->data + candidates->size, comp); + } else { + std::partial_sort(candidates->data, candidates->data + k, candidates->data + candidates->size, comp); + } + candidates->sorted = true; + } + candidates->size = k; + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } +} + +void llama_sample_top_p(struct llama_context * ctx, llama_token_data_array * candidates, float p, size_t min_keep) { + if (p >= 1.0f) { + return; + } + + llama_sample_softmax(ctx, candidates); + + const int64_t t_start_sample_us = ggml_time_us(); + + // Compute the cumulative probabilities + float cum_sum = 0.0f; + size_t last_idx = candidates->size; + + for (size_t i = 0; i < candidates->size; ++i) { + cum_sum += candidates->data[i].p; + + // Check if the running sum is at least p or if we have kept at least min_keep tokens + // we set the last index to i+1 to indicate that the current iterate should be included in the set + if (cum_sum >= p && i + 1 >= min_keep) { + last_idx = i + 1; + break; + } + } + + // Resize the output vector to keep only the top-p tokens + candidates->size = last_idx; + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } +} + +void llama_sample_tail_free(struct llama_context * ctx, llama_token_data_array * candidates, float z, size_t min_keep) { + if (z >= 1.0f || candidates->size <= 2) { + return; + } + + llama_sample_softmax(nullptr, candidates); + const int64_t t_start_sample_us = ggml_time_us(); + + // Compute the first and second derivatives + std::vector first_derivatives(candidates->size - 1); + std::vector second_derivatives(candidates->size - 2); + + for (size_t i = 0; i < first_derivatives.size(); ++i) { + first_derivatives[i] = candidates->data[i].p - candidates->data[i + 1].p; + } + for (size_t i = 0; i < second_derivatives.size(); ++i) { + second_derivatives[i] = first_derivatives[i] - first_derivatives[i + 1]; + } + + // Calculate absolute value of second derivatives + for (size_t i = 0; i < second_derivatives.size(); ++i) { + second_derivatives[i] = abs(second_derivatives[i]); + } + + // Normalize the second derivatives + { + const float second_derivatives_sum = std::accumulate(second_derivatives.begin(), second_derivatives.end(), 0.0f); + + if (second_derivatives_sum > 1e-6f) { + for (float & value : 
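// --- editorial sketch, not part of this patch ---
// A worked example of the top-p cutoff in llama_sample_top_p above: with
// sorted probabilities {0.5, 0.3, 0.1, 0.1} and p = 0.8, the running sum
// reaches p at the second token, so only the first two candidates are kept
// (subject to min_keep).
#include <cstdio>
#include <vector>

int main() {
    const std::vector<float> probs = {0.5f, 0.3f, 0.1f, 0.1f}; // already sorted
    const float p = 0.8f;
    const size_t min_keep = 1;

    float cum_sum = 0.0f;
    size_t last_idx = probs.size();
    for (size_t i = 0; i < probs.size(); ++i) {
        cum_sum += probs[i];
        if (cum_sum >= p && i + 1 >= min_keep) { last_idx = i + 1; break; }
    }
    printf("keeping %zu of %zu candidates\n", last_idx, probs.size()); // 2 of 4
    return 0;
}
// --- end editorial sketch ---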
second_derivatives) { + value /= second_derivatives_sum; + } + } else { + for (float & value : second_derivatives) { + value = 1.0f / second_derivatives.size(); + } + } + } + + float cum_sum = 0.0f; + size_t last_idx = candidates->size; + for (size_t i = 0; i < second_derivatives.size(); ++i) { + cum_sum += second_derivatives[i]; + + // Check if the running sum is greater than z or if we have kept at least min_keep tokens + if (cum_sum > z && i >= min_keep) { + last_idx = i; + break; + } + } + + // Resize the output vector to keep only the tokens above the tail location + candidates->size = last_idx; + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } +} + + +void llama_sample_typical(struct llama_context * ctx, llama_token_data_array * candidates, float p, size_t min_keep) { + // Reference implementation: + // https://github.com/huggingface/transformers/compare/main...cimeister:typical-sampling:typical-pr + if (p >= 1.0f) { + return; + } + + // Compute the softmax of logits and calculate entropy + llama_sample_softmax(nullptr, candidates); + + const int64_t t_start_sample_us = ggml_time_us(); + + float entropy = 0.0f; + for (size_t i = 0; i < candidates->size; ++i) { + entropy += -candidates->data[i].p * logf(candidates->data[i].p); + } + + // Compute the absolute difference between negative log probability and entropy for each candidate + std::vector shifted_scores; + for (size_t i = 0; i < candidates->size; ++i) { + float shifted_score = fabsf(-logf(candidates->data[i].p) - entropy); + shifted_scores.push_back(shifted_score); + } + + // Sort tokens based on the shifted_scores and their corresponding indices + std::vector indices(candidates->size); + std::iota(indices.begin(), indices.end(), 0); + + std::sort(indices.begin(), indices.end(), [&](size_t a, size_t b) { + return shifted_scores[a] < shifted_scores[b]; + }); + + // Compute the cumulative probabilities + float cum_sum = 0.0f; + size_t last_idx = indices.size(); + + for (size_t i = 0; i < indices.size(); ++i) { + size_t idx = indices[i]; + cum_sum += candidates->data[idx].p; + + // Check if the running sum is greater than typical or if we have kept at least min_keep tokens + if (cum_sum > p && i >= min_keep - 1) { + last_idx = i + 1; + break; + } + } + + // Resize the output vector to keep only the locally typical tokens + std::vector new_candidates; + for (size_t i = 0; i < last_idx; ++i) { + size_t idx = indices[i]; + new_candidates.push_back(candidates->data[idx]); + } + + // Replace the data in candidates with the new_candidates data + std::copy(new_candidates.begin(), new_candidates.end(), candidates->data); + candidates->size = new_candidates.size(); + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } +} + +void llama_sample_temperature(struct llama_context * ctx, llama_token_data_array * candidates_p, float temp) { + const int64_t t_start_sample_us = ggml_time_us(); + + for (size_t i = 0; i < candidates_p->size; ++i) { + candidates_p->data[i].logit /= temp; + } + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } +} + +void llama_sample_repetition_penalty(struct llama_context * ctx, llama_token_data_array * candidates, const llama_token * last_tokens, size_t last_tokens_size, float penalty) { + if (last_tokens_size == 0 || penalty == 1.0f) { + return; + } + + const int64_t t_start_sample_us = ggml_time_us(); + + for (size_t i = 0; i < candidates->size; ++i) { + const auto * token_iter = std::find(last_tokens, last_tokens + last_tokens_size, 
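// --- editorial sketch, not part of this patch ---
// A quick check of the temperature step in llama_sample_temperature above:
// dividing logits by temp < 1 sharpens the softmax, temp > 1 flattens it.
// Two-logit example with made-up values.
#include <cmath>
#include <cstdio>

int main() {
    const float a = 2.0f, b = 1.0f;           // two raw logits
    const float temps[3] = {0.5f, 1.0f, 2.0f};
    for (int i = 0; i < 3; ++i) {
        const float t  = temps[i];
        const float pa = expf(a / t) / (expf(a / t) + expf(b / t));
        printf("temp %.1f -> p(a) = %.3f\n", t, pa); // 0.881, 0.731, 0.622
    }
    return 0;
}
// --- end editorial sketch ---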
candidates->data[i].id); + if (token_iter == last_tokens + last_tokens_size) { + continue; + } + + // The academic publication that described this technique actually just only divided, but that would cause tokens with negative logits to become more likely, which is obviously wrong. + // This is common fix for this problem, which is to multiply by the penalty instead of dividing. + if (candidates->data[i].logit <= 0) { + candidates->data[i].logit *= penalty; + } else { + candidates->data[i].logit /= penalty; + } + } + + candidates->sorted = false; + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } +} + +void llama_sample_frequency_and_presence_penalties(struct llama_context * ctx, llama_token_data_array * candidates, const llama_token * last_tokens_p, size_t last_tokens_size, float alpha_frequency, float alpha_presence) { + if (last_tokens_size == 0 || (alpha_frequency == 0.0f && alpha_presence == 0.0f)) { + return; + } + + const int64_t t_start_sample_us = ggml_time_us(); + + // Create a frequency map to count occurrences of each token in last_tokens + std::unordered_map token_count; + for (size_t i = 0; i < last_tokens_size; ++i) { + token_count[last_tokens_p[i]]++; + } + + // Apply frequency and presence penalties to the candidates + for (size_t i = 0; i < candidates->size; ++i) { + auto token_iter = token_count.find(candidates->data[i].id); + if (token_iter == token_count.end()) { + continue; + } + + int count = token_iter->second; + candidates->data[i].logit -= float(count) * alpha_frequency + float(count > 0) * alpha_presence; + } + + candidates->sorted = false; + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } +} + +void llama_sample_grammar(struct llama_context * ctx, llama_token_data_array * candidates, const struct llama_grammar * grammar) { + assert(ctx); + const int64_t t_start_sample_us = ggml_time_us(); + + bool allow_eos = false; + for (const auto & stack : grammar->stacks) { + if (stack.empty()) { + allow_eos = true; + break; + } + } + + const llama_token eos = llama_token_eos(); + + std::vector> candidates_decoded; + std::vector candidates_grammar; + + for (size_t i = 0; i < candidates->size; ++i) { + const llama_token id = candidates->data[i].id; + const char * str = llama_token_to_str(ctx, id); + if (id == eos) { + if (!allow_eos) { + candidates->data[i].logit = -INFINITY; + } + } else if (*str == 0) { + candidates->data[i].logit = -INFINITY; + } else { + candidates_decoded.push_back(decode_utf8(str)); + candidates_grammar.push_back({ i, candidates_decoded.back().data() }); + } + } + + const auto rejects = + llama_grammar_reject_candidates(grammar->rules, grammar->stacks, candidates_grammar); + for (auto & reject : rejects) { + candidates->data[reject.index].logit = -INFINITY; + } + + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; +} + +static void llama_log_softmax(float * array, size_t size) { + float max_l = *std::max_element(array, array + size); + float sum = 0.f; + for (size_t i = 0; i < size; ++i) { + float p = expf(array[i] - max_l); + sum += p; + array[i] = p; + } + + for (size_t i = 0; i < size; ++i) { + array[i] = logf(array[i] / sum); + } +} + +void llama_sample_classifier_free_guidance( + struct llama_context * ctx, + llama_token_data_array * candidates, + struct llama_context * guidance_ctx, + float scale) { + int64_t t_start_sample_us = ggml_time_us(); + + assert(ctx); + auto n_vocab = llama_n_vocab(ctx); + assert(n_vocab == (int)candidates->size); + assert(!candidates->sorted); + + std::vector 
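// --- editorial sketch, not part of this patch ---
// The penalty formula in llama_sample_frequency_and_presence_penalties above:
//   logit -= count * alpha_frequency + (count > 0) * alpha_presence
// i.e. presence is a one-off cost, frequency scales with repetitions.
#include <cstdio>

int main() {
    const float alpha_frequency = 0.5f, alpha_presence = 1.0f; // assumed settings
    const int counts[3] = {0, 1, 3};
    for (int i = 0; i < 3; ++i) {
        const int count = counts[i];
        const float penalty = float(count) * alpha_frequency + float(count > 0) * alpha_presence;
        printf("count %d -> logit -= %.1f\n", count, penalty); // 0.0, 1.5, 2.5
    }
    return 0;
}
// --- end editorial sketch ---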
logits_base; + logits_base.reserve(candidates->size); + for (size_t i = 0; i < candidates->size; ++i) { + logits_base.push_back(candidates->data[i].logit); + } + llama_log_softmax(logits_base.data(), candidates->size); + + float* logits_guidance = llama_get_logits(guidance_ctx); + llama_log_softmax(logits_guidance, n_vocab); + + for (int i = 0; i < n_vocab; ++i) { + float logit_guidance = logits_guidance[i]; + float logit_base = logits_base[i]; + candidates->data[i].logit = scale * (logit_base - logit_guidance) + logit_guidance; + } + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } +} + +llama_token llama_sample_token_mirostat(struct llama_context * ctx, llama_token_data_array * candidates, float tau, float eta, int m, float * mu) { + assert(ctx); + auto N = float(llama_n_vocab(ctx)); + int64_t t_start_sample_us; + t_start_sample_us = ggml_time_us(); + + llama_sample_softmax(nullptr, candidates); + + // Estimate s_hat using the most probable m tokens + float s_hat = 0.0; + float sum_ti_bi = 0.0; + float sum_ti_sq = 0.0; + for (size_t i = 0; i < size_t(m - 1) && i < candidates->size - 1; ++i) { + float t_i = logf(float(i + 2) / float(i + 1)); + float b_i = logf(candidates->data[i].p / candidates->data[i + 1].p); + sum_ti_bi += t_i * b_i; + sum_ti_sq += t_i * t_i; + } + s_hat = sum_ti_bi / sum_ti_sq; + + // Compute k from the estimated s_hat and target surprise value + float epsilon_hat = s_hat - 1; + float k = powf((epsilon_hat * powf(2, *mu)) / (1 - powf(N, -epsilon_hat)), 1 / s_hat); + + // Sample the next word X using top-k sampling + llama_sample_top_k(nullptr, candidates, int(k), 1); + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } + llama_token X = llama_sample_token(ctx, candidates); + t_start_sample_us = ggml_time_us(); + + // Compute error as the difference between observed surprise and target surprise value + size_t X_idx = std::distance(candidates->data, std::find_if(candidates->data, candidates->data + candidates->size, [&](const llama_token_data & candidate) { + return candidate.id == X; + })); + float observed_surprise = -log2f(candidates->data[X_idx].p); + float e = observed_surprise - tau; + + // Update mu using the learning rate and error + *mu = *mu - eta * e; + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } + return X; +} + +llama_token llama_sample_token_mirostat_v2(struct llama_context * ctx, llama_token_data_array * candidates, float tau, float eta, float * mu) { + int64_t t_start_sample_us; + t_start_sample_us = ggml_time_us(); + + llama_sample_softmax(ctx, candidates); + + // Truncate the words with surprise values greater than mu + candidates->size = std::distance(candidates->data, std::find_if(candidates->data, candidates->data + candidates->size, [&](const llama_token_data & candidate) { + return -log2f(candidate.p) > *mu; + })); + + if (candidates->size == 0) { + candidates->size = 1; + } + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } + + // Normalize the probabilities of the remaining words + llama_sample_softmax(ctx, candidates); + + // Sample the next word X from the remaining words + llama_token X = llama_sample_token(ctx, candidates); + t_start_sample_us = ggml_time_us(); + + // Compute error as the difference between observed surprise and target surprise value + size_t X_idx = std::distance(candidates->data, std::find_if(candidates->data, candidates->data + candidates->size, [&](const llama_token_data & candidate) { + return candidate.id == X; + 
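// --- editorial sketch, not part of this patch ---
// The Mirostat control loop shared by both samplers above: the observed
// surprise -log2(p) of the sampled token is compared against the target tau
// and mu is nudged by eta * error. The probabilities below are made up.
#include <cmath>
#include <cstdio>

int main() {
    float mu = 10.0f;                         // 2 * tau is the usual initialization
    const float tau = 5.0f, eta = 0.1f;
    const float sampled_p[3] = {0.001f, 0.01f, 0.05f};

    for (int i = 0; i < 3; ++i) {
        const float observed_surprise = -log2f(sampled_p[i]);
        const float e = observed_surprise - tau;
        mu = mu - eta * e;                    // same update as in the code above
        printf("p = %.3f, surprise = %.2f, mu -> %.2f\n", sampled_p[i], observed_surprise, mu);
    }
    return 0;
}
// --- end editorial sketch ---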
})); + float observed_surprise = -log2f(candidates->data[X_idx].p); + float e = observed_surprise - tau; + + // Update mu using the learning rate and error + *mu = *mu - eta * e; + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } + return X; +} + +llama_token llama_sample_token_greedy(struct llama_context * ctx, llama_token_data_array * candidates) { + const int64_t t_start_sample_us = ggml_time_us(); + + // Find max element + auto * max_iter = std::max_element(candidates->data, candidates->data + candidates->size, [](const llama_token_data & a, const llama_token_data & b) { + return a.logit < b.logit; + }); + + llama_token result = max_iter->id; + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + ctx->n_sample++; + } + return result; +} + +llama_token llama_sample_token(struct llama_context * ctx, llama_token_data_array * candidates) { + assert(ctx); + const int64_t t_start_sample_us = ggml_time_us(); + llama_sample_softmax(nullptr, candidates); + + std::vector probs; + probs.reserve(candidates->size); + for (size_t i = 0; i < candidates->size; ++i) { + probs.push_back(candidates->data[i].p); + } + + std::discrete_distribution<> dist(probs.begin(), probs.end()); + auto & rng = ctx->rng; + int idx = dist(rng); + + llama_token result = candidates->data[idx].id; + + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + ctx->n_sample++; + return result; +} + +void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar * grammar, llama_token token) { + const int64_t t_start_sample_us = ggml_time_us(); + + if (token == llama_token_eos()) { + for (const auto & stack : grammar->stacks) { + if (stack.empty()) { + return; + } + } + GGML_ASSERT(false); + } + + const char * str = llama_token_to_str(ctx, token); + // Note terminating 0 in decoded string + auto code_points = decode_utf8(str); + for (auto it = code_points.begin(), end = code_points.end() - 1; it != end; ++it) { + grammar->stacks = llama_grammar_accept(grammar->rules, grammar->stacks, *it); + } + GGML_ASSERT(!grammar->stacks.empty()); + + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; +} + +// +// quantization +// + +static void llama_convert_tensor_internal(const llama_load_tensor & tensor, gguf_buffer & output, const int nelements, const int nthread) { + if (output.size < nelements * sizeof(float)) { + output.resize(nelements * sizeof(float)); + } + float * f32_output = (float *) output.addr; + + ggml_type_traits_t qtype; + if (ggml_is_quantized(tensor.type)) { + qtype = ggml_internal_get_type_traits(tensor.type); + if (qtype.to_float == NULL) { + throw std::runtime_error(format("type %s unsupported for integer quantization: no dequantization available", ggml_type_name(tensor.type))); + } + } else if (tensor.type != GGML_TYPE_F16) { + throw std::runtime_error(format("cannot dequantize/convert tensor type %s", ggml_type_name(tensor.type))); + } + + if (nthread < 2) { + if (tensor.type == GGML_TYPE_F16) { + ggml_fp16_to_fp32_row((ggml_fp16_t *)tensor.data, f32_output, nelements); + } else if (ggml_is_quantized(tensor.type)) { + qtype.to_float(tensor.data, f32_output, nelements); + } else { + GGML_ASSERT(false); // unreachable + } + return; + } + + auto block_size = tensor.type == GGML_TYPE_F16 ? 
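// --- editorial sketch, not part of this patch ---
// The final draw in llama_sample_token above: std::discrete_distribution
// picks an index with probability proportional to the candidate weights.
#include <cstdio>
#include <random>
#include <vector>

int main() {
    std::mt19937 rng(42);                     // the llama_context owns this in llama.cpp
    const std::vector<float> probs = {0.7f, 0.2f, 0.1f};
    std::discrete_distribution<> dist(probs.begin(), probs.end());

    int counts[3] = {0, 0, 0};
    for (int i = 0; i < 10000; ++i) counts[dist(rng)]++;
    printf("%d / %d / %d\n", counts[0], counts[1], counts[2]); // roughly 7000 / 2000 / 1000
    return 0;
}
// --- end editorial sketch ---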
1 : (size_t)ggml_blck_size(tensor.type); + auto block_size_bytes = ggml_type_size(tensor.type); + + GGML_ASSERT(nelements % block_size == 0); + auto nblocks = nelements / block_size; + auto blocks_per_thread = nblocks / nthread; + auto spare_blocks = nblocks - (blocks_per_thread * nthread); // if blocks aren't divisible by thread count + + std::vector workers; + for (auto tnum = 0, in_buff_offs = 0, out_buff_offs = 0; tnum < nthread; tnum++) { + auto thr_blocks = blocks_per_thread + (tnum == nthread - 1 ? spare_blocks : 0); // num blocks for this thread + auto thr_elems = thr_blocks * block_size; // number of elements for this thread + auto thr_block_bytes = thr_blocks * block_size_bytes; // number of input bytes for this thread + + auto compute = [qtype] (ggml_type typ, uint8_t * inbuf, float * outbuf, int nels) { + if (typ == GGML_TYPE_F16) { + ggml_fp16_to_fp32_row((ggml_fp16_t *)inbuf, outbuf, nels); + } else { + qtype.to_float(inbuf, outbuf, nels); + } + }; + workers.push_back(std::thread(compute, tensor.type, tensor.data + in_buff_offs, f32_output + out_buff_offs, thr_elems)); + in_buff_offs += thr_block_bytes; + out_buff_offs += thr_elems; + } + for (auto & worker : workers) { + worker.join(); + } + +} + +static void llama_model_quantize_internal(const std::string & fname_inp, const std::string & fname_out, const llama_model_quantize_params * params) { + ggml_type quantized_type; + llama_ftype ftype = params->ftype; + int nthread = params->nthread; + + switch (params->ftype) { + case LLAMA_FTYPE_MOSTLY_Q4_0: quantized_type = GGML_TYPE_Q4_0; break; + case LLAMA_FTYPE_MOSTLY_Q4_1: quantized_type = GGML_TYPE_Q4_1; break; + case LLAMA_FTYPE_MOSTLY_Q5_0: quantized_type = GGML_TYPE_Q5_0; break; + case LLAMA_FTYPE_MOSTLY_Q5_1: quantized_type = GGML_TYPE_Q5_1; break; + case LLAMA_FTYPE_MOSTLY_Q8_0: quantized_type = GGML_TYPE_Q8_0; break; + case LLAMA_FTYPE_MOSTLY_F16: quantized_type = GGML_TYPE_F16; break; + case LLAMA_FTYPE_ALL_F32: quantized_type = GGML_TYPE_F32; break; + +#ifdef GGML_USE_K_QUANTS + // K-quants + case LLAMA_FTYPE_MOSTLY_Q2_K: quantized_type = GGML_TYPE_Q2_K; break; + case LLAMA_FTYPE_MOSTLY_Q3_K_S: + case LLAMA_FTYPE_MOSTLY_Q3_K_M: + case LLAMA_FTYPE_MOSTLY_Q3_K_L: quantized_type = GGML_TYPE_Q3_K; break; + case LLAMA_FTYPE_MOSTLY_Q4_K_S: + case LLAMA_FTYPE_MOSTLY_Q4_K_M: quantized_type = GGML_TYPE_Q4_K; break; + case LLAMA_FTYPE_MOSTLY_Q5_K_S: + case LLAMA_FTYPE_MOSTLY_Q5_K_M: quantized_type = GGML_TYPE_Q5_K; break; + case LLAMA_FTYPE_MOSTLY_Q6_K: quantized_type = GGML_TYPE_Q6_K; break; +#endif + default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); + } + + if (nthread <= 0) { + nthread = std::thread::hardware_concurrency(); + } + + std::unique_ptr model_loader(new llama_model_loader(fname_inp, /*use_mmap*/ false)); + gguf_file_saver file_saver(fname_out.c_str(), model_loader->file_loader.get(), params->ftype); + +#ifdef GGML_USE_K_QUANTS + int n_attention_wv = 0; + int n_feed_forward_w2 = 0; + for (auto& tensor : model_loader->tensors_map.tensors) { + if (tensor.name.find("attention.wv.weight") != std::string::npos) { + ++n_attention_wv; + } + else if (tensor.name.find("feed_forward.w2.weight") != std::string::npos) { + ++n_feed_forward_w2; + } + } + + int i_attention_wv = 0; + int i_feed_forward_w2 = 0; +#endif + + size_t total_size_org = 0; + size_t total_size_new = 0; + std::vector hist_all(1 << 4, 0); + + std::vector workers; + std::mutex mutex; + + auto use_more_bits = [] (int i_layer, int num_layers) -> bool { + return i_layer < 
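// --- editorial sketch, not part of this patch ---
// The block partitioning above in miniature: blocks are split evenly across
// threads and the remainder goes to the last worker, mirroring
// blocks_per_thread + spare_blocks.
#include <cstdio>

int main() {
    const int nblocks = 103, nthread = 4;                            // assumed workload
    const int blocks_per_thread = nblocks / nthread;                 // 25
    const int spare_blocks = nblocks - blocks_per_thread * nthread;  // 3
    for (int tnum = 0; tnum < nthread; ++tnum) {
        const int thr_blocks = blocks_per_thread + (tnum == nthread - 1 ? spare_blocks : 0);
        printf("thread %d: %d blocks\n", tnum, thr_blocks);          // 25, 25, 25, 28
    }
    return 0;
}
// --- end editorial sketch ---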
num_layers/8 || i_layer >= 7*num_layers/8 || (i_layer - num_layers/8)%3 == 2; + }; + + size_t idx = 0; + for (llama_load_tensor & tensor : model_loader->tensors_map.tensors) { + gguf_buffer read_data; + read_data.resize(tensor.size); + tensor.data = read_data.addr; + model_loader->load_data_for(tensor); + + printf("[%4zu/%4zu] %36s - %16s, type = %6s, ", + ++idx, model_loader->tensors_map.tensors.size(), + tensor.name.c_str(), llama_format_tensor_shape(tensor.ne).c_str(), + ggml_type_name(tensor.type)); + + // This used to be a regex, but has an extreme cost to compile times. + bool quantize = tensor.name.rfind("weight") == tensor.name.size() - 6; // ends with 'weight'? + + // quantize only 2D tensors + quantize &= (tensor.ne.size() == 2); + quantize &= params->quantize_output_tensor || tensor.name != "output.weight"; + quantize &= quantized_type != tensor.type; + + enum ggml_type new_type; + void * new_data; + size_t new_size; + gguf_buffer work; + + if (!quantize) { + new_type = tensor.type; + new_data = tensor.data; + new_size = tensor.size; + printf("size = %8.3f MB\n", tensor.size/1024.0/1024.0); + } else { + new_type = quantized_type; +#ifdef GGML_USE_K_QUANTS + if (tensor.name == "output.weight") { + int nx = tensor.ne.at(0); + int ny = tensor.ne.at(1); + if (nx % QK_K == 0 && ny % QK_K == 0) { + new_type = GGML_TYPE_Q6_K; + } + } else if (tensor.name.find("attention.wv.weight") != std::string::npos) { + if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q4_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; + else if ((ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) && + use_more_bits(i_attention_wv, n_attention_wv)) new_type = GGML_TYPE_Q6_K; + else if (QK_K == 64 && (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S) && + (i_attention_wv < n_attention_wv/8 || i_attention_wv >= 7*n_attention_wv/8)) new_type = GGML_TYPE_Q6_K; + ++i_attention_wv; + } else if (tensor.name.find("feed_forward.w2.weight") != std::string::npos) { + if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q4_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; + else if ((ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) && + use_more_bits(i_feed_forward_w2, n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; + //else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && i_feed_forward_w2 < n_feed_forward_w2/8) new_type = GGML_TYPE_Q6_K; + ++i_feed_forward_w2; + } else if (tensor.name.find("attention.wo.weight") != std::string::npos) { + if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q4_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; + } + bool convert_incompatible_tensor = false; + if (new_type == GGML_TYPE_Q2_K || new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K || + new_type == GGML_TYPE_Q5_K || new_type == GGML_TYPE_Q6_K) { + int nx = tensor.ne.at(0); + int ny = tensor.ne.at(1); + if (nx % QK_K != 0 || ny % QK_K != 0) { + fprintf(stderr, "\n\nTensor sizes %d x %d are not divisible by %d, required for k-quants.\n",nx,ny,QK_K); + convert_incompatible_tensor = true; + } + } + if (convert_incompatible_tensor) { + if (tensor.name == "output.weight") { + new_type = GGML_TYPE_F16; //fall back to F16 instead of just failing. 
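// --- editorial sketch, not part of this patch ---
// A trace of the use_more_bits() heuristic above for an assumed 32-layer
// model: the first and last eighth of the layers, plus every third layer in
// between, are promoted to the wider quant type.
#include <cstdio>

static bool use_more_bits(int i_layer, int num_layers) {
    return i_layer < num_layers/8 || i_layer >= 7*num_layers/8 ||
           (i_layer - num_layers/8) % 3 == 2;
}

int main() {
    const int num_layers = 32;
    for (int i = 0; i < num_layers; ++i) {
        if (use_more_bits(i, num_layers)) printf("%d ", i);
    }
    printf("\n"); // 0 1 2 3 6 9 12 15 18 21 24 27 28 29 30 31
    return 0;
}
// --- end editorial sketch ---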
+ fprintf(stderr, "F16 will be used for this tensor instead.\n"); + } else if (tensor.name == "tok_embeddings.weight") { + new_type = GGML_TYPE_Q4_0; //fall back to Q4_0 instead of just failing. + fprintf(stderr, "Q4_0 will be used for this tensor instead.\n"); + } else { + throw std::runtime_error("Unsupported tensor size encountered\n"); + } + } +#endif + + float * f32_data; + size_t nelements = tensor.ne.at(0) * tensor.ne.at(1); + gguf_buffer f32_conv_buf; + + if (tensor.type == GGML_TYPE_F32) { + f32_data = (float *) tensor.data; + } else if (ggml_is_quantized(tensor.type) && !params->allow_requantize) { + throw std::runtime_error(format("requantizing from type %s is disabled", ggml_type_name(tensor.type))); + } else { + llama_convert_tensor_internal(tensor, f32_conv_buf, nelements, nthread); + f32_data = (float *) f32_conv_buf.addr; + } + + printf("quantizing to %s .. ", ggml_type_name(new_type)); + fflush(stdout); + + work.resize(nelements * 4); // upper bound on size + new_data = work.addr; + std::vector hist_cur(1 << 4, 0); + + int chunk_size = 32 * 512; + const int nchunk = (nelements + chunk_size - 1)/chunk_size; + const int nthread_use = nthread > 1 ? std::max(1, std::min(nthread, nchunk)) : 1; + if (nthread_use < 2) { + new_size = ggml_quantize_chunk(new_type, f32_data, new_data, 0, nelements, hist_cur.data()); + } else { + size_t counter = 0; + new_size = 0; + auto compute = [&mutex, &counter, &hist_cur, &new_size, new_type, f32_data, new_data, nelements, chunk_size] () { + std::vector local_hist; + size_t local_size = 0; + while (true) { + std::unique_lock lock(mutex); + size_t first = counter; counter += chunk_size; + if (first >= nelements) { + if (!local_hist.empty()) { + for (int j=0; j %8.2f MB | hist: ", tensor.size/1024.0/1024.0, new_size/1024.0/1024.0); + int64_t tot_count = 0; + for (size_t i = 0; i < hist_cur.size(); i++) { + hist_all[i] += hist_cur[i]; + tot_count += hist_cur[i]; + } + + if (tot_count > 0) { + for (size_t i = 0; i < hist_cur.size(); i++) { + printf("%5.3f ", hist_cur[i] / float(nelements)); + } + } + printf("\n"); + } + total_size_org += tensor.size; + total_size_new += new_size; + file_saver.write_tensor(tensor, new_type, new_data, new_size); + } + + printf("%s: model size = %8.2f MB\n", __func__, total_size_org/1024.0/1024.0); + printf("%s: quant size = %8.2f MB\n", __func__, total_size_new/1024.0/1024.0); + + { + int64_t sum_all = 0; + for (size_t i = 0; i < hist_all.size(); i++) { + sum_all += hist_all[i]; + } + + if (sum_all > 0) { + printf("%s: hist: ", __func__); + for (size_t i = 0; i < hist_all.size(); i++) { + printf("%5.3f ", hist_all[i] / float(sum_all)); + } + printf("\n"); + } + } +} + + + +// +// interface implementation +// + +struct llama_model * llama_load_model_from_file( + const char * path_model, + struct llama_context_params params) { + ggml_time_init(); + + llama_model * model = new llama_model; + + ggml_type memory_type = params.f16_kv ? 
GGML_TYPE_F16 : GGML_TYPE_F32; + + if (!llama_model_load(path_model, *model, model->vocab, params.n_ctx, params.n_batch, params.n_gqa, params.rms_norm_eps, params.n_gpu_layers, + params.main_gpu, params.tensor_split, params.rope_freq_base, params.rope_freq_scale,params.low_vram, + memory_type, params.use_mmap, params.use_mlock, params.vocab_only, params.progress_callback, + params.progress_callback_user_data)) { + delete model; + fprintf(stderr, "%s: failed to load model\n", __func__); + return nullptr; + } + + return model; +} + +void llama_free_model(struct llama_model * model) { + delete model; +} + +struct llama_context * llama_new_context_with_model( + struct llama_model * model, + struct llama_context_params params) { + + if (!model) { + return nullptr; + } + + llama_context * ctx = new llama_context(*model); + + if (params.seed == LLAMA_DEFAULT_SEED) { + params.seed = time(NULL); + } + + unsigned cur_percentage = 0; + if (params.progress_callback == NULL) { + params.progress_callback_user_data = &cur_percentage; + params.progress_callback = [](float progress, void * ctx) { + unsigned * cur_percentage_p = (unsigned *) ctx; + unsigned percentage = (unsigned) (100 * progress); + while (percentage > *cur_percentage_p) { + *cur_percentage_p = percentage; + fprintf(stderr, "."); + fflush(stderr); + if (percentage >= 100) { + fprintf(stderr, "\n"); + } + } + }; + } + + ctx->rng = std::mt19937(params.seed); + ctx->logits_all = params.logits_all; + + ggml_type memory_type = params.f16_kv ? GGML_TYPE_F16 : GGML_TYPE_F32; + + // reserve memory for context buffers + if (!params.vocab_only) { + if (!kv_cache_init(ctx->model.hparams, ctx->kv_self, memory_type, ctx->model.hparams.n_ctx, params.n_gpu_layers)) { + fprintf(stderr, "%s: kv_cache_init() failed for self-attention cache\n", __func__); + llama_free(ctx); + return nullptr; + } + + { + const size_t memory_size = ggml_nbytes(ctx->kv_self.k) + ggml_nbytes(ctx->kv_self.v); + fprintf(stderr, "%s: kv self size = %7.2f MB\n", __func__, memory_size / 1024.0 / 1024.0); + } + + const auto & hparams = ctx->model.hparams; + + // resized during inference + if (params.logits_all) { + ctx->logits.reserve(hparams.n_ctx*hparams.n_vocab); + } else { + ctx->logits.reserve(hparams.n_vocab); + } + + if (params.embedding){ + ctx->embedding.resize(hparams.n_embd); + } + + ctx->buf_compute.resize(MEM_REQ_EVAL().at(ctx->model.type) + ggml_graph_overhead()); + + ctx->buf_scratch[0].resize(MEM_REQ_SCRATCH0(hparams.n_ctx).at(ctx->model.type)); + ctx->buf_scratch[1].resize(MEM_REQ_SCRATCH1().at(ctx->model.type)); + } + +#ifdef GGML_USE_METAL + if (params.n_gpu_layers > 0) { + // this allocates all Metal resources and memory buffers + ctx->ctx_metal = ggml_metal_init(1); + + void * data_ptr = NULL; + size_t data_size = 0; + + if (params.use_mmap) { + data_ptr = ctx->model.mapping->addr; + data_size = ctx->model.mapping->size; + } else { + data_ptr = ggml_get_mem_buffer(ctx->model.ctx); + data_size = ggml_get_mem_size (ctx->model.ctx); + } + + const size_t max_size = ggml_get_max_tensor_size(ctx->model.ctx); + + fprintf(stderr, "%s: max tensor size = %8.2f MB\n", __func__, max_size/1024.0/1024.0); + +#define LLAMA_METAL_CHECK_BUF(result) \ + if (!(result)) { \ + fprintf(stderr, "%s: failed to add buffer\n", __func__); \ + llama_free(ctx); \ + return NULL; \ + } + + LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "data", data_ptr, data_size, max_size)); + + LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "eval", ctx->buf_compute.addr, 
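// --- editorial sketch, not part of this patch ---
// Usage of the model/context API implemented above. Error handling is
// trimmed; the model path and n_ctx are placeholders, and
// llama_context_default_params() is assumed from llama.h.
#include "llama.h"

int main() {
    llama_context_params params = llama_context_default_params();
    params.n_ctx = 2048;

    llama_model * model = llama_load_model_from_file("models/7B/ggml-model.bin", params);
    if (!model) return 1;

    llama_context * ctx = llama_new_context_with_model(model, params);
    if (!ctx) { llama_free_model(model); return 1; }

    // ... tokenize, llama_eval(), sample ...

    llama_free(ctx);        // does not free the model: it was created separately here
    llama_free_model(model);
    return 0;
}
// --- end editorial sketch ---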
+        LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "kv",   ctx->kv_self.buf.addr, ctx->kv_self.buf.size, 0));
+
+        LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "scr0", ctx->buf_scratch[0].addr, ctx->buf_scratch[0].size, 0));
+        LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "scr1", ctx->buf_scratch[1].addr, ctx->buf_scratch[1].size, 0));
+#undef LLAMA_METAL_CHECK_BUF
+    }
+#endif
+
+#ifdef GGML_USE_MPI
+    ctx->ctx_mpi = ggml_mpi_init();
+
+    if (ggml_mpi_rank(ctx->ctx_mpi) > 0) {
+        // Enter a blocking eval loop with dummy input, letting rank=0 drive the process
+        const std::vector<llama_token> tmp(ctx->model.hparams.n_ctx, llama_token_bos());
+        while (!llama_eval(ctx, tmp.data(), tmp.size(), 0, 0)) {};
+        llama_backend_free();
+        exit(1);
+    }
+#endif
+
+    return ctx;
+}
+
+struct llama_context * llama_init_from_file(
+                             const char * path_model,
+            struct llama_context_params   params) {
+
+    struct llama_model * model = llama_load_model_from_file(path_model, params);
+    if (!model) {
+        return nullptr;
+    }
+    struct llama_context * ctx = llama_new_context_with_model(model, params);
+    ctx->model_owner = true;
+    return ctx;
+}
+
+void llama_free(struct llama_context * ctx) {
+    if (ctx->model_owner) {
+        delete &ctx->model;
+    }
+    delete ctx;
+}
+
+int llama_model_quantize(
+        const char * fname_inp,
+        const char * fname_out,
+        const llama_model_quantize_params * params) {
+    try {
+        llama_model_quantize_internal(fname_inp, fname_out, params);
+        return 0;
+    } catch (const std::exception & err) {
+        fprintf(stderr, "%s: failed to quantize: %s\n", __func__, err.what());
+        return 1;
+    }
+}
+
+int llama_apply_lora_from_file_internal(const struct llama_model & model, const char * path_lora, const char * path_base_model, int n_threads) {
+    fprintf(stderr, "%s: applying lora adapter from '%s' - please wait ...\n", __func__, path_lora);
+
+    const int64_t t_start_lora_us = ggml_time_us();
+
+    auto fin = std::ifstream(path_lora, std::ios::binary);
+    if (!fin) {
+        fprintf(stderr, "%s: failed to open '%s'\n", __func__, path_lora);
+        return 1;
+    }
+
+    // verify magic and version
+    {
+        uint32_t magic;
+        fin.read((char *) &magic, sizeof(magic));
+        if (magic != LLAMA_FILE_MAGIC_GGLA) {
+            fprintf(stderr, "%s: bad file magic\n", __func__);
+            return 1;
+        }
+        uint32_t format_version;
+        fin.read((char *) &format_version, sizeof(format_version));
+
+        if (format_version != 1) {
+            fprintf(stderr, "%s: unsupported file version\n", __func__ );
+            return 1;
+        }
+    }
+
+    int32_t lora_r;
+    int32_t lora_alpha;
+    fin.read((char *) &lora_r, sizeof(lora_r));
+    fin.read((char *) &lora_alpha, sizeof(lora_alpha));
+    float scaling = (float)lora_alpha / (float)lora_r;
+
+    fprintf(stderr, "%s: r = %d, alpha = %d, scaling = %.2f\n", __func__, lora_r, lora_alpha, scaling);
+
+    // create a temporary ggml context to store the lora tensors
+    // todo: calculate size from biggest possible tensor
+    std::vector<uint8_t> lora_buf(1024ull * 1024ull * 1024ull);
+    struct ggml_init_params params;
+    params.mem_size   = lora_buf.size();
+    params.mem_buffer = lora_buf.data();
+    params.no_alloc   = false;
+
+    ggml_context * lora_ctx = ggml_init(params);
+    std::unordered_map<std::string, struct ggml_tensor *> lora_tensors;
+
+    // create a name -> tensor map of the model to accelerate lookups
+    std::unordered_map<std::string, struct ggml_tensor *> model_tensors;
+    for (const auto & kv : model.tensors_by_name) {
+        model_tensors.insert(kv);
+    }
+
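+    // Editorial sketch (not from this patch): the loop below applies the
+    // standard low-rank update  W' = W + scaling*(BA)  tensor by tensor, with
+    // scaling = lora_alpha / lora_r as computed above; e.g. an adapter trained
+    // with lora_r = 8 and lora_alpha = 16 is applied with scaling = 2.0.
+    // path_base_model, when given, supplies an unquantized W to add onto.
+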
+    // load base model
+    std::unique_ptr<llama_model_loader> model_loader;
+    ggml_context * base_ctx = NULL;
+    gguf_buffer base_buf;
+    if (path_base_model) {
+        fprintf(stderr, "%s: loading base model from '%s'\n", __func__, path_base_model);
+        model_loader.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true));
+
+        size_t ctx_size;
+        size_t mmapped_size;
+        model_loader->calc_sizes(&ctx_size, &mmapped_size);
+        base_buf.resize(ctx_size);
+
+        ggml_init_params base_params;
+        base_params.mem_size   = base_buf.size;
+        base_params.mem_buffer = base_buf.addr;
+        base_params.no_alloc   = model_loader->use_mmap;
+
+        base_ctx = ggml_init(base_params);
+
+        model_loader->ggml_ctx = base_ctx;
+
+        // maybe this should be in llama_model_loader
+        if (model_loader->use_mmap) {
+            model_loader->mapping.reset(new gguf_mmap(&model_loader->file_loader->file, /* prefetch */ 0, ggml_is_numa()));
+        }
+    }
+
+    // read tensors and apply
+    bool warned = false;
+    int n_tensors = 0;
+
+    std::vector<uint8_t> work_buffer;
+
+    while (true) {
+        int32_t n_dims;
+        int32_t length;
+        int32_t ftype;
+
+        fin.read(reinterpret_cast<char *>(&n_dims), sizeof(n_dims));
+        fin.read(reinterpret_cast<char *>(&length), sizeof(length));
+        fin.read(reinterpret_cast<char *>(&ftype),  sizeof(ftype));
+        if (fin.eof()) {
+            break;
+        }
+
+        int32_t ne[2] = { 1, 1 };
+        for (int i = 0; i < n_dims; ++i) {
+            fin.read(reinterpret_cast<char *>(&ne[i]), sizeof(ne[i]));
+        }
+
+        std::string name;
+        {
+            char buf[1024];
+            fin.read(buf, length);
+            name = std::string(buf, length);
+        }
+
+        // check for lora suffix and get the type of tensor
+        const std::string lora_suffix = ".lora";
+        size_t pos = name.rfind(lora_suffix);
+        if (pos == std::string::npos) {
+            fprintf(stderr, "%s: error: '%s' is not a lora tensor\n", __func__, name.c_str());
+            return 1;
+        }
+
+        std::string lora_type = name.substr(pos + lora_suffix.length());
+        std::string base_name = name;
+        base_name.erase(pos);
+        // fprintf(stderr, "%s: %s => %s (lora type %s) ", __func__, name.c_str(), base_name.c_str(), lora_type.c_str());
+
+        if (model_tensors.find(base_name) == model_tensors.end()) {
+            fprintf(stderr, "%s: unknown tensor '%s' in lora adapter\n", __func__, name.data());
+            return 1;
+        }
+
+        // create ggml tensor
+        ggml_type wtype;
+        switch (ftype) {
+            case 0: wtype = GGML_TYPE_F32;  break;
+            case 1: wtype = GGML_TYPE_F16;  break;
+            default:
+                    {
+                        fprintf(stderr, "%s: invalid tensor data type '%d'\n",
+                                __func__, ftype);
+                        return 1;
+                    }
+        }
+        ggml_tensor * lora_tensor;
+        if (n_dims == 2) {
+            lora_tensor = ggml_new_tensor_2d(lora_ctx, wtype, ne[0], ne[1]);
+        }
+        else {
+            fprintf(stderr, "%s: unsupported tensor dimension %d\n", __func__, n_dims);
+            return 1;
+        }
+        ggml_set_name(lora_tensor, "lora_tensor");
+
+        // load tensor data
+        size_t offset = fin.tellg();
+        size_t tensor_data_size = ggml_nbytes(lora_tensor);
+        offset = (offset + 31) & -32;
+        fin.seekg(offset);
+        fin.read((char*)lora_tensor->data, tensor_data_size);
+
+        lora_tensors[name] = lora_tensor;
+
+        // check if we have both A and B tensors and apply
+        if (lora_tensors.find(base_name + ".loraA") != lora_tensors.end() &&
+            lora_tensors.find(base_name + ".loraB") != lora_tensors.end()) {
+
+            ggml_tensor * dest_t = model_tensors[base_name];
+
+            offload_func_t offload_func               = llama_nop;
+            offload_func_t offload_func_force_inplace = llama_nop;
+
+#ifdef GGML_USE_CUBLAS
+            if (dest_t->backend == GGML_BACKEND_GPU || dest_t->backend == GGML_BACKEND_GPU_SPLIT) {
+                if (dest_t->type != GGML_TYPE_F16) {
+                    throw std::runtime_error(format(
+                        "%s: error: the simultaneous use of LoRAs and GPU acceleration is only supported for f16 models", __func__));
+                }
+                offload_func               = ggml_cuda_assign_buffers;
+                offload_func_force_inplace = ggml_cuda_assign_buffers_force_inplace;
} +#endif // GGML_USE_CUBLAS + + ggml_tensor * base_t; + if (model_loader) { + // load from base model + if (model_loader->tensors_map.name_to_idx.find(base_name) == model_loader->tensors_map.name_to_idx.end()) { + fprintf(stderr, "%s: error: tensor '%s' not found in base model\n", __func__, base_name.c_str()); + return 1; + } + size_t idx = model_loader->tensors_map.name_to_idx[base_name]; + llama_load_tensor & lt = model_loader->tensors_map.tensors[idx]; + base_t = model_loader->get_tensor(base_name, { (uint32_t)dest_t->ne[0], (uint32_t)dest_t->ne[1] }, GGML_BACKEND_CPU); + lt.data = (uint8_t *) lt.ggml_tensor->data; + model_loader->load_data_for(lt); + lt.ggml_tensor->data = lt.data; + } + else { + base_t = dest_t; + } + + if (ggml_is_quantized(base_t->type)) { + if (!warned) { + fprintf(stderr, "%s: warning: using a lora adapter with a quantized model may result in poor quality, " + "use a f16 or f32 base model with --lora-base\n", __func__); + warned = true; + } + } + + ggml_tensor * loraA = lora_tensors[base_name + ".loraA"]; + GGML_ASSERT(loraA->type == GGML_TYPE_F32); + ggml_set_name(loraA, "loraA"); + + ggml_tensor * loraB = lora_tensors[base_name + ".loraB"]; + GGML_ASSERT(loraB->type == GGML_TYPE_F32); + ggml_set_name(loraB, "loraB"); + + if (base_t->ne[0] != loraA->ne[1] || base_t->ne[1] != loraB->ne[1]) { + fprintf(stderr, "%s: incompatible tensor dimensions (%" PRId64 " and %" PRId64 ");" + " are you sure that this adapter is for this model?\n", __func__, base_t->ne[0], loraA->ne[1]); + return 1; + } + + // w = w + BA*s + ggml_tensor * BA = ggml_mul_mat(lora_ctx, loraA, loraB); + offload_func(BA); + ggml_set_name(BA, "BA"); + + if (scaling != 1.0f) { + ggml_tensor * scale_tensor = ggml_new_f32(lora_ctx, scaling); + ggml_set_name(scale_tensor, "scale_tensor"); + + BA = ggml_scale_inplace(lora_ctx, BA, scale_tensor); + offload_func(BA); + ggml_set_name(BA, "BA_scaled"); + } + + ggml_tensor * r; + if (base_t == dest_t) { + r = ggml_add_inplace(lora_ctx, dest_t, BA); + offload_func_force_inplace(r); + ggml_set_name(r, "r_add_inplace"); + } + else { + r = ggml_add(lora_ctx, base_t, BA); + offload_func(r); + ggml_set_name(r, "r_add"); + + r = ggml_cpy(lora_ctx, r, dest_t); + offload_func(r); + ggml_set_name(r, "r_cpy"); + } + + struct ggml_cgraph gf = ggml_build_forward(r); + + ggml_graph_compute_helper(work_buffer, &gf, n_threads); + + // we won't need these tensors again, reset the context to save memory + ggml_free(lora_ctx); + lora_ctx = ggml_init(params); + lora_tensors.clear(); + + n_tensors++; + if (n_tensors % 4 == 0) { + fprintf(stderr, "."); + } + } + } + + // TODO: this should be in a destructor, it will leak on failure + ggml_free(lora_ctx); + if (base_ctx) { + ggml_free(base_ctx); + } + + const int64_t t_lora_us = ggml_time_us() - t_start_lora_us; + fprintf(stderr, " done (%.2f ms)\n", t_lora_us / 1000.0); + + return 0; +} + +int llama_apply_lora_from_file(struct llama_context * ctx, const char * path_lora, const char * path_base_model, int n_threads) { + try { + return llama_apply_lora_from_file_internal(ctx->model, path_lora, path_base_model, n_threads); + } catch (const std::exception & err) { + fprintf(stderr, "%s: failed to apply lora adapter: %s\n", __func__, err.what()); + return 1; + } +} + +int llama_model_apply_lora_from_file(const struct llama_model * model, const char * path_lora, const char * path_base_model, int n_threads) { + try { + return llama_apply_lora_from_file_internal(*model, path_lora, path_base_model, n_threads); + } catch (const std::exception 
& err) { + fprintf(stderr, "%s: failed to apply lora adapter: %s\n", __func__, err.what()); + return 1; + } +} + +int llama_get_kv_cache_token_count(const struct llama_context * ctx) { + return ctx->kv_self.n; +} + +#define LLAMA_MAX_RNG_STATE (64*1024) + +void llama_set_rng_seed(struct llama_context * ctx, uint32_t seed) { + if (seed == LLAMA_DEFAULT_SEED) { + seed = time(NULL); + } + ctx->rng.seed(seed); +} + +// Returns the *maximum* size of the state +size_t llama_get_state_size(const struct llama_context * ctx) { + // we don't know size of rng until we actually serialize it. so reserve more than enough memory for its serialized state. + // for reference, std::mt19937(1337) serializes to 6701 bytes. + const size_t s_rng_size = sizeof(size_t); + const size_t s_rng = LLAMA_MAX_RNG_STATE; + const size_t s_logits_capacity = sizeof(size_t); + const size_t s_logits_size = sizeof(size_t); + const size_t s_logits = ctx->logits.capacity() * sizeof(float); + const size_t s_embedding_size = sizeof(size_t); + const size_t s_embedding = ctx->embedding.size() * sizeof(float); + const size_t s_kv_size = sizeof(size_t); + const size_t s_kv_ntok = sizeof(int); + const size_t s_kv = ctx->kv_self.buf.size; + + const size_t s_total = ( + + s_rng_size + + s_rng + + s_logits_capacity + + s_logits_size + + s_logits + + s_embedding_size + + s_embedding + + s_kv_size + + s_kv_ntok + + s_kv + ); + + return s_total; +} + +// Copies the state to the specified destination address +size_t llama_copy_state_data(struct llama_context * ctx, uint8_t * dst) { + uint8_t * out = dst; + + // copy rng + { + std::stringstream rng_ss; + rng_ss << ctx->rng; + + const size_t rng_size = rng_ss.str().size(); + char rng_buf[LLAMA_MAX_RNG_STATE]; + + memset(&rng_buf[0], 0, LLAMA_MAX_RNG_STATE); + memcpy(&rng_buf[0], rng_ss.str().data(), rng_ss.str().size()); + + memcpy(out, &rng_size, sizeof(rng_size)); out += sizeof(rng_size); + memcpy(out, &rng_buf[0], LLAMA_MAX_RNG_STATE); out += LLAMA_MAX_RNG_STATE; + } + + // copy logits + { + const size_t logits_cap = ctx->logits.capacity(); + const size_t logits_size = ctx->logits.size(); + + memcpy(out, &logits_cap, sizeof(logits_cap)); out += sizeof(logits_cap); + memcpy(out, &logits_size, sizeof(logits_size)); out += sizeof(logits_size); + + if (logits_size) { + memcpy(out, ctx->logits.data(), logits_size * sizeof(float)); + } + + out += logits_cap * sizeof(float); + } + + // copy embeddings + { + const size_t embedding_size = ctx->embedding.size(); + + memcpy(out, &embedding_size, sizeof(embedding_size)); out += sizeof(embedding_size); + + if (embedding_size) { + memcpy(out, ctx->embedding.data(), embedding_size * sizeof(float)); + out += embedding_size * sizeof(float); + } + } + + // copy kv cache + { + const auto & kv_self = ctx->kv_self; + const auto & hparams = ctx->model.hparams; + const int n_layer = hparams.n_layer; + const int n_embd = hparams.n_embd; + const int n_ctx = hparams.n_ctx; + + const size_t kv_size = kv_self.buf.size; + const int kv_ntok = llama_get_kv_cache_token_count(ctx); + + memcpy(out, &kv_size, sizeof(kv_size)); out += sizeof(kv_size); + memcpy(out, &kv_ntok, sizeof(kv_ntok)); out += sizeof(kv_ntok); + + if (kv_size) { + const size_t elt_size = ggml_element_size(kv_self.k); + + ggml_context * cpy_ctx = ggml_init({ 4096, NULL, /* no_alloc */ true }); + ggml_cgraph gf{}; + + ggml_tensor * kout3d = ggml_new_tensor_3d(cpy_ctx, kv_self.k->type, n_embd, kv_ntok, n_layer); + kout3d->data = out; + out += ggml_nbytes(kout3d); + + ggml_tensor * vout3d = 
ggml_new_tensor_3d(cpy_ctx, kv_self.v->type, kv_ntok, n_embd, n_layer); + vout3d->data = out; + out += ggml_nbytes(vout3d); + + ggml_tensor * k3d = ggml_view_3d(cpy_ctx, kv_self.k, + n_embd, kv_ntok, n_layer, + elt_size*n_embd, elt_size*n_embd*n_ctx, 0); + + ggml_tensor * v3d = ggml_view_3d(cpy_ctx, kv_self.v, + kv_ntok, n_embd, n_layer, + elt_size*n_ctx, elt_size*n_ctx*n_embd, 0); + + ggml_build_forward_expand(&gf, ggml_cpy(cpy_ctx, k3d, kout3d)); + ggml_build_forward_expand(&gf, ggml_cpy(cpy_ctx, v3d, vout3d)); + ggml_graph_compute_helper(ctx->work_buffer, &gf, /*n_threads*/ 1); + + ggml_free(cpy_ctx); + } + } + + const size_t written = out - dst; + const size_t max_size = llama_get_state_size(ctx); + + GGML_ASSERT(written <= max_size); + + return written; +} + +// Sets the state reading from the specified source address +size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { + uint8_t * inp = src; + + // set rng + { + size_t rng_size; + char rng_buf[LLAMA_MAX_RNG_STATE]; + + memcpy(&rng_size, inp, sizeof(rng_size)); inp += sizeof(rng_size); + memcpy(&rng_buf[0], inp, LLAMA_MAX_RNG_STATE); inp += LLAMA_MAX_RNG_STATE; + + std::stringstream rng_ss; + rng_ss.str(std::string(&rng_buf[0], rng_size)); + rng_ss >> ctx->rng; + + GGML_ASSERT(rng_ss.fail() == false); + } + + // set logits + { + size_t logits_cap; + size_t logits_size; + + memcpy(&logits_cap, inp, sizeof(logits_cap)); inp += sizeof(logits_cap); + memcpy(&logits_size, inp, sizeof(logits_size)); inp += sizeof(logits_size); + + GGML_ASSERT(ctx->logits.capacity() == logits_cap); + + if (logits_size) { + ctx->logits.resize(logits_size); + memcpy(ctx->logits.data(), inp, logits_size * sizeof(float)); + } + + inp += logits_cap * sizeof(float); + } + + // set embeddings + { + size_t embedding_size; + + memcpy(&embedding_size, inp, sizeof(embedding_size)); inp += sizeof(embedding_size); + + GGML_ASSERT(ctx->embedding.capacity() == embedding_size); + + if (embedding_size) { + memcpy(ctx->embedding.data(), inp, embedding_size * sizeof(float)); + inp += embedding_size * sizeof(float); + } + } + + // set kv cache + { + const auto & kv_self = ctx->kv_self; + const auto & hparams = ctx->model.hparams; + const int n_layer = hparams.n_layer; + const int n_embd = hparams.n_embd; + const int n_ctx = hparams.n_ctx; + + size_t kv_size; + int kv_ntok; + + memcpy(&kv_size, inp, sizeof(kv_size)); inp += sizeof(kv_size); + memcpy(&kv_ntok, inp, sizeof(kv_ntok)); inp += sizeof(kv_ntok); + + if (kv_size) { + GGML_ASSERT(kv_self.buf.size == kv_size); + + const size_t elt_size = ggml_element_size(kv_self.k); + + ggml_context * cpy_ctx = ggml_init({ 4096, NULL, /* no_alloc */ true }); + ggml_cgraph gf{}; + + ggml_tensor * kin3d = ggml_new_tensor_3d(cpy_ctx, kv_self.k->type, n_embd, kv_ntok, n_layer); + kin3d->data = (void *) inp; + inp += ggml_nbytes(kin3d); + + ggml_tensor * vin3d = ggml_new_tensor_3d(cpy_ctx, kv_self.v->type, kv_ntok, n_embd, n_layer); + vin3d->data = (void *) inp; + inp += ggml_nbytes(vin3d); + + ggml_tensor * k3d = ggml_view_3d(cpy_ctx, kv_self.k, + n_embd, kv_ntok, n_layer, + elt_size*n_embd, elt_size*n_embd*n_ctx, 0); + + ggml_tensor * v3d = ggml_view_3d(cpy_ctx, kv_self.v, + kv_ntok, n_embd, n_layer, + elt_size*n_ctx, elt_size*n_ctx*n_embd, 0); + + ggml_build_forward_expand(&gf, ggml_cpy(cpy_ctx, kin3d, k3d)); + ggml_build_forward_expand(&gf, ggml_cpy(cpy_ctx, vin3d, v3d)); + ggml_graph_compute_helper(ctx->work_buffer, &gf, /*n_threads*/ 1); + + ggml_free(cpy_ctx); + } + + ctx->kv_self.n = kv_ntok; + } + + const size_t 
nread = inp - src;
+    const size_t max_size = llama_get_state_size(ctx);
+
+    GGML_ASSERT(nread <= max_size);
+
+    return nread;
+}
+
+static bool llama_load_session_file_internal(struct llama_context * ctx, const char * path_session, llama_token * tokens_out, size_t n_token_capacity, size_t * n_token_count_out) {
+    gguf_file file(path_session, "rb");
+    GGML_UNUSED(ctx);
+    GGML_UNUSED(path_session);
+    GGML_UNUSED(tokens_out);
+    GGML_UNUSED(n_token_capacity);
+    GGML_UNUSED(n_token_count_out);
+
+    // TODO: implement with GGUF format
+    return true;
+}
+
+bool llama_load_session_file(struct llama_context * ctx, const char * path_session, llama_token * tokens_out, size_t n_token_capacity, size_t * n_token_count_out) {
+    try {
+        return llama_load_session_file_internal(ctx, path_session, tokens_out, n_token_capacity, n_token_count_out);
+    } catch (const std::exception & err) {
+        fprintf(stderr, "error loading session file: %s\n", err.what());
+        return false;
+    }
+}
+
+bool llama_save_session_file(struct llama_context * ctx, const char * path_session, const llama_token * tokens, size_t n_token_count) {
+    gguf_file file(path_session, "wb");
+
+    // TODO: implement with GGUF format
+
+    return true;
+}
+
+int llama_eval(
+        struct llama_context * ctx,
+           const llama_token * tokens,
+                         int   n_tokens,
+                         int   n_past,
+                         int   n_threads) {
+    if (!llama_eval_internal(*ctx, tokens, nullptr, n_tokens, n_past, n_threads, nullptr)) {
+        fprintf(stderr, "%s: failed to eval\n", __func__);
+        return 1;
+    }
+
+    // get a more accurate load time, upon first eval
+    // TODO: fix this
+    if (!ctx->has_evaluated_once) {
+        ctx->t_load_us = ggml_time_us() - ctx->t_start_us;
+        ctx->has_evaluated_once = true;
+    }
+
+    return 0;
+}
+
+int llama_eval_embd(
+            struct llama_context * ctx,
+                     const float * embd,
+                             int   n_tokens,
+                             int   n_past,
+                             int   n_threads) {
+    if (!llama_eval_internal(*ctx, nullptr, embd, n_tokens, n_past, n_threads, nullptr)) {
+        fprintf(stderr, "%s: failed to eval\n", __func__);
+        return 1;
+    }
+
+    // get a more accurate load time, upon first eval
+    // TODO: fix this
+    if (!ctx->has_evaluated_once) {
+        ctx->t_load_us = ggml_time_us() - ctx->t_start_us;
+        ctx->has_evaluated_once = true;
+    }
+
+    return 0;
+}
+
+int llama_eval_export(struct llama_context * ctx, const char * fname) {
+    const int n_batch = 1;
+    const int n_ctx   = 512 - n_batch;
+
+    const std::vector<llama_token> tmp(n_batch, llama_token_bos());
+
+    if (!llama_eval_internal(*ctx, tmp.data(), nullptr, tmp.size(), n_ctx, 1, fname)) {
+        fprintf(stderr, "%s: failed to eval\n", __func__);
+        return 1;
+    }
+
+    return 0;
+}
+
+int llama_tokenize_with_model(
+    const struct llama_model * model,
+                  const char * text,
+                 llama_token * tokens,
+                         int   n_max_tokens,
+                        bool   add_bos) {
+    auto res = llama_tokenize(model->vocab, text, add_bos);
+
+    if (n_max_tokens < (int) res.size()) {
+        fprintf(stderr, "%s: too many tokens\n", __func__);
+        return -((int) res.size());
+    }
+
+    for (size_t i = 0; i < res.size(); i++) {
+        tokens[i] = res[i];
+    }
+
+    return res.size();
+}
+
+int llama_tokenize(
+        struct llama_context * ctx,
+                  const char * text,
+                 llama_token * tokens,
+                         int   n_max_tokens,
+                        bool   add_bos) {
+    return llama_tokenize_with_model(&ctx->model, text, tokens, n_max_tokens, add_bos);
+}
+
+int llama_n_vocab_from_model(const struct llama_model * model) {
+    return model->vocab.id_to_token.size();
+}
+
+int llama_n_ctx_from_model(const struct llama_model * model) {
+    return model->hparams.n_ctx;
+}
+
+int llama_n_embd_from_model(const struct llama_model * model) {
+    return model->hparams.n_embd;
+}
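+
+// A minimal sketch (illustrative, not part of this patch) of round-tripping
+// the context state with the functions implemented above; note that
+// llama_get_state_size() is an upper bound, so fewer bytes may be written.
+static void llama_example_state_roundtrip(struct llama_context * ctx) {
+    std::vector<uint8_t> state(llama_get_state_size(ctx));
+
+    const size_t written = llama_copy_state_data(ctx, state.data());
+    GGML_ASSERT(written <= state.size());
+
+    // ... evaluate more tokens here, then rewind to the snapshot:
+    const size_t read = llama_set_state_data(ctx, state.data());
+    GGML_ASSERT(read == written);
+}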
+
+int llama_n_vocab(const struct llama_context * ctx) {
+    return ctx->model.vocab.id_to_token.size();
+}
+
+int llama_n_ctx(const struct llama_context * ctx) {
+    return ctx->model.hparams.n_ctx;
+}
+
+int llama_n_embd(const struct llama_context * ctx) {
+    return ctx->model.hparams.n_embd;
+}
+
+int llama_get_vocab_from_model(
+        const struct llama_model * model,
+                      const char * * strings,
+                           float * scores,
+                             int   capacity) {
+    int n = std::min(capacity, (int) model->vocab.id_to_token.size());
+    for (int i = 0; i < n; i++) {
+        strings[i] = model->vocab.id_to_token[i].tok.c_str();
+        scores[i]  = model->vocab.id_to_token[i].score;
+    }
+    return n;
+}
+
+int llama_get_vocab(
+        const struct llama_context * ctx,
+                        const char * * strings,
+                             float * scores,
+                               int   capacity) {
+    return llama_get_vocab_from_model(&ctx->model, strings, scores, capacity);
+}
+
+float * llama_get_logits(struct llama_context * ctx) {
+    return ctx->logits.data();
+}
+
+float * llama_get_embeddings(struct llama_context * ctx) {
+    return ctx->embedding.data();
+}
+
+const char * llama_token_to_str_with_model(const struct llama_model * model, llama_token token) {
+    if (token >= llama_n_vocab_from_model(model)) {
+        return nullptr;
+    }
+
+    return model->vocab.id_to_token[token].tok.c_str();
+}
+
+const char * llama_token_to_str(const struct llama_context * ctx, llama_token token) {
+    return llama_token_to_str_with_model(&ctx->model, token);
+}
+
+llama_token llama_token_bos() {
+    return 1;
+}
+
+llama_token llama_token_eos() {
+    return 2;
+}
+
+llama_token llama_token_nl() {
+    return 13;
+}
+
+struct llama_timings llama_get_timings(struct llama_context * ctx) {
+    struct llama_timings result = {
+        /*.t_start_ms  =*/ 1e-3 * ctx->t_start_us,
+        /*.t_end_ms    =*/ 1.00 * ggml_time_ms(),
+        /*.t_load_ms   =*/ 1e-3 * ctx->t_load_us,
+        /*.t_sample_ms =*/ 1e-3 * ctx->t_sample_us,
+        /*.t_p_eval_ms =*/ 1e-3 * ctx->t_p_eval_us,
+        /*.t_eval_ms   =*/ 1e-3 * ctx->t_eval_us,
+
+        /*.n_sample =*/ std::max(1, ctx->n_sample),
+        /*.n_p_eval =*/ std::max(1, ctx->n_p_eval),
+        /*.n_eval   =*/ std::max(1, ctx->n_eval),
+    };
+
+    return result;
+}
+
+void llama_print_timings(struct llama_context * ctx) {
+    const llama_timings timings = llama_get_timings(ctx);
+
+    fprintf(stderr, "\n");
+    fprintf(stderr, "%s:        load time = %8.2f ms\n", __func__, timings.t_load_ms);
+    fprintf(stderr, "%s:      sample time = %8.2f ms / %5d runs   (%8.2f ms per token, %8.2f tokens per second)\n",
+            __func__, timings.t_sample_ms, timings.n_sample, timings.t_sample_ms / timings.n_sample, 1e3 / timings.t_sample_ms * timings.n_sample);
+    fprintf(stderr, "%s: prompt eval time = %8.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)\n",
+            __func__, timings.t_p_eval_ms, timings.n_p_eval, timings.t_p_eval_ms / timings.n_p_eval, 1e3 / timings.t_p_eval_ms * timings.n_p_eval);
+    fprintf(stderr, "%s:        eval time = %8.2f ms / %5d runs   (%8.2f ms per token, %8.2f tokens per second)\n",
+            __func__, timings.t_eval_ms, timings.n_eval, timings.t_eval_ms / timings.n_eval, 1e3 / timings.t_eval_ms * timings.n_eval);
+    fprintf(stderr, "%s:       total time = %8.2f ms\n", __func__, (timings.t_end_ms - timings.t_start_ms));
+}
+
+void llama_reset_timings(struct llama_context * ctx) {
+    ctx->t_start_us = ggml_time_us();
+    ctx->t_sample_us = ctx->n_sample = 0;
+    ctx->t_eval_us   = ctx->n_eval   = 0;
+    ctx->t_p_eval_us = ctx->n_p_eval = 0;
+}
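+
+// Sketch (illustrative, not part of this patch): deriving throughput from the
+// timings above, mirroring the arithmetic in llama_print_timings();
+// llama_get_timings() clamps n_eval to >= 1, so the expression is well defined.
+static double llama_example_eval_tokens_per_second(const struct llama_timings & t) {
+    return 1e3 / t.t_eval_ms * t.n_eval; // ms per run -> tokens per second
+}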
"AVX512 = " + std::to_string(ggml_cpu_has_avx512()) + " | "; + s += "AVX512_VBMI = " + std::to_string(ggml_cpu_has_avx512_vbmi()) + " | "; + s += "AVX512_VNNI = " + std::to_string(ggml_cpu_has_avx512_vnni()) + " | "; + s += "FMA = " + std::to_string(ggml_cpu_has_fma()) + " | "; + s += "NEON = " + std::to_string(ggml_cpu_has_neon()) + " | "; + s += "ARM_FMA = " + std::to_string(ggml_cpu_has_arm_fma()) + " | "; + s += "F16C = " + std::to_string(ggml_cpu_has_f16c()) + " | "; + s += "FP16_VA = " + std::to_string(ggml_cpu_has_fp16_va()) + " | "; + s += "WASM_SIMD = " + std::to_string(ggml_cpu_has_wasm_simd()) + " | "; + s += "BLAS = " + std::to_string(ggml_cpu_has_blas()) + " | "; + s += "SSE3 = " + std::to_string(ggml_cpu_has_sse3()) + " | "; + s += "VSX = " + std::to_string(ggml_cpu_has_vsx()) + " | "; + + return s.c_str(); +} + +// For internal test use +const std::vector>& llama_internal_get_tensor_map(struct llama_context * ctx) { + return ctx->model.tensors_by_name; +} diff --git a/gguf-llama.h b/gguf-llama.h new file mode 100644 index 000000000..20dcc9f63 --- /dev/null +++ b/gguf-llama.h @@ -0,0 +1,468 @@ +#ifndef LLAMA_H +#define LLAMA_H + +#include "ggml.h" +#ifdef GGML_USE_CUBLAS +#include "ggml-cuda.h" +#define LLAMA_MAX_DEVICES GGML_CUDA_MAX_DEVICES +#else +#define LLAMA_MAX_DEVICES 1 +#endif // GGML_USE_CUBLAS +#include +#include +#include + +#ifdef LLAMA_SHARED +# if defined(_WIN32) && !defined(__MINGW32__) +# ifdef LLAMA_BUILD +# define LLAMA_API __declspec(dllexport) +# else +# define LLAMA_API __declspec(dllimport) +# endif +# else +# define LLAMA_API __attribute__ ((visibility ("default"))) +# endif +#else +# define LLAMA_API +#endif + +#ifdef __GNUC__ +# define DEPRECATED(func, hint) func __attribute__((deprecated(hint))) +#elif defined(_MSC_VER) +# define DEPRECATED(func, hint) __declspec(deprecated(hint)) func +#else +# define DEPRECATED(func, hint) func +#endif + +#define LLAMA_FILE_MAGIC_GGJT 0x67676a74u // 'ggjt' +#define LLAMA_FILE_MAGIC_GGLA 0x67676c61u // 'ggla' +#define LLAMA_FILE_MAGIC_GGMF 0x67676d66u // 'ggmf' +#define LLAMA_FILE_MAGIC_GGML 0x67676d6cu // 'ggml' +#define LLAMA_FILE_MAGIC_GGSN 0x6767736eu // 'ggsn' + +#define LLAMA_FILE_VERSION 3 +#define LLAMA_FILE_MAGIC LLAMA_FILE_MAGIC_GGJT +#define LLAMA_FILE_MAGIC_UNVERSIONED LLAMA_FILE_MAGIC_GGML +#define LLAMA_SESSION_MAGIC LLAMA_FILE_MAGIC_GGSN +#define LLAMA_SESSION_VERSION 1 + +#define LLAMA_DEFAULT_SEED 0xFFFFFFFF + +#if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) || defined(GGML_USE_METAL) +// Defined when llama.cpp is compiled with support for offloading model layers to GPU. 
+#define LLAMA_SUPPORTS_GPU_OFFLOAD +#endif + +#ifndef LLAMA_DEFAULT_RMS_EPS +#define LLAMA_DEFAULT_RMS_EPS 5e-6f +#endif + +#ifdef __cplusplus +extern "C" { +#endif + + // + // C interface + // + // TODO: show sample usage + // + + struct llama_model; + struct llama_context; + + typedef int llama_token; + + typedef struct llama_token_data { + llama_token id; // token id + float logit; // log-odds of the token + float p; // probability of the token + } llama_token_data; + + typedef struct llama_token_data_array { + llama_token_data * data; + size_t size; + bool sorted; + } llama_token_data_array; + + typedef void (*llama_progress_callback)(float progress, void *ctx); + + struct llama_context_params { + uint32_t seed; // RNG seed, -1 for random + int32_t n_ctx; // text context + int32_t n_batch; // prompt processing batch size + int32_t n_gqa; // grouped-query attention (TEMP - will be moved to model hparams) + float rms_norm_eps; // rms norm epsilon (TEMP - will be moved to model hparams) + int32_t n_gpu_layers; // number of layers to store in VRAM + int32_t main_gpu; // the GPU that is used for scratch and small tensors + + const float * tensor_split; // how to split layers across multiple GPUs (size: LLAMA_MAX_DEVICES) + + // ref: https://github.com/ggerganov/llama.cpp/pull/2054 + float rope_freq_base; // RoPE base frequency + float rope_freq_scale; // RoPE frequency scaling factor + + // called with a progress value between 0 and 1, pass NULL to disable + llama_progress_callback progress_callback; + // context pointer passed to the progress callback + void * progress_callback_user_data; + + // Keep the booleans together to avoid misalignment during copy-by-value. + bool low_vram; // if true, reduce VRAM usage at the cost of performance + bool f16_kv; // use fp16 for KV cache + bool logits_all; // the llama_eval() call computes all logits, not just the last one + bool vocab_only; // only load the vocabulary, no weights + bool use_mmap; // use mmap if possible + bool use_mlock; // force system to keep model in RAM + bool embedding; // embedding mode only + }; + // model file types + enum llama_ftype { + LLAMA_FTYPE_ALL_F32 = 0, + LLAMA_FTYPE_MOSTLY_F16 = 1, // except 1d tensors + LLAMA_FTYPE_MOSTLY_Q4_0 = 2, // except 1d tensors + LLAMA_FTYPE_MOSTLY_Q4_1 = 3, // except 1d tensors + LLAMA_FTYPE_MOSTLY_Q4_1_SOME_F16 = 4, // tok_embeddings.weight and output.weight are F16 + // LLAMA_FTYPE_MOSTLY_Q4_2 = 5, // support has been removed + // LLAMA_FTYPE_MOSTLY_Q4_3 = 6, // support has been removed + LLAMA_FTYPE_MOSTLY_Q8_0 = 7, // except 1d tensors + LLAMA_FTYPE_MOSTLY_Q5_0 = 8, // except 1d tensors + LLAMA_FTYPE_MOSTLY_Q5_1 = 9, // except 1d tensors + LLAMA_FTYPE_MOSTLY_Q2_K = 10,// except 1d tensors + LLAMA_FTYPE_MOSTLY_Q3_K_S = 11,// except 1d tensors + LLAMA_FTYPE_MOSTLY_Q3_K_M = 12,// except 1d tensors + LLAMA_FTYPE_MOSTLY_Q3_K_L = 13,// except 1d tensors + LLAMA_FTYPE_MOSTLY_Q4_K_S = 14,// except 1d tensors + LLAMA_FTYPE_MOSTLY_Q4_K_M = 15,// except 1d tensors + LLAMA_FTYPE_MOSTLY_Q5_K_S = 16,// except 1d tensors + LLAMA_FTYPE_MOSTLY_Q5_K_M = 17,// except 1d tensors + LLAMA_FTYPE_MOSTLY_Q6_K = 18,// except 1d tensors + }; + + // model quantization parameters + typedef struct llama_model_quantize_params { + int nthread; // number of threads to use for quantizing, if <=0 will use std::thread::hardware_concurrency() + enum llama_ftype ftype; // quantize to this llama_ftype + bool allow_requantize; // allow quantizing non-f32/f16 tensors + bool quantize_output_tensor; // quantize output.weight 
+ } llama_model_quantize_params; + + // grammar types + struct llama_grammar; + + // grammar element type + enum llama_gretype { + // end of rule definition + LLAMA_GRETYPE_END = 0, + + // start of alternate definition for rule + LLAMA_GRETYPE_ALT = 1, + + // non-terminal element: reference to rule + LLAMA_GRETYPE_RULE_REF = 2, + + // terminal element: character (code point) + LLAMA_GRETYPE_CHAR = 3, + + // inverse char(s) ([^a], [^a-b] [^abc]) + LLAMA_GRETYPE_CHAR_NOT = 4, + + // modifies a preceding LLAMA_GRETYPE_CHAR or LLAMA_GRETYPE_CHAR_ALT to + // be an inclusive range ([a-z]) + LLAMA_GRETYPE_CHAR_RNG_UPPER = 5, + + // modifies a preceding LLAMA_GRETYPE_CHAR or + // LLAMA_GRETYPE_CHAR_RNG_UPPER to add an alternate char to match ([ab], [a-zA]) + LLAMA_GRETYPE_CHAR_ALT = 6, + }; + + typedef struct llama_grammar_element { + enum llama_gretype type; + uint32_t value; // Unicode code point or rule ID + } llama_grammar_element; + + // performance timing information + struct llama_timings { + double t_start_ms; + double t_end_ms; + double t_load_ms; + double t_sample_ms; + double t_p_eval_ms; + double t_eval_ms; + + int32_t n_sample; + int32_t n_p_eval; + int32_t n_eval; + }; + + LLAMA_API int llama_max_devices(); + + LLAMA_API struct llama_context_params llama_context_default_params(); + LLAMA_API struct llama_model_quantize_params llama_model_quantize_default_params(); + + LLAMA_API bool llama_mmap_supported(); + LLAMA_API bool llama_mlock_supported(); + + // TODO: not great API - very likely to change + // Initialize the llama + ggml backend + // If numa is true, use NUMA optimizations + // Call once at the start of the program + LLAMA_API void llama_backend_init(bool numa); + // Call once at the end of the program - currently only used for MPI + LLAMA_API void llama_backend_free(); + + LLAMA_API int64_t llama_time_us(); + + LLAMA_API struct llama_model * llama_load_model_from_file( + const char * path_model, + struct llama_context_params params); + + LLAMA_API void llama_free_model(struct llama_model * model); + + LLAMA_API struct llama_context * llama_new_context_with_model( + struct llama_model * model, + struct llama_context_params params); + + // Various functions for loading a ggml llama model. + // Allocate (almost) all memory needed for the model. + // Return NULL on failure + LLAMA_API DEPRECATED(struct llama_context * llama_init_from_file( + const char * path_model, + struct llama_context_params params), + "please use llama_load_model_from_file combined with llama_new_context_with_model instead"); + + // Frees all allocated memory + LLAMA_API void llama_free(struct llama_context * ctx); + + // Returns 0 on success + LLAMA_API int llama_model_quantize( + const char * fname_inp, + const char * fname_out, + const llama_model_quantize_params * params); + + // Apply a LoRA adapter to a loaded model + // path_base_model is the path to a higher quality model to use as a base for + // the layers modified by the adapter. Can be NULL to use the current loaded model. 
+ // The model needs to be reloaded before applying a new adapter, otherwise the adapter + // will be applied on top of the previous one + // Returns 0 on success + LLAMA_API DEPRECATED(int llama_apply_lora_from_file( + struct llama_context * ctx, + const char * path_lora, + const char * path_base_model, + int n_threads), + "please use llama_model_apply_lora_from_file instead"); + + LLAMA_API int llama_model_apply_lora_from_file( + const struct llama_model * model, + const char * path_lora, + const char * path_base_model, + int n_threads); + + // Returns the number of tokens in the KV cache + LLAMA_API int llama_get_kv_cache_token_count(const struct llama_context * ctx); + + // Sets the current rng seed. + LLAMA_API void llama_set_rng_seed(struct llama_context * ctx, uint32_t seed); + + // Returns the maximum size in bytes of the state (rng, logits, embedding + // and kv_cache) - will often be smaller after compacting tokens + LLAMA_API size_t llama_get_state_size(const struct llama_context * ctx); + + // Copies the state to the specified destination address. + // Destination needs to have allocated enough memory. + // Returns the number of bytes copied + LLAMA_API size_t llama_copy_state_data(struct llama_context * ctx, uint8_t * dst); + + // Set the state reading from the specified address + // Returns the number of bytes read + LLAMA_API size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src); + + // Save/load session file + LLAMA_API bool llama_load_session_file(struct llama_context * ctx, const char * path_session, llama_token * tokens_out, size_t n_token_capacity, size_t * n_token_count_out); + LLAMA_API bool llama_save_session_file(struct llama_context * ctx, const char * path_session, const llama_token * tokens, size_t n_token_count); + + // Run the llama inference to obtain the logits and probabilities for the next token. + // tokens + n_tokens is the provided batch of new tokens to process + // n_past is the number of tokens to use from previous eval calls + // Returns 0 on success + LLAMA_API int llama_eval( + struct llama_context * ctx, + const llama_token * tokens, + int n_tokens, + int n_past, + int n_threads); + + // Same as llama_eval, but use float matrix input directly. + LLAMA_API int llama_eval_embd( + struct llama_context * ctx, + const float * embd, + int n_tokens, + int n_past, + int n_threads); + + // Export a static computation graph for context of 511 and batch size of 1 + // NOTE: since this functionality is mostly for debugging and demonstration purposes, we hardcode these + // parameters here to keep things simple + // IMPORTANT: do not use for anything else other than debugging and testing! + LLAMA_API int llama_eval_export(struct llama_context * ctx, const char * fname); + + // Convert the provided text into tokens. + // The tokens pointer must be large enough to hold the resulting tokens. 
+ // Returns the number of tokens on success, no more than n_max_tokens + // Returns a negative number on failure - the number of tokens that would have been returned + // TODO: not sure if correct + LLAMA_API int llama_tokenize( + struct llama_context * ctx, + const char * text, + llama_token * tokens, + int n_max_tokens, + bool add_bos); + + LLAMA_API int llama_tokenize_with_model( + const struct llama_model * model, + const char * text, + llama_token * tokens, + int n_max_tokens, + bool add_bos); + + LLAMA_API int llama_n_vocab(const struct llama_context * ctx); + LLAMA_API int llama_n_ctx (const struct llama_context * ctx); + LLAMA_API int llama_n_embd (const struct llama_context * ctx); + + LLAMA_API int llama_n_vocab_from_model(const struct llama_model * model); + LLAMA_API int llama_n_ctx_from_model (const struct llama_model * model); + LLAMA_API int llama_n_embd_from_model (const struct llama_model * model); + + // Get the vocabulary as output parameters. + // Returns number of results. + LLAMA_API int llama_get_vocab( + const struct llama_context * ctx, + const char * * strings, + float * scores, + int capacity); + + LLAMA_API int llama_get_vocab_from_model( + const struct llama_model * model, + const char * * strings, + float * scores, + int capacity); + + // Token logits obtained from the last call to llama_eval() + // The logits for the last token are stored in the last row + // Can be mutated in order to change the probabilities of the next token + // Rows: n_tokens + // Cols: n_vocab + LLAMA_API float * llama_get_logits(struct llama_context * ctx); + + // Get the embeddings for the input + // shape: [n_embd] (1-dimensional) + LLAMA_API float * llama_get_embeddings(struct llama_context * ctx); + + // Token Id -> String. Uses the vocabulary in the provided context + LLAMA_API const char * llama_token_to_str( + const struct llama_context * ctx, + llama_token token); + + LLAMA_API const char * llama_token_to_str_with_model( + const struct llama_model * model, + llama_token token); + + // Special tokens + LLAMA_API llama_token llama_token_bos(); // beginning-of-sentence + LLAMA_API llama_token llama_token_eos(); // end-of-sentence + LLAMA_API llama_token llama_token_nl(); // next-line + + // Grammar + // + LLAMA_API struct llama_grammar * llama_grammar_init( + const llama_grammar_element ** rules, + size_t n_rules, + size_t start_rule_index); + + LLAMA_API void llama_grammar_free(struct llama_grammar * grammar); + + // Sampling functions + + /// @details Repetition penalty described in CTRL academic paper https://arxiv.org/abs/1909.05858, with negative logit fix. + LLAMA_API void llama_sample_repetition_penalty(struct llama_context * ctx, llama_token_data_array * candidates, const llama_token * last_tokens, size_t last_tokens_size, float penalty); + + /// @details Frequency and presence penalties described in OpenAI API https://platform.openai.com/docs/api-reference/parameter-details. + LLAMA_API void llama_sample_frequency_and_presence_penalties(struct llama_context * ctx, llama_token_data_array * candidates, const llama_token * last_tokens, size_t last_tokens_size, float alpha_frequency, float alpha_presence); + + /// @details Apply classifier-free guidance to the logits as described in academic paper "Stay on topic with Classifier-Free Guidance" https://arxiv.org/abs/2306.17806 + /// @param candidates A vector of `llama_token_data` containing the candidate tokens, the logits must be directly extracted from the original generation context without being sorted. 
+    /// @param guidance_ctx A separate context from the same model. Other than a negative prompt at the beginning, it should have all generated and user input tokens copied from the main context.
+    /// @param scale Guidance strength. 1.0f means no guidance. Higher values mean stronger guidance.
+    LLAMA_API void llama_sample_classifier_free_guidance(
+              struct llama_context * ctx,
+            llama_token_data_array * candidates,
+              struct llama_context * guidance_ctx,
+                             float   scale);
+
+    /// @details Sorts candidate tokens by their logits in descending order and calculates probabilities based on the logits.
+    LLAMA_API void llama_sample_softmax(struct llama_context * ctx, llama_token_data_array * candidates);
+
+    /// @details Top-K sampling described in academic paper "The Curious Case of Neural Text Degeneration" https://arxiv.org/abs/1904.09751
+    LLAMA_API void llama_sample_top_k(struct llama_context * ctx, llama_token_data_array * candidates, int k, size_t min_keep);
+
+    /// @details Nucleus sampling described in academic paper "The Curious Case of Neural Text Degeneration" https://arxiv.org/abs/1904.09751
+    LLAMA_API void llama_sample_top_p(struct llama_context * ctx, llama_token_data_array * candidates, float p, size_t min_keep);
+
+    /// @details Tail Free Sampling described in https://www.trentonbricken.com/Tail-Free-Sampling/.
+    LLAMA_API void llama_sample_tail_free(struct llama_context * ctx, llama_token_data_array * candidates, float z, size_t min_keep);
+
+    /// @details Locally Typical Sampling implementation described in the paper https://arxiv.org/abs/2202.00666.
+    LLAMA_API void llama_sample_typical(struct llama_context * ctx, llama_token_data_array * candidates, float p, size_t min_keep);
+
+    LLAMA_API void llama_sample_temperature(struct llama_context * ctx, llama_token_data_array * candidates, float temp);
+
+    /// @details Apply constraints from grammar
+    LLAMA_API void llama_sample_grammar(struct llama_context * ctx, llama_token_data_array * candidates, const struct llama_grammar * grammar);
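+
+    // A typical chain of the samplers above (an illustrative sketch, not part
+    // of this patch; the numeric values are examples, not library defaults):
+    //
+    //     llama_sample_top_k      (ctx, &candidates_p, 40, 1);
+    //     llama_sample_top_p      (ctx, &candidates_p, 0.95f, 1);
+    //     llama_sample_temperature(ctx, &candidates_p, 0.80f);
+    //     llama_token id = llama_sample_token(ctx, &candidates_p); // declared below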
+
+    /// @details Mirostat 1.0 algorithm described in the paper https://arxiv.org/abs/2007.14966. Uses tokens instead of words.
+    /// @param candidates A vector of `llama_token_data` containing the candidate tokens, their probabilities (p), and log-odds (logit) for the current position in the generated text.
+    /// @param tau The target cross-entropy (or surprise) value you want to achieve for the generated text. A higher value corresponds to more surprising or less predictable text, while a lower value corresponds to less surprising or more predictable text.
+    /// @param eta The learning rate used to update `mu` based on the error between the target and observed surprisal of the sampled word. A larger learning rate will cause `mu` to be updated more quickly, while a smaller learning rate will result in slower updates.
+    /// @param m The number of tokens considered in the estimation of `s_hat`. This is an arbitrary value that is used to calculate `s_hat`, which in turn helps to calculate the value of `k`. In the paper, they use `m = 100`, but you can experiment with different values to see how it affects the performance of the algorithm.
+    /// @param mu Maximum cross-entropy. This value is initialized to be twice the target cross-entropy (`2 * tau`) and is updated in the algorithm based on the error between the target and observed surprisal.
+    LLAMA_API llama_token llama_sample_token_mirostat(struct llama_context * ctx, llama_token_data_array * candidates, float tau, float eta, int m, float * mu);
+
+    /// @details Mirostat 2.0 algorithm described in the paper https://arxiv.org/abs/2007.14966. Uses tokens instead of words.
+    /// @param candidates A vector of `llama_token_data` containing the candidate tokens, their probabilities (p), and log-odds (logit) for the current position in the generated text.
+    /// @param tau The target cross-entropy (or surprise) value you want to achieve for the generated text. A higher value corresponds to more surprising or less predictable text, while a lower value corresponds to less surprising or more predictable text.
+    /// @param eta The learning rate used to update `mu` based on the error between the target and observed surprisal of the sampled word. A larger learning rate will cause `mu` to be updated more quickly, while a smaller learning rate will result in slower updates.
+    /// @param mu Maximum cross-entropy. This value is initialized to be twice the target cross-entropy (`2 * tau`) and is updated in the algorithm based on the error between the target and observed surprisal.
+    LLAMA_API llama_token llama_sample_token_mirostat_v2(struct llama_context * ctx, llama_token_data_array * candidates, float tau, float eta, float * mu);
+
+    /// @details Selects the token with the highest probability.
+    LLAMA_API llama_token llama_sample_token_greedy(struct llama_context * ctx, llama_token_data_array * candidates);
+
+    /// @details Randomly selects a token from the candidates based on their probabilities.
+    LLAMA_API llama_token llama_sample_token(struct llama_context * ctx, llama_token_data_array * candidates);
+
+    /// @details Accepts the sampled token into the grammar
+    LLAMA_API void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar * grammar, llama_token token);
+
+    // Performance information
+    LLAMA_API struct llama_timings llama_get_timings(struct llama_context * ctx);
+    LLAMA_API void llama_print_timings(struct llama_context * ctx);
+    LLAMA_API void llama_reset_timings(struct llama_context * ctx);
+
+    // Print system information
+    LLAMA_API const char * llama_print_system_info(void);
+
+#ifdef __cplusplus
+}
+#endif
+
+// Internal API to be implemented by llama.cpp and used by tests/benchmarks only
+#ifdef LLAMA_API_INTERNAL
+
+#include <vector>
+#include <string>
+struct ggml_tensor;
+
+const std::vector<std::pair<std::string, struct ggml_tensor *>>& llama_internal_get_tensor_map(struct llama_context * ctx);
+
+#endif
+
+#endif // LLAMA_H
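
The header above still carries "TODO: show sample usage", so here is an editorial sketch (not part of the patch; the model path and thread count are placeholders) that strings the declarations together into a minimal greedy next-token prediction:

    // sketch: minimal end-to-end use of the API declared in gguf-llama.h
    #include "gguf-llama.h"
    #include <cstdio>
    #include <vector>

    int main() {
        llama_backend_init(/*numa =*/ false);

        struct llama_context_params cparams = llama_context_default_params();
        struct llama_model * model = llama_load_model_from_file("models/7B/model.bin", cparams); // placeholder path
        if (!model) return 1;
        struct llama_context * ctx = llama_new_context_with_model(model, cparams);

        // tokenize the prompt; a negative return means the buffer was too small
        std::vector<llama_token> tokens(llama_n_ctx(ctx));
        const int n_tok = llama_tokenize(ctx, "Hello", tokens.data(), (int) tokens.size(), /*add_bos =*/ true);
        if (n_tok < 0) return 1;

        // evaluate the batch; the logits of the last token are then available
        if (llama_eval(ctx, tokens.data(), n_tok, /*n_past =*/ 0, /*n_threads =*/ 4) != 0) return 1;

        // build the candidate array and pick the most probable next token
        const float * logits  = llama_get_logits(ctx);
        const int     n_vocab = llama_n_vocab(ctx);
        std::vector<llama_token_data> cand;
        cand.reserve(n_vocab);
        for (llama_token id = 0; id < n_vocab; id++) {
            cand.push_back({ id, logits[id], 0.0f });
        }
        llama_token_data_array cand_p = { cand.data(), cand.size(), false };
        const llama_token next = llama_sample_token_greedy(ctx, &cand_p);
        printf("next token: %s\n", llama_token_to_str(ctx, next));

        llama_free(ctx);
        llama_free_model(model);
        llama_backend_free();
        return 0;
    }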

From e59fcb2bc129881f4a269fee748fb38bce0a64de Mon Sep 17 00:00:00 2001
From: Christian Demsar
Date: Thu, 10 Aug 2023 10:28:27 -0400
Subject: [PATCH 15/87] Add --n-predict -2 for stopping generation on full
 context (#2565)

---
 examples/common.cpp     | 2 +-
 examples/main/README.md | 8 ++++++--
 examples/main/main.cpp  | 6 +++++-
 3 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/examples/common.cpp b/examples/common.cpp
index 4d3ba9bb2..9f8aab9a2 100644
--- a/examples/common.cpp
+++ b/examples/common.cpp
@@ -543,7 +543,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) {
     fprintf(stdout, "  --in-suffix STRING    string to suffix after user inputs with (default: empty)\n");
     fprintf(stdout, "  -f FNAME, --file FNAME\n");
     fprintf(stdout, "                        prompt file to start generation.\n");
-    fprintf(stdout, "  -n N, --n-predict N   number of tokens to predict (default: %d, -1 = infinity)\n", params.n_predict);
+    fprintf(stdout, "  -n N, --n-predict N   number of tokens to predict (default: %d, -1 = infinity, -2 = until context filled)\n", params.n_predict);
     fprintf(stdout, "  -c N, --ctx-size N    size of the prompt context (default: %d)\n", params.n_ctx);
     fprintf(stdout, "  -b N, --batch-size N  batch size for prompt processing (default: %d)\n", params.n_batch);
     fprintf(stdout, "  -gqa N, --gqa N       grouped-query attention factor (TEMP!!! use 8 for LLaMAv2 70B) (default: %d)\n", params.n_gqa);
diff --git a/examples/main/README.md b/examples/main/README.md
index 55c16096f..60e3907d5 100644
--- a/examples/main/README.md
+++ b/examples/main/README.md
@@ -160,9 +160,13 @@ The following options allow you to control the text generation process and fine-
 
 ### Number of Tokens to Predict
 
-- `-n N, --n-predict N`: Set the number of tokens to predict when generating text (default: 128, -1 = infinity).
+- `-n N, --n-predict N`: Set the number of tokens to predict when generating text (default: 128, -1 = infinity, -2 = until context filled)
 
-The `--n-predict` option controls the number of tokens the model generates in response to the input prompt. By adjusting this value, you can influence the length of the generated text. A higher value will result in longer text, while a lower value will produce shorter text. A value of -1 will cause text to be generated without limit.
+The `--n-predict` option controls the number of tokens the model generates in response to the input prompt. By adjusting this value, you can influence the length of the generated text. A higher value will result in longer text, while a lower value will produce shorter text.
+
+A value of -1 will enable infinite text generation, even though we have a finite context window. When the context window is full, some of the earlier tokens (half of the tokens after `--n-keep`) will be discarded. The context must then be re-evaluated before generation can resume. On large models and/or large context windows, this will result in a significant pause in output.
+
+If the pause is undesirable, a value of -2 will stop generation immediately when the context is filled.
 
 It is important to note that the generated text may be shorter than the specified number of tokens if an End-of-Sequence (EOS) token or a reverse prompt is encountered. In interactive mode, text generation will pause and control will be returned to the user. In non-interactive mode, the program will end. In both cases, the text generation may stop before reaching the specified `n-predict` value. If you want the model to keep going without ever producing End-of-Sequence on its own, you can use the `--ignore-eos` parameter.
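
To make the halving rule described above concrete, here is an editorial sketch with example numbers, mirroring the arithmetic in the main.cpp hunk below:

    // illustrative values; variable names follow examples/main/main.cpp
    const int n_ctx  = 2048;
    const int n_keep = 64;
    int       n_past = n_ctx;            // the context window is full
    const int n_left = n_past - n_keep;  // 1984 tokens beyond the kept prefix
    n_past = n_keep > 0 ? n_keep : 1;    // always keep at least the BOS token
    // about n_left/2 = 992 of the most recent tokens are re-inserted and
    // re-evaluated before generation resumes, which is the "significant pause"
    // noted above; with --n-predict -2 this branch is never reached and
    // generation stops instead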
diff --git a/examples/main/main.cpp b/examples/main/main.cpp
index 56ada7e69..a632bea1c 100644
--- a/examples/main/main.cpp
+++ b/examples/main/main.cpp
@@ -431,8 +431,12 @@ int main(int argc, char ** argv) {
             // - take the n_keep first tokens from the original prompt (via n_past)
             // - take half of the last (n_ctx - n_keep) tokens and recompute the logits in batches
             if (n_past + (int) embd.size() + std::max(0, guidance_offset) > n_ctx) {
-                const int n_left = n_past - params.n_keep;
+                if (params.n_predict == -2) {
+                    fprintf(stderr, "\n\n%s: context full, stopping generation\n", __func__);
+                    break;
+                }
 
+                const int n_left = n_past - params.n_keep;
                 // always keep the first token - BOS
                 n_past = std::max(1, params.n_keep);
 
                 n_past_guidance = std::max(1, params.n_keep + guidance_offset);

From 4f865181aa9b3f214ed772688de5ad24068a5f04 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?=
Date: Thu, 10 Aug 2023 17:49:31 +0300
Subject: [PATCH 16/87] gguf : start implementing libllama in GGUF (WIP)

---
 gguf-llama-simple | Bin 607488 -> 607488 bytes
 gguf-llama.cpp    |   5 ++++-
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/gguf-llama-simple b/gguf-llama-simple
index d7600282e3315a4bc805a4f6c01550d58bdb80a5..bfa61de57dbc77cfb6c180b20a23a46dfebaf99f 100644
GIT binary patch
[two base85-encoded binary deltas (19466 and 19318 bytes) for the rebuilt gguf-llama-simple binary omitted]
zD|zr=N9_~_5Pyf(whDLVYuTR%y%WIU0 zeiMBG1)9d;@c$9=dy|>EGqHF*_#L?RAtM^c(j3u#XNsdU_^E0P1&hISzaA^E|9h(| zAfdOmzRMcQ%sXTVraP6vl~CA+^uk=b_ay;1tH<^wZEIXcXCF_-+bMY73-91tRe^hN z5>;n^1yzeTpLmCo)j&wH!C!qzD|SL9_@@y)TdxXQr;(6k;avqrX`|~XZVcM~dt-10 znWfiopAF5>|KCufaFtrSh_RNsW6J5iHBE49ChuF9z|J({;gIuAa^Bs;D2zu1vdr&I zWtk)qUZ;^n9l-_^>b!&aek9TFY^59}JsJ=;ytUT}QtGUOZ~NhDaubUBksXWxS^dc; zY&3zx{mGNM9UkILQDI(WaK(F8+-UT4gI@-ae)_AgD{zR2(w;5Raue@j=pA|8chF=Y z(VNpS7Cg^c0UWzRa1GI{O6U-na0^U3y>pRQQ#peJek6U3anT445ILN3e2c* z5Y6B+1vZ*%+Lgyq@!1LuQ_rcmc)dDR{1`WjBRKD{zRaWetT>6u7PmFQ;&v0{2tlsT2-S;Pxs!9$~XkOTibb z_(&@L>V+KsX%!wy;hPHV>}+?u4~0)CFsH(uDg3hnH&fx36keskpE~2sz1*ISDZW6# zSF7Tl6dtF*=Tx{Zg?lP6=VEuIHig?La9b6wLE%6J&QxJ7h0V?izFoyDq7Z*uCWn7t zg`ZRSjskn)EGRoJQ23MrcUR#@6yBx44^{XEh1V!>H-ycymXj1;q~I-8@d64@P+*rj zcAGy@xQ_y7sPK9Uw^QIS6<$K&U z!oMo;cPiYD!h02XfeMFF_*(@YM`1HPo{I^f_-6|Kr7B*Z!jlwutqQwQxSs-NsxV98 z_6j^mh2KOX9HziyRJe@7ZqLnfoWoW8A;qho$?!lGzCz)L3fxD9Pg3}t0uNK+0t)X} z;4Uis1BEvza5D)u)9^b{&p%i2d8&9Eg{La;Xcd+kZlD6^s&E#S?xetzRroN4BNX_5 zRCpJK>p!b7OOyW+6}M4bqlka4!Y?TN_#Zhd*(%(I4xtMQ+*9qgkre(#fxlH@KMHSB zU`|$I#_(M!o~Pg@RXI!HX$t(_!z${^5Cx7@rKNFbRA9Xd-=ose3Y@0GS1H^8VKZI( zQdRsniaRLc?_H6G=aZ*$`0rCL4bMLm*twp4c-5Q;JLtcs5_Dv*cLA#b|m?ZsU;Ujy#Q_$Y0pl1 z3PVN{L&Jf8SK!TGku|skH#XGgXY~A3e-M{>4ZX7&MW(ZzpMq{Q2@5}nvUD40{AkDW z0(<4-4X^On)>4A^dGrKo$P-CDlLV7T5pU)OY#2>aS`TI|pU#?WL88CuhomMb!x^o?QV z7&6|cRG=OmGL+NP8G7{fejDZY4>TN02C&VZ!R)bQc+DZDD2m(h&X3^HSQ5`}djyH& z3EmEmBMtHPq5n9%lkcQp-;N_4m{Ld>PduP}9O=yD(d$8MXgY~!&VrbZcfkw5G=Z4d<`3ZO33y9~H!2fIcLt!vL`1vXheZ<+t%mbh zWOjg02Ieo1jk7vl35MSCy^Mu8yq)@bYb;(Vc#UuoE?52 zo=n0UzwOXyGU>$Zfw7bEY_ZuLSTh;xwt4^$CX+DM=>gQ4f~T6tq5l-JrI}kbJ;AS4 z&<7j#Aa2$11|dGVA@-_^f6*M)DoFZ(VWo_N&}=G+^x9QLD{}v;xQ6m4{{?fW5+Cy{ zMWFu&3Vx#kaj5xmtbpxGpG%#XABL^OJod-?0Nez3RB8?Spq7I#)^eb=s;VR*Uqxu( z2SQz`(5(k3bVU*Z&rH&iopTdXGqJBa+=Mfkcu)xNCX<+&e0~Fi!<& z7iO^>d>o)PzLuICdq~3ujHVy~W0AUypu_l#dCLt{DLr8CIvOuZ`(hOjsY+}YYp=gB z(J}3**M-8W>#`Fx{X#9tSM8HmR5?B-!>4#Cxy)0NoOm4)W)uCWOOo61J8EEd%CFGc z@%!j9se-t=b9yHugp@8G8l=vxzs`>J{9WjZGYJ zZRH$da0$NpfwSYU@XhAnu_``_u)cS&RC-+8tDLGS9G z2@f$6JY(sI_uzecrWh?3P1cyy-CG$q?B9vjyi{C;yu_`B0;pD}ipo}P$;h{2aQARhFQH`F-M%?NnLml|S1C`N< zGKQ!%&ZrFY63PiwIlENOYRXV&7h$!^xI!5zYK{3S!v$NTV6e&Sk!zyR@zpHFg8D*5wOEqFu8R2wHy>GuMDkGUP-e<6%%CJ&KS4kD#mf`?VIR%u{ zLp2hlGTu^#r^;|v8Re7_r!rpSIKdui?TL?eVm!R09=QQ~u!Pk{te1@CDcHE4t_Uua z!I^eJH^lbI*hm>Gu5|4XRb1J>5IL+jqrpRwu6F4eh<$<>P7p2MOV3fHC&|)2f54Xu zh+CUAa#5oSX47X3itH7cdH<5^@K^cBLgl_blJZ~S?gD&CV*-}Y{LiuwI+nfay@nrT z=Cb$93L6`T;_3xRlWaf*D_A%QG*S~_6swq#^hg8aV-+eR+180nU%6UJ~9!a^R z@m!{i0jjRED#H_>s}>AX8GBX6aLQ<^GS;e$Ih5h2GO|^Mc@gEjpMAzjoVK(bPNKNd zl0Tsk=D~|WR-DBWelT?St?_P$~aCL-Bkvtj7rLgQ5o~hDyKd^ zRxRkHaz?6*j+F7fOPi2E*Y3aVyO4pD|EXHBiCS?hWwcWn^;E`L%8BwaQ&N-`Ec45ySktBOlwra-QZ~UX|sC)q@|qpM$u?B!S^!)?(7N&Q@#!qs4)~ zAA|2CxN5)PFE7TM12+Cwh|VSM>?fzen2V1eRzXfKNr)|bg?=nMg=#JKCumw>${D&@ zcq{ZjhnofUWgNO;NV4EtTJMDD6sXQ6huJCJ;gE&gV0(Q7Uw@9T3XDAre|%0_u!#&f zSxJETuLshuD#`kV5=VRNzxbqk7!nL;Fxrv!X@@_S8z9TY$#N~I9KN!pT_MY1LNa6l zy)57^Is8bNd(=kHu7#{aHhhnxFZ^UBu?|~KRKU!)+JNB1lNBPqt@T!j#S7)3{{H`X zsPA z9>Y4GfN@`te^|F-=)aVt)%;rW2w!f%XY5OHe;;@h9xo+JS+^pXl}`@VUNw|HnW77k zMk7Byn^Xi7za)D@4jhphRC;v-Z*+v!a%{o|>IB|?R!9^dFo9_qX~2?WFlHHP<#4*V z0#0-%5zJ$_w2Xu_yImlQcRq@P_}Fn;I4+!)y%GWx9oq-vQ#`fPKz<;JsjI^ZA3j&E ze*~s~MG89AFQk5b{4U6_!&GyLU_q)rj!Qo#o6D5U-WGn9Xo=8k5X@Rmp0jn2^XV(d ze1^&ApROb|7$S)2hz4(%BxJ80xr6h+glvVc zzah<-XK?l#;_3O?mC+1;SsfgB=E?A~xxte&4Rr_}g#=db`bWL7IUww5z{kS5A z_gu#x-$*7f_&P)MCei~>2C_GiKsNmw*uIH$WNh$k6Y*pXN1^6sY^U>(yqWmaU5T5M zEOvH*3*Wl9JjBr;Y~bf?CJz}l^m`bw70)PgV8>Qch6e~6w~-)ryd#|1MpBt`e8cZZ 
zONLnreYWG#Ng-ryCvWhvWcv4{CmX(!Kl(jZbqLll;PNA>atIMMF!6izDUToe6P|c5 zx$xu9#O7eBQt+qyc2Zb?Y+%$JXIRxw_(f+4)REWiubvhbqC%W)+d{Tw|6qw~Y!ce<-W`<9Laz1k}InKHk zTNDLD@p*fib{}-!PvYX+ywPBmp9TjmGNfQG{~lhJ6+C&5As??DG21>e|9vaqyTQnk z8Y1xx9NtfUVGXbNSqJd^h3&JL|KS&WcJJo&oVLo~`<QB zHrzZ$ykKhqdC8_OlneY3EVN%B7fK4rc1-z{qht(QcfO3hJxU(1bGm74c(P0oE3if1D2DL(5LlPid~>3jj`#pjw01RfxmyR&yEJ zA~)YgbHH^sKknvf5?x?X@=Bkx6zW z17D81cp6_ey1_GN$SRg?Iu+LZhQSWI3x|KhHvIH9%sNX#L-yQ4^~dC-&VdRs(Na9B zy~VLvaivE`LkhO3b{R?t!vKCeOSVRDTZtkr`-+MWhMp|0?02-dJVHBT3XUq`VjYO@ zEnOp?N$kM0UHSMreH z%GX{y5@nBbs~u7EHFEUF`5IuCQS&v^j;Q%+hp$39TqlW!>wr9xjGV^yC zOB`G=kmA>+PPgy@J3=951;UwW{6(00odhw*z~u(eiT-7yah?F&Kaym5PkHPeNWMo7_ zw$fUL_Smq^SRs-Ev+xBW1I{$syW_BuEn^P8*jSVBcesHEbsyxZ)fYKem`Q zQgLu#ae8rec}el>$-lG3b2GGMh$D5fQ2xce!GR~!i;iZMpF+&NcM-*KUOW8+i$0^I zB2I&6%8RQfzogXzrxfu^{w4_yZ1Z0H*{7tCal3UG6;=+uKVEj)HOXSm@Bwe|O6Sz%XrKK{?5S#+5ipdc)2N z;tfwLNr`{qPPBV;*5Ff^Ykb8W*F>$)Cz7`~D5nek#=#%&@I~YeFsBL^!ttE|sz{<+ z{tnsO52nE!cv6My&NFbXCX;=uf5tT3n;V?w{?>lwQ)a&kEQN2Y$xyc8n3XotJoYN$ z*bm}&$TUVNhZoA_z})WizdsiF7Annt60=yH;l7hkw~^TnY)pIpF{69x5Y@O1_1o2H zo)Fy$C)O_l&eqd~v;L`2T~FuBYEq%0o31|I zth8{`Mc{^bq?@i?v#V=RY5Pu^uPA)1ljbih#OT)Y zSKM?R8RjT>d!U{R(A7idhoy86U8~@gfTBm|WsKfK|7Kjm)gd^;sSwXshVT3<_;#7R z2$%uRdg%Pw2>@jtx}j{X)zGuP?vt?lRIcer+zQz-M->X!wix;M8*x8eRcK z^>w3|U3{2c*Mngv1;ac~T~jvQL}T^QnpvTX2`+i+zG*Q&6w~s}^`}J(dpo8_I2PR- zq=}in60z5>e?$Cw(ZZ3A10xI+4>u!DFXC6i)&{yL*BmSle~}!VdWNDtE8$TCUHh6- z6{*0_`DR|ac!v1_CVT5Vm=%!gt=rIfcMvLHas44p)@RF5!9^U256g>xn|#@^xOnnQ z2V}k}8s1x@{q8G;`=-3;*dWJIjz!0k9jBoyy*2Sa!XzJE?|RcwM3VhNxp3bMSABF- zYxSUI6bv8p72l_!&XcK=g|?(ggU%A3T4I3tzPiv(Q$DE>aUf)Le_mWR^moi2ml62W z7if6b1Wt29Du=7(zoc@ETK-2W$NvP+e06)+xTXANKbHwx6_0vJlg`tU;{Pr;2-z+2WD(7P`Yi+MM1dmZM*pzuv#bBIP6r;i){#daPZZ_AM8h;wa;6ZN8f=D&Z<|2xKi#SeU>a^a`2X;= z*mbm1E|w}Z8)<5}O`L|`qxijq-$(eZU11Zw@Y@l;7W|IFFLoUb+|ASZ@?xHDawBur zd{O+reeWy~#sAZn*8h-i*H?x0==Bq%wHL~iUVk8)^e^L-@v*WGgg>nKuWMQd1`pGD z!;xJ&!uQ{$d*Ncv9cL4JjnruNBb|f$@2eAR;;%@#44asVcq~%485&LbB%3$_DL2I? zZbF(i)g~@S{tTo$kdDd3tc^mNWfR*V&7EcwbCGUCDxkbD2kT*3KNtBkvEDqJSTqXH z&tvD?#H+{%UT71SAR{f?Cf-1ryT~RQMx$dnHn9nwQC?ka6Vs6f=i0&}W(4)@N1ACyfwXHj@eoyj)QA;h|Fnq%kp^E! zgGlu^Fj86(sc9irylE5HB0YHvGlg{9ZOjzT#N~I;0hAN&+eAG&SpJty9D}(2kxkr& zbc}!jBEIS|W(3RIUf4u)8$9PSGNL#GX|SUx79ovAdKKvyq_2=(t&9JAmxFR1qPPTU z8q)PhbCDiGs`n8^ZZWoLV^QpdG#BX@r2COBLb@tK6wf0y#)zUf&hBYQqmbsth-T4* z7yFY$aTU_?9-{aFDVHjWhw@#z2sAQ_#`Rv5YhU>6ocOqL_{sGp35-e5AQZms5dx=m^qP zIMiaTSdpJltb3cvG=Y(N&X-+M0~YJKhK&z7*+gFreiEHsv)O*m9A296+p(ucQ;57r zl-C(-Cv+ZgN6$s!KfTxSf6KuhE~B;;Wr(h zFQ$3g#K-XaV_jpY*MRfa4)n2!ky4o-(tsPuXw!XdVjfKX9R;3wasEsK)bU2uMU7yv zH`j$N4}|UBTvMhaf76>A;>51gLEBI+6yJ%7cH-P&UMT0xG>5!Ut~=uhH$%D3xbY4Q z_%g0|r(Jp$&2 za~&8B91Z8%F_VCa;JVk^(ZD8N!FU|KeBtM7Is=#^Bu6(ypo}ZrjlddcH4^ny`M|VD zRNDlKBe~9OkuNw$p|w|jFgS|)gk9nfTcWsB2V;Lc^s2*kf`n+!G4x50O*~1Hh97zd zdt*E{nD0aq)~B+-%16 ztaU!nCjQw7Rqt*9>$F^bNNtX7&<1jvV^GWc!rA7ixfeLaV8{>p!QdFQ>jpo@aP8O! z1K^(+?h|GX#I(S?=MINiEl_w29B9FH)~+9k>lOI3oF05ya*>$t2CPx-}Q?! 
zH5xxiY=y1T90s+*R?#1`iLIcFW~E;!s=3q(!>^6g@rP!YWZ^J|8m&364&KO`UxUUX zcbG%S%Dsr|lH+^qJYsvUyjPKctOtngKrDsTK&>Fo4?b(n`DhQ}v^@l(+i?DHx;593 zy?O)wZq2pFwh3>8Jv#v=x51VhgOhO{_@BdZdLKnSf51t(zidwGplfl>El2EYn0yvj z=(u>UKK8vH@myH=E1b9Ok4quGqp%x1>v4`@ud)AHjf)bp{a0%2ze=P03XR783p92)zw@#_`)|+Ke{076 z+cI`Jz>++fBJez#(Gi-N~iNzd?^QZH1e)XImEqiQV%=Y29smnU!sP|lk zIy5kB85(#V3A2`A3hpd{?aOd-&p!n(u#8Imzv7958QH++g;37yk50ZWM#HqgQcP8sz)P4q@h`PYs;7 z>S79?_k@7e++}wCU8r2meZeH~majSdNtdB8b}i1XlVCL^L-|u{IbViJ;U9m4>m*yK zhgR#*XabB~$1TOMp>iGf4@y5?&qcE5zk~)GFmF*%wgDa2K#h&qE*k#SMh>5=xB|0@ zo6fFp53@FL)38q2CMmfKHlwB;Ay9V}Uju-rd5#p5m zGwnV6fuA`YgOMHG!EL2Z?!n_dims; tensor.type = cur->type; tensor.ne.resize(n_dims); - memcpy(tensor.ne.data(), &cur->ne[0], sizeof(tensor.ne[0]) * n_dims); + for (uint32_t j = 0; j < n_dims; ++j) { + tensor.ne[j] = cur->ne[j]; + } + if (n_dims < 1 || n_dims > 2) { throw std::runtime_error(format("llama.cpp: tensor '%s' should not be %u-dimensional", name, n_dims)); } From 4c0f64e30271a21f1d779ba0506a58d27c9858b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Thu, 10 Aug 2023 18:07:41 +0300 Subject: [PATCH 17/87] rm binary commited by mistake --- gguf-llama-simple | Bin 607488 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 gguf-llama-simple diff --git a/gguf-llama-simple b/gguf-llama-simple deleted file mode 100644 index bfa61de57dbc77cfb6c180b20a23a46dfebaf99f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 607488 zcmeFa3tUxI7C(N#i{h($eHE3}t*Edk_=L>5$is8>qNHS)7AS&H2m}{J^MPF`Pq#Z! z*9gNZOC(84#rxRv9KHCBhP2;6UNbEf@1qiJH}DqQazoQ>D=#jxpdZhu^(1l7eX%!l z{@D4Usa=(qmh!7yCRESO>UCpk$2Iz~v)@4(rhns}OZP?ogSXF^77pY$XsIUq>v12K zm|EaT6192KRP}jga_Q*93(`_9T}isCuwc&UkyjPYy`r$7sJ!Beiulo2j2?MqS@D&_ znLSYX#5-Z~4XgmNFI5ye{U_vzht<1sg|s3}swC_&{C^z(x97e;-&%FfxBj;euH5y= zj@JR1{_HzVl2p-z@FYQu-QguB{2j1^?)ZJc2ZWT&iBfdK?(nftOn2o>hkUM`)P3p5Bis_Djt%-czHF<6V457PnENEAEa$9K9-qCOt z-Ien?468f5-lUvaCiTDGr2J&)x4ZJsGqImVCUXA7g#Vxk?l6)63nq9L`e}FdEHPHEFNSqDO17@IPQ8w=l?neY6Tk7ciGGKg^t-Jl@~J_;?ymm%CiY-4kz0pJJ#R7bxBE@%Q)?oJ zSQC7y3H}=J?%MULiTock>34M|<64->bBCMQ|7j-p7?X0YGLf6zg#Skqyv@WP4lu!! 
zOv+gey?5t7KQ!S#&!qfUOzi3x6T2FO$!&Mza=Z!u6qEWKGx0wMOzfe`q?{Ed&%MiJ z-g2Eue~dD*hdWK`d9z79=i<5D>1BjTJ)bfu=Qfk^{U#HA9W&AId8m&a|BXM_o7Cr8 z6FCnt;eXGhKI2X5<1>+eACvNzn)tCZg{Om}|iTN8d1->p2yn&9u6 z$g{)bx#LWpTV!H~-Q31 zuXTH+#GPN7J9}e7R@i7i+bENdD02)Jnmt9;!D@RmvNB?`Ql^d0nmTO+ zM1NDp^l9#d+=`ss5_dtdDkRf(o76rbD=lkwSy{onBFJspNGkS3(mPQpC6MD>lEQpd z3|z}`qwolBFB~Ar$Ceir++Lmw#g{rKPs?Id&2Soon2|xXI0aK<$vkPYz z<>byuhjij~mLl{*rkiQ8uyM5sr=$x!5cMWe8@n{R-$Lhmjkokaz1)uoAcVf3#iEr^SSfJ$>q3TNl!rp%uY zNzen}ZRh2>5xLDZm#n!X9FsNGye9Bv(?(@wkoVwjEn~XK9F!u41rsT%0|n}tks;pu zo4Nw2qG&dgCKM7kJc|q*qgY-+AsU@Brl2gV7><+r3+vr%pkShjgBdakcfe)~F{)6f zLOUQ#8r^2kDOeCkgG*UKR!$+jc$Rxn33ar*V$D~~!N6RUl{4F&lh4Ww4n<{)FksK; z9*JBd`9XulhWeX1#iefWiGnyq-H$HKEh|_IwbS@Gw=kW%hW{rt&)OU}T9jkX>@v3| zYwk%)G0M)!cf$=218E$g^R*YGS1 zC?->`X$8zTWKUwm<7!( z_(VNrDz%b%snXn_p9+qz#pUj-;=HWV*+uhmv-0NC__~-I?r&`vJ#>CKn-dCy1twQA zDPhhccWznwqy?JL_0oTnwRST<7aO1s*8jmMI?_0ZY7=?#`6gpcO3>8=r}VsrOyHPW z3vOJPGtJF~a@b@By``I}6y%Q!37QKmES`tKC%6D%h3C=yEa;Z7WT+@`OQ;fBD&^6tLYmJWv7oG|1dCS^zI!e#RLY||5hqnL5B0#{#U5CW zCu&7wQC>>zq8gCh6%|W)3rh>I+(tcT&jsV$1qHZ~H6shvq6G?y=1=0i9O8z27Gr44 zs-Pu}1h&$`g896j^NSbc>OwEeEzFbV6tPJ;(4%8Lk{MlH|W9or5TAnl^ zDJg3NbUwl59G{dm{L0}8!R(k|_R3Lue#Dg{c|J1}caFSrxO%fn#;JE{WPB*|=nyh4 zE`&^wk}@+B$2+s)t{fp`wTEjD483XiaIp-YM?D~lKrQ$!g21VkJOLRy}{oTcl1ZvLpqIJ zjgYKJsci9oIPU4ou2Tv8$-HDMJR88*;Vhj3{y zep4EWe-@P1m(>{|E@=qR)7e#OAGItT*QpMgl!zjnFp?Gmiueh`za+%kw0K_n#fuQh z93sskfL-q)9YoBHNC*Ba_x}-(J6)O!fUvt6ZIg;sy8NsHL~?sei|HbM!tktYwftDn z?lbB7VCk+9ev8207x(b88<3WCK8z*|k{(d`=P`Pq^rTAXGkSnjtI`V?-A{U3rDrp` zuk^l3E6azWygt$wD*fTQ+Ykwil-gCgd^_zp_L2^%^zH0^NDVU?H=|3 zB7i-l^VJ|@806ZZwioQu!%RL_=?azqjDiA4^E4@5rB|`*{iX3L&E*%a%R@WQgom)7 z4)wfPlzPrd!)r77b-Mv?Q}GxBUaOWP8}L{r!GM>j{PsL9 zc}@O~!MvOp170$O*Fg8^?{PhL%uS`2vYYaDMi z;P%HkzSDr$48q0$e*6YJdkg39FyO6QIo@f&JJjb&^FsCBHiVaNGvI4lc{x!AT>6aT zb^~tP#_<>f?l_Z|A8)|zyE(tZfVcWNE*o%%`rLE_ZX3$W&o|Kbt>*R;BTsUhXHR?@lFHYq~cORsGV$8@kj&yo{HNH z_;wYKGT{GIak~NkOvPgi_?IdkYruD_c)S5`S8;~{?@)2sfFD-zbOU}&#WM|fpPRW} zvJH5Yisu{f!75&2z|T?f3Il$bidP!&5h}jYfM28HH3odVimx`{sVct4fKOEMwFZ2O ziq{(O8&$l{fX`C#1_M4<#aj$`k&3q(@P#VA(|}j2c$)#gN5%aHe6@;q81TQUc&7oc zRdMOoQ2XDY;*kb?lZx96_u^D2K)yVcNp+PDlQxF zh%D7^4fyFQo@u}bs(7{mKUc-`4fs_mUSfh*81NA)f29E*qv9(Kc#?|O7;vYGuQuS< ztN0oNK32uo8t_>vUTeVTsd$|McdK}V0k2f?76X32inki@M^t>L0e@b_+YI=dD(*Mn zEh^q&z&};-P6NJ6#ihbf``@SHkp}#*irWmhl+Eob%7FJ(ak~LOUBzPz`1vXxYrrp8 z@puD%wTe3oc&dua27HE!ryKBG70)!_Wh$O+z!OzG-+(Vs@e%{>QSk}`?o;tf1OAAL zuQcF)Rq+}F{52qvCA_++N1@+iAd~)c4UMwUm`ANwsP@ zHUr+O;&uaGtCpYc4z)v#Zcv{~t9g1iOaD>JG2&Xg37?Mt6ceK#t-eOwFO<($h@3x0 zC||_Wi+Mc>+E2cKpQ(XJO9cE00k05ndQVOJsTA-S4Me(9 zz>@^LM!+Wv_-X;q67V$wzD&T^3V4-(*9!O_1iVhbYXm%7)jR1$#Pdz?2BDk>gmPL0 z{6PV474U}ye5ZgvEZ}VdzFNTj0{)1AcL?~S0-h<pvv|j&QX8Qz76u(yISd3b@v%>EcQOS3}(Fo*Drc&$+G^@WFcNlC(y^hY0vu z0Y6>9YX$ra0k0GAC;@K}aM6#o2>4k7f2)9>E#Nx^Tze;)F1HEzIRd|5z|R%%4gsgL z=h{!FfZH{Y(%XY}d%l223it&AZWHhe1w2Z?qXpb9;1>yajDTM(;IRUJiGaro_%H!? z2>7J}E(`c&0-i45mkW5NfL|ft*#ds0faeSNRRUfj;IRT;A>eTWUMb+i1$?D|j}Y)0 z0cRVL>b2DZK1$$UBjBS2e64_w5%5|8j~DPd0Z$O{1_8fXz*_{I&gN@Btpa|H1|r=l z;Nt|mO~9`eaKC_GC*U0d?hx=!0Z$ZgsWfQ+;{`lYz>@{sCg3Ro9wp$Z0&W-Z2?8D? 
z;Ie?n3b<3i;{|-8fI9?yl7Pzs?h^2H0Z$X~OaY%F;MoG6F5vkBe!YN~2>4V1uMqHQ z0$wTL83Mji!0DX^?Wab-Z`44fs|8%$dSqAE2)MfS$ndoSev?p6t$@!I@HzpXCEyJL zezSnL2>2}m-YVeP0=`qgXA5|nfX@+dzkufmc!z+`74S|0&lPZ~ENK6E0v;*g^90-` z;Q0a`CEx`DZWr)d1w2N;ZxirX0WTErcmba;;0^&V5^!0-iv>Jgz)J)?Q^0Q*@N5Au z74UokFB9+*0e1^{g@Bg}c%^_Z5b%`(zEHqx1iV7PR}1(e0be8Fiv@hGfZrkDwF16G z!0QBjxqvqac%^{12>1#CZx!%61$?J~djz~qz^et^FW`3xc!z-BE#RF3zEZ#?bw1w< zm?Gej0)CHx+XUPz;86m8uYlVH+$Z2M0)C%>#|rrU0v<2ms|4I3;C~eG|7HItf&WS1 ze-ikg1pX(1|4HEgzY_S#dhSu#^FyTUjdI zndqKGS2Fr}qG|dSC}H$dMAM=qkj>~viSA8wI-?&Tx)0F~M&CnpU!r3feJ9cVh_*9& zG12{rwlTVtXbMdRBu3v#w3XK6)-_3LOS)j6Og#g$x4{qrWGbLWO~jU#b1OiMA8n z#^}$8KA-4TM(-f{0-_rjy^ZJ#iLPbzMxrSM7+AyT*NMJ}=o&`9O!UP>S2Fr}qAwx3 zgwanC9Yb_BqaP)D7}4pBet_spiFPph9-=QJI+oFQ5`8(*c1ABI`U;|Lj4mabLU{p+ z(YF$P7115PF#Quvp}RmEqi-TQj_6iKPbHc{b%6#(Pb8W`bb(q%k0*L0(Q6oeHPIB3 z3)C=rIMEb}3sf@tQlcpY7bs!$1w>QmEs)LVvxugUTOgg$1Bs?kTfo8SK15T9EfCA- zaH1)+7O*q==sBP%q!zF-`T)@sN()Gg{+?(Gp#?gAX8I@EL3A6VKO;Jk=vGGWAbLE} z4UFDKbQ00EjNV8zh0FqL82viYDMZ&W`emY1iLPYy^F&V|x`feB5iJv)&FDvob`qV= z=m&_NNVJ2|_Ygga=vYSINwkY-JEIp9okp~c(WOLBCR$?jtwc{Dy5lI*KhYHG3bZl$ zCZew=x|PvWiKft2pn=g7iKdWNpqA0&iOwK;4Wq9nnnGBC8b%K%dOFdSjJ}j;3S|XK z7<~cJ6v7H*Gx{u|DRdP`XY@d#DP$FJFuD)X6siiuGCG`S3Q+~@j6OOPG=-)DHbx&H znnF?miP7H^okeuV5vG5lvx#nF^k+oRCc2f;JBXe`bOWQe5uHPHEu%LQO`)g28b-fP zbS}{~jDDHuJfbTZ{XEh0h%RCDQ$*7M6v$@uqeK@FozCb7h`yC*2cz#H`Zl6t8GR?w zg+$vKy_o3vMB5l$N;HLv0urNdCAygCj!ve3qA4^KXk+wEMBh$yE2F0pO`)Jb1EVJr zO(CE_Eu+U1?IwB+qpv2KLOy{SMh_=?0nwF=zLaPR@dQd3eF4!F+6iPc`YfU;q!UPI z^gyC1loN0;x)0G5!U@DOI-F<<-3086K6*Cjr9|5peSqj?L`#hRo@ff`1Ue2g{S#eD zbQ_~TBYFkVt&HA5^qoXEFnSx&9-?a*y^-iDqSr9`b)u_@uHpQrM@o4$T^H>LNpYS1 zOp|@*)Z-3W`O4o0|15jQ4Lbv*Z^)et{c>jDIhbvp^*nlnBoTj1`C!Ih%lVI>VDL}X z_!o)%G#`rF>r}SiA}hOP&z_F-43{q=?_WJ}OaC7~r(|3U%Iz9biee7dfE-2z`64c$ zB%#wi1XHQW^axC)dUv&1&#R<*Y*JJ3A32NoM;)MK*fCk@kQ?`1CpUKXki)jf+mE>i zqkt>a0wTNG&=hz}?S86X<+xXHuT*~74YFt4qeGCDm2cerAc|$c{q-2O{d1wJEfGG@ zVK?L6koyA*aZ#<$4T(1_n}2LHK-y1L#91EYS!(B6oX`Q;UxsjQiavl4Gkyd^Q~=1rbo z!{)w+Mlg9T!Rn}ME zj2?u-{j--@*mKuVMfc!CRI-;BFMF$^+W^W+a{u^@AAbOXx_a7Ejjei>ki zZt7g<NX)hIo;nsXb6RHbXr1(p}17dccey=mAYG zWqoud9xwZrMCZ$Rpz>KgiOxUc@}82*3()1A8(kuMlcOuB=IJiwM}OM12r1d;_{F8{ zat%8oHwMCG?~t}Zk+`8xVnWN(3sBD=_po|?jl`vVfU0IfN=z$(N~X}N=#}ibYtAxJ}u!p_oXi1s1Y%}P{7aL5%gfoOBN7^QBM>&<{%W~*<^7gd|loK z8J&R&Kt}$n;2>N|VRUBuX(8=gSGl&l|A)!65(d5*xAS`O}Mx+fQ#0u za>!85`9k(t9!DXN?llho8E_=B(g>Ij5*_RU6Z%{Ayk(K>&!hr>&oo{N0 zE3Bg|A+36k`x;axIeP9ypXFJq4r`4QolI7mnvU9VrG2QZY!A7ywArb=i+jsA$L*cs z8@V7=S@Mds`N_otl3%fmZ%%%NwM(x4eA#I-X=NYm&OaR^TUzyAcO*%4>%Y6YCi+~j zNRxb)xw3CoHCnXS%dX0$j{97xjXs$o%T| zLjc~0CvDv7>QP_nCF#zDX7_*;WlWlHo<*+STYg<&0X#f?IT$Ty_75OFWmB>e3)69W zHe2Wk>2h_KJHr1@ShP!-i!zk2aI3ZI8X6lQj5l#1a1|RUA}ZJeBk2>xyvLwc+jjMo z{9{Ob$}jZHcLL#HCNWOk%RNvfxcw=9ixjulf9_(7lvdqkt@;V6JqD!+(4Fmv~}z+a%(`~Si^(2xGV z|AZ&=l0GNt$^OR((b%t2*%yMHo~hwysCX9eyeqY#n2lp>xXGB7E-ODb_o9B}|Nf^4 zNlCIab;d+2|Ej0d)Hxt2Qm%s#+45UflA@YAZrloZlD+9?Tf}X|^YehD*yf~oNvga~ z_9c~o$sso<$<)<|F}*n{o32ql-{mWFG$&QWuxl|9W7TIYk?Y=j_ulIqfMvyVFS|O( zyNXggAFyl9=GW8t^-TRb;dH%{UoYX;EA;Dx)Acp{dJVt6TE9*>+J9pM z)aI$>R~yu;xNRpPil>ENXjLz4VK^1r$*=qQ^$z_y;b>hXJTMlvS#rBGtJm@a}ThSDvbD~PGtLoaL74+Y9-mV-IYCiooL`vzHt3nOZT|Mm{* zs)zks@iglD*Q4*(ko`}C@yp(~Sxwb?!;?k$9k>A{uokNAWq1q~_+h_=-%BmiM9l8};YU_x3CAlbVfaTJPv>~Hj=#zASdKe%{27khIBwVR zdpS-9i04W=?&i4I3D|RIah&xZAL})|KFPq*Q`2Sd9n3|aq0UcT-s}7^bWOp4)m7es z>kipFO_m#X_lP^>PuttoMJBVkv0^|^cFnV?L{9kGEzcyK1orNO2f~l;gq)r&Hb~Ds z$mzY7@~(u#?soqcu(j_|Wek~GtJh+T3%r0|tej2ydP38m%eT-bdzag2I5}G=pXytI z(t`CJi|R(I)qUcJkm`=bq$-wb+lSZoI^1Y@p3;eRN3VI25{7@vicK}-3pam#d%uRT 
z&0jA%r$K5uvaDf~@cil>)<2NYl~&JDoZ*h!8hDhbM$fJ`*@tD2XLmzNT!YKk{~vgf z80&(~Pc0o`=|#U?y=+atq_#BqZ`?6w$I@XB3|_%V2$zu-g)`YHnbj+nUEF!&9(FNW zVI=Ct$bOG(esalx!Kw5+In_?d+*Ai8uGDO&r@`S$*jQdICp5ZaFzgLOO?3GtTVJzG z!R*Cmxf4?%%OB*fMmeFS+>ePgmN5<~ZY#Ac(t0oRg+95{scdyN26{NV8dH>(L{D2U z*>fbGHT)4Y-09iX8MjrC4^{z|`!;{Q^c>4%S`j4QXD1?gvW-&?PT4t?&8b+Ts_v^q zc^q(XV2uu>bD%*7vZbbD#Z~vU=|BlY-{^398au7k6&Uu?usV!_{K2Nz2ON>ku9oul zL~2P(kHm!5rI#tFy;%RFjDHc;AN-ae>ykYQ+pWGXJQ3KiZ7$4z4{w*l8lm!D?hCa+ zn2GjoZabTwUNpp#Dml9vM4OIA8(kQ6Y0BpyH`k!M`RoJcIRs^c-_A(+y_?q46>0P(r7l#QkC6|CO|8UipNVU>ZB|BjNo zTE>x4T0NO4fJxn@9C9f~{VUK?EV>7I`=5a)bTumdtw=pbB1>QQ_K!q%#gQ%)wlEh@ zU2S}-X3wF|yHFJN4#w?mKRqbFt|o8)ekd{Fy|QP7wk0dO5*0rrI;&lf zDAiPK;Z@Kf+V)1Gw;G?0k$iGkqT-`mqEc8bwLje)@mw}2{It)+d}+aStT67x2OCiR zn=k1l`HOKsg{`i|vPo9H+d#W0va-?t8d){)bL(RJNYV&PwDnb2ba-CxMg*OF<6a5J zt#gS55t@6yL%L!!S&k6>fgxVjgA6ee{Ijr**)R-6vna%I%z;nQMBH#RT(jKHHfFiK zaUU#q9+LWPSd&2-MNp}K#y5*VLUH z5@vf?>l}3vpKn|RE<)Ho#^Z0>P2<2J_Z2X&a{)1a?}8|&_jIRsa%7q}6_ft2Q0>=< z(YwF~=JGF53|}vfN@d0HDd;VM2f&n9BYP)AVf~G&;Fq0##gNO={Ryr!lrhC@BK{cE zUZyoW+Tj~@+ZU*!RsiE7e#^(W)IpbWM*a~dA@@EYH-2w-_lnygA3lU&z>dR*FrLfS z2^eo0tg@%AgUd(G*}_WmB8)f0%FDKFL;h)2SigH%VcS??J*lw$n^4w%++0O=DzW*` zLhyPq`+n+^0|nO9}ky}eN4jlt*Dy$fe1Tc7TArF*)pWoOpihX6UiGOL`^j2rIa z6`nyJcsM#B4=BcdiE8_XBqD)bzKB&`eTj|P^pgJlB|SR2tPZR7L%milChGi`#pvdMZB~4}Jx~gGQ943^yyr}TnFgamLEX{XsbjS(Y7xt3J-YD}5QGIOk*i>7&9l0pv zqRLN4&W@bD93g2~*Vxn;wb;<{#q)zqYZfgAJMsXYSe*W(uIrO16Tug~Of#o2Dbn zlcQr@zAGAL+Hltu^)m;8jkXBTe-o{*H8&k8Y5@13olTv?W+5_sbimn7yJtjA)>m1;XYA_}KKuX6>g zJPd>W7gzxaf?l>Oge+#{xOjNd*JCna$%p)Jeig!z297NeH{CQ?3d?Jbm_upUmWW#@ z-4cx6a!Gi$ zJv@!Ql&xFBQMZ1m@-CDxD=gCK zwWfLJMY?>qTb-V75wvbYAlYS2*Y{?Jou#~_G1+DCz2Pc)uBWHB;xwtp3Q$Cjp_XGBzBzKPN6d4L#A-bGRy z=KuWjuCBNSJT}q#>J*zx8G&G4q;*x2ZRMl^D<=(XI)Zr6uLF_>(XSB*@};vnp$hi3 z*7EUi7vJBAWv#Lk!ywlD+J2Kd`+fB&Ir?}!ZT(Jq=#VBP@V!oab*esg0Br%lL)YkB zf8bny=3HZmYdE;#&>0u{!(hw)AO6Jx3c7!!SdQ+evQQOxu^3^zMFpKe{Ri6we>R5k zE&m!eZ0SM`X(!<*8J902L!6o8#x_Kvm`424Nki?`V~%Z*64Md9!%3b;(w0Qb2a*DPOsPK$2o1G zI41Ew%;^k`zMs?2XtbBp2Q|8y)5)~Hq5GF}dX+{m;`APkF5~pbo+|%*PFHF4JWhY4 z(X%-n-Am=4$!WJn-@xh38a;*6gCkXbC#UCXbP}i6YxFoyTWEDb^&i9O42_QC^fMZL z8K)0w^hKOb?xWs+9;a7n^qHLAqtUcw3i}$_SLN@|=_-xx#p#bUI*ilN{Z#&+;aG6L zTciKY>CGDbBc})VSNXr?^jwYpg463Y`Xf$TPE+~c;dF*ZZ{+kd8ePxngBo4S>13;V z|6e)1N~53P^d60Vh|?o!(}ms7jXF6g@J;LHNFu*QlZjsO~u0+&z@|D^zZgs={ZkOK61$2tq; zdQ&cfaDexZU`ciHa<@!;MHlgeK zQ*IsQPNUqjl%s90z@I2b&OdNJ<=_mZz}=J^NV%ny8$`J>$_=Jm0p*5JE{k%fQ|<=J zu``U5D2D-53M5kQOv;U++*y>nf^uh5?n259rQDg6JBM;s%AHHO2+EyDxu5Z?&Q7`g zlslht-%;)Y%6&n(3n}*@<)SIKm2wwR?rq9lOu1JmcM0WQpj-^)o}?TtQv(lEj@H5f zALTBioQHCk<5&BVx+^Ftue*|xqPnXnnO7G}Np@WvB{S-VQ!=$~1SON|MpBYoH;R&R zb)zX6T{ni3tLow@iK$DVaInCFpxuTY{O{Y|K-kJ z6733A-eHsIyv7}o{&hRMx-bB>qz{&6Qb;TrH+ZMm27hP8*sHOSsltVM6PVS6eZ+cRPM`po+3gaKG%>Eoee<-!meD}YGx zy6o*`SdT7@!ag!W?p1f9Js6w6f1ifv{vJ$c+b^n#+u=O?lQUtX(>h_Jwz6ak&D0hd zD`MQ}PV+2_lFIjEb%Hhcxf8v~XT?nRW}GEs8@_d-bzSmVHugJ;{kD(y4nE6?1#N3u z&d~d@_;mIDj255ee~jCac=)IGRkG3|))&F@_P#>5lip!V+2eE;bgx`KK*C>A0mFoN;$4Uu#<;vU#v=OXN@%s-#a-mMPY_wc4CNdg z&cL)5HBz?o2}i>Y>_NPO?hZfm5#~;ly_TKO2c|K3kkRCv!H=@~@1XiytNuZZk2nt> zMe7<8Jx9W=K7!gSDc-Nvv3eGwM_3O=(4R}BMY@aDlC#3?u7nS*chivS@-B}|^9=@i zlijHdJ;SLapAk9UXBpdcZy7};T-M~yH02Bzf{>(e zIcyUeTt3j{dyr<|r~t_#&6>PX8PS|XUQ9|>CPgCl9WkpJ8?8y?h!6^2o@ArPP$rgs zrPHqx{PNWB>z=hpUDyOv$yPP(P*aJXhS=R7$dM!~??Vi>(kPeD67C$CE3Sc(jJP&RhQ>KP`zq5C4wN@K6FRKbhuI=8;`VoXN)x^PcFM}6SlOF|VAhah z1m%n*JJTaR@+0!$V`{|5x~>t^b&5VDMX^nIWbDYuT@walPR{nSXe#auYn7D=F@hm} zi1Q7BQXHM2Zg)U8*?T_#o~&$3WFsNeH^6%5bFBGOv8DE*?D-lJNebJ%i9u3?bDVaSG>!XA=e^-s~duzU;mcAz*zGU78u@E+!D4C 
z>!j5@GTVkkcL{%cpDjOv{uaT-!8kjxi^Wo5(Egb?r#2(F|H1Y<7z=hahz~A-4BtP3 z4%_J)8oK`u;dkK}8r=Ra`lILUrekz+h25md7yL223MilPyKwRc#;^m=A4r0IF5{;gW_1&%wG~v)& zS&lNXV4OTKapmLz*!;C&dl6f}sWAwg;!MwEl+vtxsf9*qW_0{{N7NiqNzaji*1Ky_ zS0@hic$eE@ic6xS*l!eu_T#Kl8cw#wr{Q#A3)RIpq!~sh=OA_&w!Lc?j`#dwCho_P zv_DKk+H~~NElOvyvfs1oykzBoXLlRSGpfbuTbLc!)qcLgzsNXg7Pc=f>_=>aTdM}) zekdo=bIe{kijCK7)AX)2JZa!b8zuejg0q0%-dO%GCpMNaq^~FY!)dhvmb+nqX$f1( z545kL+Pww!QSE5SMzzyoLt6ZY_K=k$tghGL0G8)lM8lRYYDiWNd%o+H!u(KYnr}*W zO5Bd)>hJ8@m=+d5{hbNhtX0$S1XMn3seHoFt&z8*@@K zn3GDL!JJg`4CbUPGn}4H*eL$8{9khQ4)?=2m+Dl0!q#u;QfL=fnc^WAl1G6D4tXQJut`@Y|VSRO< z?CHcV+LGu! zWqEx@^4d!B`p~I-NV{pjRbC^Hm)9MT0m+MI`JuK=jb5_F>01u#XlNf)L#{9EM`jNV z)~Y1%r^Iayv1bfQ_rabifjv_KdtSEOavwIwVb9yhp114voTQAr4}1G*3GbI5Y=24~ zc1<+SM(%}S!6zg~N6`rWi)aso+Fi zye;&4yMg}PK)<8YPUQ>!h&n(N46|cEiFEat3wM}+J+ta;SfA{j1wUNx`&2m1rgjqpOsPzbq&u6N4Y5toB2i#aa5^$GsEEDvw}vx$u>-y*@d^27m*D}RB* zgrA>;5yL+kh4KD34G!DJqS_Wju1?VjCr3v@k??%*@o|TAC)|MpP1aW@4Me04Epf)sLpp9H)0B}a9R|0Xskz;=^zrJX{uZ+(^0$;Q`*hvK?99~ok;3S6C^AM~HE;gO+!re>NyUUUC8%23BH{1Q-G&m3bM;wnvpoIUjfBo-| zM`*DZ0w5#yAQf7S`2$AdHs)g+G#`6j$oMJv+m`>x-@Z@&c0UcVZ2o}ZGQ{7G{7wFL zL(_5o_60zqzx|Ve_8RCr4D@XVdZte6{#FL`yZvns6ml|uJ8!qU?{`Shf z@VB3FfBSh0{Oy*_n!ok-L_|RLO|~gpFhB<0!coQ${i$a`nRoLt@5#5~2O%QRM!pU! zD%#7(`iiZs)UEj34dez-bt^vZ5EgL$^HC{Ul!e4!(y_tq>BJQHY3AVjS0SJ%=U7(Y zxKkGfo#n9>YxPTX$$K}eW6t}qPs>W|Y~xzEwR%IyH5}fPX}|nhI><2oS7kJEKSEV9J!fcoAHQN+r@ks)OT@5@sL z%ly)D+EWHo1-ua#ex+&ed@^s}kl{$6z3&5P`2~n2YuY>drN-F#CS&Z)8MEI8<2l5r zYHz#F_yK2ZLxK_q5@Qq?nf6}Lxt`-(uXC<@Ao%*;#6{Yx*10^KYZd3JBCgYji?o-e zbKT6jia6JD;u-`lXzxSxHvfhO-ra-qJ6(T&Ba!<0zxc}W`g%do=aA;ALp;pCNbS603kWV%`^;N^;*|Dp`Pp{2%_f zPkl1lt?BMso$(sZn8q2OfeP!dBSuwsLv+SLoH3d+PAA5zN#4}+e*BmA+`X@}=N?0X z=e`fI*I!0lq`R#;S2O3@$+=pHE0(xOcTekFPjar8IoBrQ8U-%s?u<>)UHIG3U5&0U z+9H6yWc>XnV}ILZ5SQ53ScKAe6hxQ2v7hmtd3l5Ey^nj_k5^g-=vrfCVImd0nx|N< z=a-?iPf!h-JOnQ*miuwl8?o+lO@4o?XY$*C1oB&ew>azR+Z3AK?$Q~nIO9W{@ds$7 zK8F}pz1^%c&g6`Robd@_yoF>P^r z{+ZT>01Z*TAxDv*4Sk7g2DqTNtVZZ<;#<(0?l0B8qVHE2>2%NX^bzF|-U%5PbRdd9(i{{@ldBrsc#&xs-gm=qS9CUhXGE9a_hZqOxDlCW z*xQWAycoTPBkQAUdFIXN24oPa#$psDBBomv-9`uqyk%uWHkOeHe(+jTsLcV$rR?x8 z$KXRpD^_f#D$YbB)$@!$c}LSr4ZcY>mzM?O{d;hNjlBZ)AhBZda*%YXG(#(AvSn`) z-oi_zxNN@Cy??%drd8k>im+zk$`945Y9-;fA4rz!I) zoXVdoSq(b%HM_e1#?ONuFTZEl&3J`Qlizx>KPJCV5haJ5PAI>`=2TWYXLD)=9t?AO zf>%Hns~Z1|+uGhwly7i7o-TXGv!i_0YT6QJ@uNdd1&>}3F9!i(yz-Yqy0Z+8!* zm)5b>bcB`-Ljw1JUFo1eKCL6H)r&zpz02$_?=(A(qPu$2xp#I~C)=RncV*CBohgt)&WF82PfB>-T1`92u7ppl)eXovUI|M}*lDd^kGxtdJd)Rl ztYZ@n1fx>0A}Z5Swby-aqIYC;q8IzEc)Oe1Q5SJO!Z@4K@NR8In)=eXQ+X_UCGO*q zAZEboi}lG$1KYVI(NujyJ2~EnxBsd6%D4W(eC7K{;444CW?TJd+>4EXU-F+@hYgs$ z0T=6`5i9lE?&7yS%x`-LBV>IWzwMVdaNBpUk@KrWeIzm!Dex*xl=sK(?F(NQ@GQ1r zBAEtuSQe-#P}dRVxx_j!2+e7UoX1Bp!;%QRG`rggh zXTZ}=%~*Os6i{q=93r8qaeIRgEWa;k2XEjM8SH>rmwJQy1Kkpw2(iJRFgv)PapD!~ zZj3!1^9IlflN%7j&-YIM?f(%THVuaKZAzYh8n+DURLO?dB~yArq;TEoJ};1`AY z#v{Tv-A3-k3D;s>w-I|ccBGAIP-YCh&T}8Vv`OMm@m`Duwf?=QG-<4*&05_I9m7}9 z#v^vhhTOQ0ZHGS<+zxkJ>EOTtX9A8Y1|9)7-)5v&gecZR281VVVebr)yFx+O3nlX@ zq@t_$SgT(~#xoXYU_YQ6u!66|`|0TZRcN{3b1+EY$T0?Lo4@TVQf+h4?{vJZ+0hVu zHGtdE1l8wEP+iUh)#FT%QBLKz5rPMAfRbY!vvPde8loa=ecHJHj8PFytWEZ4cpI9CL*57t0@LgT|^!`$!@Ja zN^{grW^>WGIIrXGZ9K6_s?hMg}mV+bIDn!m%KRX-OB zL=`5#0@WnE#GDuPTW!3$OZR6d(8vF0=0h@$SclAqGGWBJ?ib^W4yUYpI&JvhnGgNr zGtD}v%oACM!Z=UWI=Y(=t$SB9mA^gDOl1QSn9AD}ufbcKRPFEIZ_||MU(1wu1PPRQ7=uOq>s%YRpsV|xe>*{cKZW()nXGoS z-it&#%-4JSKVj|t&(?eE+0DXwZ!?(~n{x)RM{p|ZJ*wSF*L$bX-eIgN)ZX4FY;V}_ zY47HbNmxRAKYC7U?~#3{)?TXJf1|w#tSZ!AIumWK-vNy8KU~j$^iM;3_mI&t{q{b^ z_EPPBLwm8W&(6pk<1q|+e~0bbV`|1W?)7c^1XeX_Kb-_WLHik>q5U+c(I<3x|1xM_ 
zo*(SN=i^oMh!v0Hg*5ugTC#UpWTK}ld2j=&>OkN&8K-0WZ>7Fk(cNu+19eI_l#CT`LEll%EgnxQ&t>B-u`q;zDbErO^WiespGuW!NyaHYTVHbqvG+nZnvS=kh znJCIo7*0JLM{};5KVWjw-XZ}Wg z@i<}5sp-o_`f??FTt3*9@D=H+?PU5o17~pgOvZK+eeL~#IDe1625r>zHDC?X*9AzR zuWz4%zTW2gx*HzDU-}H`E9h@ct*ui-`6P^apI7#T z?HA^s!Sy6tzN=fiCBdx++WiPFx~8%k(uO-;usdNmGKO)U>LecF$oqoxBdp))Y{2E1 z8tXGeT8MvyBV>3qc8H7%aTY8%=T&XifA%WK%{Sz7B$)N@q4S?BPG`Y_`ya!sf5~cE zUI(9NHXojg=Y{CQ{YOn7k0AViGJViYlsoc zi#W^f^_u9~9$=!|j|8H7`>zn4x+ORLPY_+=!}Psfyl<%2Ro9RhTf0c1VYg_;CSd!DK)WHd56n_=V6miy0hu z>gYS-yN&U|c#FH;eHB+#w4Mm|C-&whePV9xcx*Ucf`vn{gLx;~u`h2&z((%fp&i~q zzy@#rQV?Zupc8eXfDOL-Mxl`aRnc397bce>cGe;H?&L9)jf8{7jW7r>HUMB74`Wl~ zt#^M#RLI$yEkMwr!NLPyEC5D9Ioe>Sw?(27y{VB1uHfLulvcK%ftX;w# zW7&Md9zU9Ko~Xyi=qsX|H&Kk~@L?8Yaqu9^EiY>}IID));Cv*o!3ht-2LFZb+kP32 z)r{uMaXRx=oY}#dgqa9KiK>okImmN&?0R^I2BvKOwd8*Qdpl;)WihN0vYR~(aRL?*Hsh)=pUVn(BN?Y=~Yc;O6Kk)vK1YEoC z2iIXP>m-{=^5-|GVGc{axpgocH3f|2NMgt$vG1I=ENAmh6VDt>2@V{HdNtq7SZ| z^gPmK{9)`ol6r&5d89DLdGhl}iR&~4UFTy8N=E_(6(T-TKLF2QYCK!#I+Jr<%DFO$ zYaqDb+wOY^YFvCjMnL^M@9#U0gxEfvL4wm`=f#wdDaA3Qsr174bQ}Gm7r>{-(802M zFJnIOC+n-p(V1+RXI`khFLPPesxFB4r7-jwOPbP*GdC3Z^^SmVs%l`b4!fsn!%Q;{ z&A~AR4|je-mdh0S3j+3hn89g8WcX=BWE|`?A~O6mBKvzT?Wf1>C7-`gEsxIfyog$` zgF>}Z<^|py-f8tAc5+4A~;e%PE);I z=Q@{jUB$V6B`$RYUxm)=Uw$8({D|`(?R-r%J#wbpBc_+)aO6mU2 zUv=J^FB9&@0-DEEFPL}xc->odiVZ@?f2964r1gmIsDBd{IWZl{griZ ze;-?`htU2;3wd#VtOtK`rF{V38tTPt(7i7>`3)Xv^k>#Hb4&C6T|56u9!qIqOgx$!qGMvXy`2QOFif)Y^>Gb{Rt6K(fjryEu1f!3Bf7*#ZG zkL0)G>x8IBxZ(ElMwb2fu;THdCk&3Tgfb~9SJOL44PX1Aa@_>@ErcJKC&T$ z|Hy;&@biqP5Kr%o>SHDr?7frpP|vm4D4;`mXF&qy5!aU2xIWAdhKu%1^}#y?!Nr8; zLp-dIEiUa3i;Q~k9#yGFA}w&0@3?cwleiwl1$s`^|8P9lGdKLL=0qnZ8aVcx`8 z?*Kd;lgSGh$MtyZqIhs^#21|&WY1$?bmB)q@$IN)Jnt~t$c{;a)TXP6IMzOcj)8Wj zCH!oy{s=6x_j)>k`k5$A{VsC2bk=d5&h#>Ni0fn)wqt56n#Vba;|%ctBWh65IwyGf&L18Oe1}?q~ZH7)vq#;>0^g0VH))D z3?U)Nm6|@z5cM&Q`A|H+64zZhL%BXeecDFoV>hIy`m~Lj`yzeJBz?4|;frq7 zZwK`OpEeWvU_It|eQeit^0KZI`kPexN1fC@!xcmOHvR*(2E}|03)Oo}GGdrsFA1G5bhjz9C&C7_1BN!1_2%G23*m7S8o4=c*yD;aoA-!gKkDRkOkW zc>e|c>=5;ny-d>&KQOP3aWggjwBfod=Nhh`qroxmurpy3^s`UZ50TdDHc-63+(G); znU?UC)`?huIqFKd1Nx~aw&V4)!-e+>tm}^8(oy`8QET;U;Nx9~KL4ew2{~8*_itQR zG(Y#Jt~2Or44jIts|xH0oLpDotc;WCD)~uGSL2s6T}?v*U6rAi)Te2>I#=f!%DJxK zToI^N{ba7I2kwThmU_s$3G+9De|s1DB1cA13_ABFt1p82EcnPy13pXGK(7!%JlxA) z`*(vJCX-8KBXSwG1?WY>i8#-RVBUoEp%5Gm%?MR?^RnN;ziA7l30Nonxsud@V@5`2 zKWdF8juB)`z9GqEGk7 z4IKodZ!EGCVKFSjMKIYY0vq?IV^Q%Ce>#bJ3ll*Jb_PfUw_w8ajT3L9q$$Pm5QhW* z@j4tA3H>?1NXbMhU%qG}m3Vy!WUnlp`)5roLzgnKT!92)`I%xks#qRCH}x-FVbFhw zd}6TbPQ$vI+kh8;Vp3*NM#h+C6mfc z*c~9Ls~@qTdShFFO|m{T)YA!KA6;N@|8QdYBst-L zwdzvJC;VuwnuttF!Uxu>TN$#?T6H%=-p8k2kx5Q?FB$H(pS9{uP)_9_=2@(FIw+P8 zf5hJ9OT%BcC9|W5P`rP`b0py=C;rH>x-@?ex`l}OpG*3YV)?K#yl$9+*Tm?xR3!!P zi6!H;R3*hh8AO9p@QIaC-;*Cve&qMPhWiq6(Uz`e3CQ1%oz_8!TxR)4@ z5hI_5Ma1Zg7jwojobf|q>>|cL@)Elq(OUiMLe}cjIAc9AN(i^G_uM|!xjx}sek5q} zQ^XZUT(tK4htBm9=i11*yu=kwTohFI>0DmU^%&=J6ITy#VOARkcjN!z4(5=%>2HXv z(Pkju%%Wq-!q)+|{Dtfu!HXAEv;P=sj@*e%3c zq)9fU6hT0rVKESNqSSbbZ_pRR>CEj=b+b|5dSrW!m$9m0T9a3C{FbBojF_q+mcdqy`i?Vg%c$=-*YYg!Jd3Pn>_BVvW9(M-ZDnnrrV=3E1w+Fo z+1L?qn&S!kUHW%iZ+n(ROWWM%(OoBauRM`)o@iS}4DDiDW(nVxx&2|y8jH)BHC7^l zomImz*Qaya_+}|=qjk}7`&V2(?9D~|O*k$e_U0l&pZpzg_U0mI+eNbAJRc*Q=(!dG zdKWoac}MmvB1!Kd%5w*a@KfUUPQdn|>ZR8GnSaykA2H zdr1R(<4KX8&K^u8)->$??ZL!A_250xX3PvAp7T+bdBEz+r zTXq!^wEt;%x%#_!%T~e9_?Iu_v%ll|Uj{#u+uhlNnWzGtJxGJUVaJg(N%W}s1+3;~ zM-QTL+wn&aXx559Bg^Kjx31FkkzdO6u?PujJ%J)J>YVk<#i;ex1%f_;^O1D*{4|bB z2;Ugd!n@BrNs<#z6R(H1^%dI3i0GfpJ*o$aGckJM?d?>xz3gKp>})~kQHzo6c_(&z z!SezqkaznU6M0{FLV2&pZ6}uZ(FZhncb0Jbzn#hZ6AH(u@-BkM@y{s#59B=(&kvDz 
zGHg(ncOF9izg6D6y=L;>{AaaioM4za#q-{aSW(m&qnU`!zDIMt_Q(@{k49fVu+h6) z+l*ud3APyp%hzRfH8v(rDl6EdP}y$m@tG&Ok&nJ;41UjC=ug4pDyMWFY&?Gg#C8$H zcA|kYigBK3pcFTwLeGPl*H0VVDZm!owxg{*Ve9Vk$C(&{TmOH9?XzK@b`3tD{eV;0 zx?{U?Cu`+!_NY^DWnf|#&Qqi}TS$@k(3mY^|Io^Z%eZw(|;^V~a(Cf#hZQxB9EG8^?zKpM4tFk=qzoAI`NC;Tp~T6}#d7 zZYqKM)92%)9~uEjKdZ91ekzgrbB@v91<>{9pBAKv-=D;3Zoe7Rs&`b(z(Uz^sL4{k zAX#}QQTZTI`3RF-@t|6RfA6D^JVx6^^evHOr6LpG5>Ya<(aA2M=Buwtx|G^Vkm6`M zRBzr{ym^-)fe^^v>#Ilzw0JpmuO@_pw=y9_aIOc5t6CFPtIqX4=lTi>m4z6NCaPr2 zz5G`dcR!z)mQIHH6xCSyiP|lqur1|L)>Z!u6aF6+#ImTeS zfy@yB5%oyN1ZIvDDk5{FKuLv|B^Brk4-V)7^PX^_Xnn|;lVN4EYA2Wk# ze14B6girIC5WYtOAv}c>ob?-d^QPR6=3PBs>>rwa^VfhE)0*OY);L`U^|e9 z>FX1`lHBwQV^47e z=D^BA;2zb%0a@r6et>1LFeUw3pe9;eKK_uZ--nof^Wjso$*01#W>eHzk>?^QQRIAR z{IU~iTwH)c125jK>HPUzrt>$DK<8z2wzBWpsy6S^xvDtVL!7Gy-MYRPagjE&b*@`D zS25=rM*#sAJ#$Y(iQUQNAiSc}{PrRB67-@)i_QvX}fVX%LpJXnj^tuDsNTN=i0JZ>1fyc-#- z>shTkU4NG*+9`9HXtR+(v{AH~k*EpnGM#G}=enA6MbdI!722N*A+&q)kM~z}evIZ* zsyp6~N?}aGV1+%8Y)o1*lM&WU4{IQyY?p_%$~}^7X~@x^H{%=m@H+U@VBG6ymhH2Q zmRCD-TKNr8X*nMXHw;WszDQBtWiF22Ytu+F>M!ue(ln!nEi>P61F3 z^}RLX+pY@=iVzUV@B2M>W_C9T)b@G&Kc9a-pX|)sd*;5JbMCq4o^$S<#K{WnG^fvk zHl{zhS}?s27hw9YbP7zfG2P%my`-SFDyZMXq-kS%DCA5u@0w(L)r7=(Pn5uV_%+1m zd119O$K>e@h);-cO++{&;g64fjD~~L)XA4(2*fw9B`Vv2*Mijf?RV69>fXqB9Nq?d zl^!18E<0XqRy<*H9t2pBVHA)XPvgwOiJ5x%I~Cqug>W`7sTO<$+3+IZ`$vt1;UYLI zize{fgTeQBFlma(N)LZYN?eFHx!Bgt0dDvYI@qoLlCWE)tZb}q1;<14$GD#nu+cvq z8b6$wb8rFrYaq;5RTKRK(Q177r%1sMbKrlb;4f70y>3kqVg*D*^uB5B#=nY}bj~9^ z9`x{rw|J?5ej{-!MblT^(3gC<3+I2{eV*gPZvcNOO=m0m}p3ULLGN zE_5X$d<)`To~V@f|H$dt2){u5>)|Kv=fz|!OkkO$yp5^5Q;J-yF#KkfO_4XC*BhXTfzQ2b zA=;TR$$G<^z2UdS`-Q_BWteO{!XY1LJ12cu zr`zH4@;IRpHFw8*zWgyz%~+z5PxEh)lvT_x?Ociset8tfbZDUka`P1q?~t2udH7<1 z+}+v_dn$0J=kqYV19Eo@(t5!nEa6BI*C;h(V66(lhMgy=GxgUIYc%qEtoUTqYhj%m zoCdM&&a!7&B_54%og(u}Mdtg3%*m5T(jg|nTt(SK3v9|ByizE8>{UY9A7R#5wH}m> z(XEL=`MLwRO#$w}1uSqn0k;!yu>$3byEmvBV1Gqo|E>VE-QUJa| zK+Baj$pO4v0bZj3w-WI01hiaf=R1JsDZmR9;IjmLlYmj8&CKZL0J;=lF9rA@0pB8^ zcLQl?Zxv;(}fW2>3PuEmztz4&YM?aJ>S&j)3m~P;}i|2Wqi`dPqSP6Y5<; zQP-W}KuuRra}?ACgnExq)Q(0uPz4I=Vg+>?m-p60{~U&6^brg!?rukHoy>y22g^{i zu&fRkdxObD9ZqM0B5Z4Fk5$-Q^QS9>Yo5aeTob{(w5kgNeuNpS9H<2fYKej>BUD#H zkr{sJKuuOq*D0toY5}DyV^seLUapbD;zE3k5YnL7jSIqR-F%8hu`NWm5bKCUndbje{G50%K2=-Ag7Z*wU(Zgdi&--3v>6qC?y(p;<|z2I&D`HpH5^3fy&6A(Ms z(FA10;{zcn-y=?}kDo)B8S*;U$706d20}I4j=}PIG$uTCmMqqy!$)J%JIETT zUmcPy)Hp%*>h>qdeirQ_y+iDZeX9EL=$+#Q81gXsE~aTluJBNG2Cnc-b;hlXk+2mX zftu_4wqry`*8)C8pHd&TjTOP#D*K6?_^qSxf2afh{|tRP3jgDU zf5rcE><<6#GR6P6kpE#hwL6f*4jh{NZ}2~Y(o*?%POD9W={2Vornc6R#tT(dphQcG4O#hOR7K1U0Zng0` zx>)d9j0^C325hf3UROb+L?4}`v&ck;uzetVMG8JA8NB zKh27jL%H*O^}_XV6og@gjg?}%ooB< zAp9=f?2GmCQR6&Pgvon$$jk+Pn+-->B5dHp1#Ivmu9PkL@DxNt^r6e!%Lm&Zn~YuZ z#6-jsI!KL{Rkg!ti0mdj@nfiIAQ6ZJ}N(K;Ck>anZ~~h z9BA`rhIVPShfrt{zSZGx??OTNV)QSS>_^hh*tM4?`8z*=dqGJJus3eN8;DfI4Vc5@ zF2D`A12K_`y}9AlhlQHj>za>h<5K}rPTew5x3Y)~MZGe+1ObKyaCU*2)dVXk%IrzT)H#)?k^34LlKj)cU@}UlUuWZK;bU-ScKig)9^Ck#8T!0JN0d~_>9z;=! 
zuGqtY>aL*rD5y`lK(m-;9>hj;MlodvTW+5xQGBTn(JmJ99n3TCC(&^g{u~`HEl!qUX=IMv<-3{NiHB zC441_&i1Q+!*Rq$iue9eZM5!f| zpJa<_(cre&O>LxkM&u@&G^S5T^B^uD&22E9u9|Oi=rs=1GzE35g7Of`;?Val0|7U{ zKBe~8Ce7Er@Mc9XndtT>h*e9xBm}oLaYXN`u&86t8jp}`gf<&sum4^$4z%%w%N(+_ zlXZyLOJF8hk^>{R-)LjxHbXFS2QI+KHCzP}BcR)t7>Ln#C!nv+`rzODJ_-zIkn>*J z(Dr-7-ys;?;@^uD|4steaPbH1$Y*q+B(|FgC`4x(!?~a6h$q^n~H4J7$t8$-xGMYbG}plpPOeX;P>q z!zc=$OP+kuEHd`yxfWwD0|c5#y~|?k2}xBH1^4;;DoGW7N1X>GuiuH+F&sq78n6^% zi;o*-0&W<5p%Nt;VatH}w6d(xT$;+@`(m*1lfhTaw8{DBi-epn-~w{q4}Rts^N+^rL4_*SIe=^R}-;?-T5fibhN+n7V^S5YfC;a_41h1uNMGa&$;qOmj zu>ZdNP1;de5+*0X+A(>D3Efultl(xK=g#YG@r8IR3A*adMlk#y>8=$s!1$ReP=RkD(uZ*GL}!VjoCgqLTfz z-AHnjPuVOGHqSeJOkUY^-1WmOKYaB%yi&3RNE(k1l^U)|sC#-7vI{(%en_sK6&c<2IY`6w2C9t%(QZ!q44c9QV7tCv@INgKQ`dJu7Ln3wtWz+a(0 zvumh+evdYsN#I|PsoeR3;8ipuUGCIdtm>Vn>b)VU-j^XNqR*nxcIPSM!;d5t#f5Tf zwMh$%@Tbwo(8@y7<%YAbHl7QZ61NKrmMaFo7eAS;Px8;`ZR>S-zw=+2H^gW9-_I1a z!yil1F)UF$8d0*?5rIt(!S^#ODUe+5cmr!VfMd(3sFcJ1RlvQZx&c)h%MD*YJ{8cA z+qQwx6fPtDs2lI5#SJeU&gFXOJ^y({;qOG_z@k|Xe-VwphPJ~U!ZkGaC;bzq=;&|m zWaT{DqiwS{a2NsTsvK=$+wo{hWXxfd*N9C+*rJb}8t<5+C$w!0w2qv!#W4TK3JvoK z;p<4x6Mg{#+aIW=`v`CB^FM|vx5z0ru`_Y_J$FhWz21MP7)U+l^~HucuRIa5=Nx!5 zz;De^hk*smP;gGe%rpsAGG_ctT(Ip;3%;0TtJ2m4MIFmB6ELjbgFSby*rR~55u*jO zG4rG%2fMYR@4*rndk^Lxr|rF+w(H@~qu1Bks4^i8Li@DfMgZfeu5t$~wltIVSY1zj zeSrPC&xG|0-!Th@<@H}uus{h7J=mLO^Y5H}qxkZ{X!YfTe$f3vajdXp_m1I)KTq{i zISbR#PvE2{aAHdx%TrJH7m!gH{u109`x?i39-(!$aGEJTFcBk3Uv~UEI3`OkEjCJj zImsx!y~HRjodW+vxo%F#)=eFonG4Vi9c?S2?(sokF(mAzg_lITHUlegqR}$K!qDz) zMRwfb95ZX#CY$h}=vtcgNCdob^G0N;y#a5&W23qmo&~7yh*`qoufp92n_Ml1QCr)r zA`afnKHxB&c~C?CR0(LWc^T^efZYGuYVra`iV!ROLF~hBM?^-|Vh0%(rT>XIm{dwL z;>?HgK*~1%0O49A`~~=A@TTa+h!xVFL!_RSj9t*{q5~3z-T(~mlE&bpbymqX8bK$N zprM4=W$+iYs{%dYwyaIjaaa*{sA7|8wcBDbtBTW6g<;+f%0YO*mV%+L3hBXRC0XOJ zK%$i>c_}n_g(LC|TmBWoMWDu%$KL)HH`V~wf-mEVJ6bW?P$Y;f;L?8-!IVBIrzodz zQPJ^*8;Ww^Ml5j`fUCQ*Av(V4QG_IVn2NHY8PWCw_Vrksrm+(!4+SG?1q!!Bt^5e# zv>-3bJRW7{=w=b#9i5F5i}11CoUV+lyv)Lo{_}w=OhiOXyso!;r937OtM5) z@__m&+Nk7|l1tIAH;~Fu8-$sMqA11QAAk>HEr<~~(ipq$$xdpW)_JXKTUD*LU{>_d zv0A{?rJDsx44OY}&){tidylw*-Dwxse29fBdp7^j4wU6Olwa`6U zT3UV)rlqHGLH-M4p*DnH7G8pHEavgwu#TuJyfpe% zJ+OcYf#Thf9(C*>+JQNRhCm+VXH6CpQe?Z0P%_v_GZi}JdkW|n3K%^B!BdzMuqENd z)OD)h=(Fqj1}m`QdcYcfR26<#jtVf)92`=GqdDZ`spWD$44O*MgbYPBauCW#Ie4A8 zu!wMbj%?n?;!%d(_C5(k#xnPyBsTpjGz8SuBllW{_GPpr%98RPcoBr=N%Py0zCe7U z^k=;ndMEV774AwPfs-6+0e?rMPve74#pHsQg$v$dZy_YtMgI!VlgRK~G}+bxv|u-O z0H!pvw;t(vOg@@4+_g<8iUUu2P`i^)xsJ)k*6`YFilU_e?TpPrR-z>pc*2QA#fYL9h7-pMaGI1(UNn32OTyWxN17G8 z02-DL^9SbLU0QGq-ZDo|GRKw#TDk^4>}p;*CBSc2O~m&0{+LRdJwZ8xqZxbe$uP~> zcaNgdfTTQ-9Us8*xchlQ*yvoq}To1n!UET;l z9!rqm>t_HcERo=ZWEpumqUtb-K}K>HP*NnC$jD)m%X3SftVFi;wN^()l9=`+5i3MR z2}F#<6R>BFMZoFk5|23+<0{didZ_;KF$rj`BXnRVw$)^392}r_rhJMkO91nHfc{>W z7G3iSI)jfVHvj_@{nNv-=xtwG6AG9}#7pc}zzwdKHHv^I z$tvKPs=GCU-;M4R#}cFu7H_d}EI)F}q@g(O&+PlU&+H2e-d5|oT59|TFLiPCxRt{b z(HraJlw_vGN+9MF$G2_AtP2sqqmRb)kn>hj&?zX?FmAoDVj4%kL72|gaS zRD@t=my!s{M+xe3l;9(PV?Dd^hzUg%Q-HU82nO7!g{qi>GEh&Eu7zgfQ8Cx&v8j8K zWy2IaDM>6-&F9f6n1o|RNKMdd1YzunQq~s9^!fmCtPZ^>0I<_%LXzdr@-Ws3n4_am z<4@9$!9{Zrxue<$<@k%(>fLOnt(C39Q)&~EM}CQF0!Drg>W}?H_J;OLHIbf(5S6w5rR48H&1^16`DS0bo$T7u=HOa zIDjF^I6ury7?Gei&t|li9~h9n@V4lR=OB|{a#H|4vk3i%T|>IR9yPt=3x9wLTJ6^g zFDabJL6ci}>4fORsy})0^OFlmdf9#p2*8V%Mz4B41p$&xt;Ql|v{oEWSgp#0Y5q>=h5gnIh^M@$8te-diWunR)OS}k540EbG(?4DC zLT3{=3pxW1KcJure`cZNfoFk|>+L+uf>5x=c7hTq4X>r3WU=aDUL2aUDx3u*p+xFY z*q8v^Q>iNn^u`B@Cp9?O; zm;QJ_eJjL+#4I{5sd_+R^-`L=ghXi^6A_=xm$WBe2O3_%m3;LINDl4z^9dLhQ97G} zhuDFg=SOH$G0LQ_+yXs~MtFx>%c8zc?SR)FJ&0q_s3da43^`x3i{Zz{mga6u0rX4KZB 
zaD~K%E5Iimz`rQK7Zu)tgy$y#gHK0RBV)j#hx*A}Dy(lkD|<3h-D5u!jOXSpjY*;L`-WQ339_*zR@9 z`GTH3xS-dM6Yy;U7Ae404&d_&aH9gOB;a#!9>n@8z$Fe~tpa>l0Zt^~Y65;OI#R|p z4&XEec&h>&NWjxcnU@vd1rFc{1?W?N83gQ4z(*8dF9+}h1?X0Q@6&+3hIlMjfIBDH zz5aZl^!i&|(CeoOSWm$53UIvx*q{KnE5JJlm`3!RtpI=P0N$?vA6J0CCSW>yO+5~@ zzS#l1NdZ$d?ouL5N5-^8=lNI1=4&W;a@O@l>o}~orMZiG{@KFcw4+`*U1$eRtfPD$r zT>;+i0N$nm?@)j_qXBp-@%T^ZqG;nK4&Vd@_-h5&m4K%a(Aufc-vRuw0vx6QzZ(U> zJOWxf71A8Q!(1OkdT9!<8F9q&C0%=`!dv5P^!!}`ay@}wAJ3|1u|#X9!e1T06$)^z z0zAm7ZziC%Qz7I41{B~@1^590XA>|g^J+$k19*i3yj}q|67UuRT00ef>HrQ_fTI-P zK?EGH%HU9-1_L}E;{awVz>^f zD5%#IRESV}3B};_`yHs?D5%F2)UOED3Mec=vWCEWwf}sqC{C+HJJTlU=A1lwT>P-m zWa?de>!zkhs<(n+p>5B(NjJx1)6=LtJ?zSX6)GolC3A3-&9`|Yk?Nj^E3}oWEN<>n1x89{!t&<8OBd;qY zI)*-l3z{7|4^yt0@tk2}>zQ)|TTQqCTMq(i)mmuC06piY=wUDEp$AI?#@`_x6TVCx zep>NS{FyE1X0EQ#rcnJ;s<)f9nmv~=De5HqvW3LVKn#}Q$AtNkbAa<5TuN{b081^BC{~6tUC@pQY=^CM2lHUB14Vh)r z4Q$`AqM8}CY2nCeZ)5r~<*$9k`(LR}K(7)sD6Cw79?E=^q!FYEh*fz4)M-{O}#;%a?DpGm|Ea6hZ^yoR=~#YB{*jsIK^G-t2w zQvkZ`i_S(Nh30XgOL_=-qx(=AJBp$P%11chcnFy6WU%592}|D zEQ0Lwl^XY^`O;%zGA(TT&@3p)kA^?jjID?+(>#BVzhFW;|Kl54^~0zx{s-Nxti+9- zcn1?BhPjA|y19g^RiT-#M_iRgr03dSbWbZXjfOl={xMpxl$7C)x=63RXLU;(t3By@ z89L-BD(o0S_~{4fciY+B(#Dl$-H~h&yA=%;i&*S*DPsw1Gmn>c1+pZmk~PC7%%w!L z^??onvkp3li=0tQ4BQyz)!82N7{v8zLE0S<(1M(404iLC$F$gk z__aD}n9Ed0bD$3g$08$e5WpIN8Au6j9JfC6;y!fjVfk_~n{_c4INQ!x%YS@wIU4@F$(3mtM9mgjeJk`tPFaH)8rxRCLF9wEnp_vzp{Lx!u`*>qY8G&Kx zT1}dGKMROak2^E=HS}!|r7{79kH5e+tM~CXe3<85jdZEam%h#0a3Iec`0$XCk7d^Q zm(k`|?3xU1G~xj^d$lopV#`sYSx_9PKY+v2v{8SLJ-`GA3gHUkAKRos`4hmDUNcu6 zw2ft3e7pmM+;7fI$(@iLA0+%28;jB`46Z}lxEALMkt=%s<^f4v6tC;~_?pd_uVN?i zja9&QT3{IP{w!`h_`pqJVBKkjfgLSH+LLeiO1EQ)a!H;iyrbds(>&=r0y_@t*jm;V zK35XMNZ;PL<8S-dIm1TP5PK0WJ~8RUo}fPwvw#+kHnemL)N|)~`Ub4>_JyyTb*aKiU1^6NX|3*M- z?%eDEzN!HKfeT8!mw=ZQVnohDy~`b_|5i}XE2wUmXjeUi;$-fucAzR1)Ljbd04B#( zzb6#u&JqXe3I%n&g4#)_hXDoQx)s7T`a(ZOoH+4;m+9e8WR9`6^iL8O|HB+^rDL*v zjK^4q2OT@W{MZgYKNF^b50PMYhs-ZQFn;g$@gMj&#zy}roW{E)GcXqpid}(<+T64S zpQ4e^7uooG7W@VLZNLR+S~2X=M*jR zgo~Ep|Iy~6S?Q`=hM@fY~iDax;z<@1j*df&$Hv8}ML6Zu*6{VOQ6}z$dNIDs+<0 z5vmg~l}2-ZjDgzgVSsUE4;vr~T9Xby^ere{(+3r|!_QcA#Mzh(GtO2ej6n(BKqD-uAC-NKmp8UmF2Mxr z!O`Q9X>TVj;R8Dk4c^49&S~+JQsRXI4K5v@Y6zYJI;u9K6JO3gkv{|ZV?<-@Vz$ui`PC@U(d-{?j?Qu;Au{yTVPIkg1FQr4 z&g>5>%5o9%85;qqo5KLIAy3%-EnL9vPxk}6FDVpu9|8F2kI!Ps8TUKzzftgyDfj^4 z|ES=z2)~~dWB;!MKV8AkQSdp0zgEG&55h;A75p#<{$~pQLIwZXj{$$3f?q-S`xJbp z1K&--pQzwpCHxHvzJl=A0N&ECQuL`|2xEiRipeKaH)mubbcX8EAam$*zwe$FINU`( z6rfd;zhO0G;OL-;XAngC{(aa!!2P)UxaYP(Z}{+7BjT=tE=4(Kj+~44jmQFwr7tm- zK5LB*1qONWlNw70qfKYFwT(7|XQky~tYY8{8yi`CRvP#@&9WTjGKv=dcm}Dmo9{r40BSC2*+UfH6y-YG&b! z;0`z^X1cRsE=!EhB8VmC>t=|0B-?@|%Gx~Kw{4Z2Ac{Ky#W5&kcbo3kLiYn7^q6NO z6}~$}{xHEr+z7Cjha0R%;J_;k9LCxB5=#PQuXHo)qc3Ojr0)sfxMvurH9Cvk7&|QbSrUTy=;4tmE3)G|P76R>t57r|5V2-bJ zUwjCw^O#fm73J^sf9)B5y0)078^y-qN8$-#E>!(3P50(+&?2v*;Bn^EJ(*+7if42A zcGH0`>zfWPD1c83K}|gC8}04c_#oShgZxim$MF(}6x(ZlMU|}$RI5TFGf|bDc_TBe z64s@ZK-5_!cmc$UzekU48fif+GeKC3P9RYb3=z(=zpm}M*Q&ENo?ClFTKuC7= zfxUZD>TRJs8xPJlA}${41o6GmHO6$MCjqWb0gOKZsds23rCvUy-rUoGnaI$7DwIb_ zy<0IzVmqOOsqf2Ry-NK+tG$oy4u6aDJ&ef6M^2*jJL|tr?v@t+soh?z4l&4KRL|e0 zYZq=q^EExNV>hOlE_s*^yON(uH+u6o&l`7dVHB5eaSY%d*5X^72!Rb~@Ta@I=5czY;wNzGZP99$qGkB>EDR4krYGw}xNuL* z#E3r8HaeWxhx{2ClFt=**)To*EpO!VJ}Im33%ud^dETz$z#V_^|m$n@^_Z~qsTm) zubhQfrY{+9R(yfbT?hCO)`tK-)&g!pC?vj9;_&tQ4)cepVL3AWM0ToI8!n+u^Jf$d zeh(YH<7Z$za?uG-E55IFVY42n2Q8rcr8j!p)*Jcn_zN#Gi{FFnC6ml0leBiqB->Km zxn+MA|80D^>?Y?nkMLCP_se+lzc>g6y>twE&Uoz8z@iumaWy-B$=NZyf7Q$zuDJ$? 
zy&65mg!O^S{77X&0h2if`J<3;j&*@#Tbw*3pUdQ9k@`|{YA>XsroXAep0%?+#0v+} z2&Y964o(C$V7PTVoj5z?AYmiD*f>3M-pBHE-aB$RYqKN{yaoso|EFx{M`>t>yd z`3&K_z)l#&JnjPdk_Ce#20`10k5Wz#N0Sl1p+5KUj^Lzmf?ZXhd_tNQdLauf3Di}x zV_N8S^|VMm#qs2AYedkP%bVY*EuIM3@1Xb`4kSTwuPe}DP$g3?y>z+s{N-sxc*bP8 z^t_B09LQ7;QuuLScqwkC%Z(s!B5#Ssi|m_`_RS#s=1lvhw|&#YzBzO%UytsSHZ@KO z3CN1LPBG2^1Gf4;?gvi;1?;suOkbf4g^gIMw|Y6GSpHm;DgE`;k-KxvAAvE|=<{fQ|YNg7JTkIUnz9 z<8#%IR0m@%P+*+#G5TbrMusi~m>pIU{oo{|L9Ypp%Y=XkZ@}5c6!wmiH`)c>%rm>< zw=Wcj7imS76+6zlkKRqPMQ~PgYVz$?a$aikR4ch}YO>Et9`qtwkSP0SNT#^eYWqR4 zkZJ1^gLTC(YPWtrg1+T7*KS0CvOs5JKLHhzBw%D4)XZ__3K7+XSc$NoxK3bCam~cl zooFe$na5&&k781j`O%YTO^0OcqTCam#Y!c-ohQ&14dO`8PIOdalJ-O|;>$_BhyJi9 zI#j*dM-R)ZSa^2!s@B=72SI++21QL?#eCA)t8W0RL#;2VSNmD7ifHQC;0HQ;HON;Z zYbn_T>8lHH$DUj-8S{!&YMzyPnN<^_ibU6iYK!dy_@VK&2&BN8A~N(B$6-E#()%Xv zU*~=4kl!S9Ul`QCqrDW1>vge1xRu#rSOyseOWaT5FbbxDr7_th6OGHTJ<&HX9K3K; zly>D8Ya?ComTV>;XU_Zt= zC@cHI{3*g$_i43fK|_G_818vJ(|?j=zFD>>rXe-io>#RlEgOMPxyjOiALxes>qFs* z=w;KXITxO=;VaflePelgYkmXjJ3%yL_-$&Rf-oCgPYLGOtM%bC*Gv5XUWfYc1W0mz zhE$KOgy$=JVF!L@Q@UGC36+7>J3wG4Sfb{G6V+Gdu2YEWHMo9>F?>zO_Hr@D_IBym zUSoJgN_#(mAnmlhJ&({{t0~3Ol#l&4sivF^w8(nQ82)I-_JAhRp4q6Z9sZ2rQ&ZZ* zbF2T8_B8g!>V|R53?0O``}Xq&g~0+W-q9*DkOla_Yu% zkqd;@9G5$KDx68!|3R^*1&>2bI(8Bl@OxVYu<5=CNu_is2Dd*ojIcP=ux=xgW%@{?`MSrd_2Kz6BLl|&c)d6aOTIf4u7Y63JV5{tnx{Ay)%#6OQ zQ4!Z*y4&Gb>E`W<-Fq<6_Va{iBeH+7n?gB^6EE9$D82Iz`QW(`^P?V4mrm8APH&*8 zkB>WZXL7vn;;|T0cpP1kInTudfiaC``OLChsRa*xp+(?KU!2>{6dfr8yylfd_<*H% z(Fp9L^zdO4+kj=*Ff8z92-1kBJAO{GG~k%a35^XE*+m9_eKWBltE4uA>!RJbGF3uL zjEJ3HQ0%R0T7f(xyqvhh=|(8s!)6phW1C#Shki!**HEMuM>#9#W??^ghFhT26w5>dEy+kNPdVR`!6_Q|m)}X8#6rDXhUtHh6hi7$6;eMAHSk8O33mX%b zBG9{MUVjeMd_C-PgF_eOpsYx*o!Q;e!ZEetx)1cmTG^Zbg%qDcQtVOD0^|-KO2DYco zpi(@gtOu<3{bhbmYOg`^crEgH+mWXdh4>=#3~%J2#kh@(`2^t`9OX$^5<8!5owB&CjbfoY zf5zw8k|H5Fvb*BGpqK&*ykC~9Jecv^Wc@+*3Bct+X*2@juo3mbQ3j#3 z!4^uol0*G_iX57nm5@W)u_A}`{1qZ{w9pbLH^gv1I0ZO39Xq1Je_kkb8Rp3O;$10A~Q8s{^1ZfY$+_3aJDPC+dEg;6bX=0Dt35 zTj|-a>U>m{R}5IogPyT61tyLy?HLa_Nek6$H>lT6;cHmL!SxnI|LpVSBv_?tXDdV) z+)GUdu<`M`oS=Ivp5>7O||dXXJ#Gpn7?0%mm5EP{hWoQcw@t*=e*bSZT49&?9E7P`u3_u z{J+B7txfT(-oXER(6&6`eJfpZO2v0wK&3!qwz$=>l;1F~=pxhqH5h(jJxYTZ>msW! 
zUGr@;=TFenvkq0x_u~-8&$ZCaq%hAl^0JEQ~5t_6od>DLJf_KHvK+}731wQ+^Pmi4P zA+*tW4$ph^1~z1Rs}Ao5sr+YOWV(hx3-W(H)*Ny+J6(_7XZ-E!3~oBx{WY)?r#HSKZa-?OuTc>WzZLyF1KprE{r3t33{DN4QDWfInchA7ta} zMXpW6hKS;Cf=l1blbFXI>a*do& z0@a3`-ZR*T<2t_u+U|DEkMJ6}}DRB-p{C2HGqLJzp}xCNOGZB`3cO z$>jtWXmx3Ko&mZfTMtK`jaS*bq2M)=qO&)Je}GT;qvKidI~!Qa)iw%(UG9j!8+$iz z!K&jk5B9*%8*a16yJ)gRbo~~HjqCP!gn+!|3=6pag0{A%gICn6UM#~4kU7^A{G^x8 zw8(NOJ$1c&q((;cHX}c#)t0b_Ah<1BK49-AT8!37?_yXGDmkLnZlzH+au%mfa6(Z5 z86_6tKuC<2!SxWRp?$hm^d>$d?bST5Lbl=QZC^O%<5MI<`w}|)-6dtD zTr>~H0L=Sbn`G1Nnwc0GcOmw`8-5!@V;UrHl7#vRT816NUuNoo7b?*pNT2udD-uza z7U?yu8``@G@Ad!&b@Lv=iGYM%W|I@&0x7a3)0;o397EocXHaVg@3ZLx-rAq3=U-gz zKUqiUNdE4!UGd|R`cGhlQf@+*w^;~4kSpe~^$7$VCy^9Gd-9?X&`nZC%H?PXSb`82 z^xzs09X+L{iJtDjZ%R*Z$7b#@%ZcYv>H@Cg%{P|-jXs> zE=TL{vOdz^Icd^g>f4?5*L0!3f7ag%&}#H|BrmGJKa-S^ayeRm>#>U& zowyGxW%PF$I?-8wpXh@Ax{wD@gQzj@A?W{Ef5)NK=&z3#)!#9aGEy!_>+jZH(%;9v zlm2o|i2e@2A}eM5zJOKVDo8Z;w}hxM?;&WRi9;f^+TX%3oGgz#lt++P$A~=IQ??5d zh=&oY^$d<9XhZ(xnh~(hD)A zuh$}b)s+4Lr*tpOQk7^_nt!ykosp?|2r$G7?+_NSB50SPn_hFNz!~8ImU{DJ-~h*V z_`)q-+P|W|Jp?F}CeqA5rhRFDMQOG$8*ddz&_sc6(wd!Apw}u;#6`yfuVjIiv^Y#q zw2+{Q0z>TzZ2eZXP{w1&0<&14H5~t96-dxTfkWCA_?T7TGC-wuEbxW442R!KtO5y| zDDWd#xs$tmxmBRWQ5Qn_XphI~EYxDE?pC1$PZV0)uF!N8Dt=t749>^f3AKSb!TL5hgvF#vld@p&uQBSOk7*^Ob*IvdS`xRg8)fX4|78ZPm zR_1zy^{WmZZi~*`cbEs5CipSjit{x6UYMiBP}$VlFJs0YEKOT;aDZj@RP<>txy+%IF%XUK=)r>|vnDj3mRPy#?MrPo_&9{2lDjSs2Rei`5Fc8X+@DzfPi zEdTGaB!pNS?p7F#hUlwcAWvhpctW|JT->rz>s1K_y1r^>`Cq1B=n!HOt4!`sA|5iZ z6DO*xj|m_u^a81@0Kc$I;a80!)IBB>KjZr0=W?BcuAq6$43v$+FQ2txOnJIFCX+Gf z!E)3H%`~(Qo&s#ubW702hoD&E>_1u(`ZAa<9H>MIgu*x9Qb3}IPLq5!;yTHHU*)R- z)Jgstm9NG_C;1Pm{AEX~ze43JPVMCV5|zIa?=wyWD^x_rU31%aQU8l^;D){s5J~8~I1YmkV$TU#$T3l};OBgxwv{`VYnng#)x~ zb(+6c<@57P>@IB)(B~~4qY$Re_DHorqLRUIQiod-_=3tJ2PU>vOC@L)cP1JH>!V1}~P?iMvdMP&JwvavL!=tJFNhr8gJNd-{J^Z@$2|EPF{dpNE8~!li z(x5sJ?j@=Rzl34sqsCN_=>YMWkGeOaS%xXW9*C;5;(>6wb@Z&yK~wIT{b%$J3?%pz z)Da?jK91cl`z5wJSqF}M7CndLw?gu;2XlukHlv=0+wAWXpp-A7Ek_%$61;N zA`9v{0K%{6;cxWt8h0I^B4aYmaX1JsUB`*QG7RC9UnFo?D&VRGo6#EBq+v4%jr4IP z)Q=R^305NxK-YhDX4Go zCV1iCO_S}{N$WK+qH4j>Y?@j8N@nF;9Oo7*tZJi#(jpU4h_@7*X_oGtg*CJ}yN~wd zY;2`$#w8E`{R-3f#aDmtwwpnB1-k&Gtwj0!s6zUjK{$dn=$^$swhqj=`mKz#!nItl zYk{*2q#ev&?4GGIH)?_7k%Ur8tWr3MsZO4JN>V3>IQEr)(RO7-hLN^!XZ^3J2fL(a z*zD4_L)azzBVm_eG;4MngZ-KnXcJHi1j9~35~cYwyrpl(PsER&f7z_E<2*R<(KCFh zo8kF>1Y6C}&3UA_q9|@`8e<`8Q9X6|gU31YZ@^Z>@Rc(>u!*oJLEH$U!VFcSTHTB+ zlFPkHfqYi!UTc>j(J(_qmSILXKNf|yd%%&*_k@?q>GSh(?#R6ujPNDi&4J6$9Bns{ zNp(c1ptH)>{)oGfx+hzWJ_ru26qw0Dw@0$zjPD2fO8X4;R?uB3S?KcwJ7o!Oa54#N zqtKkCn^o#QD568Rb^w7}-bQI~Qro%pe8c{rqjLOv?Q(+dR^)mUMJ>l8Pb5GwoJ2er zql0iQ3boCzfHCg9_=(Bm0U^l>&i^aJT+v9bN1zrDyPU^^V&?I%%X$6q{e$_hBRWwp zg}DUbD%_q97FYrk8@%l#)gRo5cBz`1<9Y_pc3=MfvX4FKU+WM{<_Zx6@uj+{L<9NP zW3CZmU=ueA0|@(|)jo({4%(mD{H)ZIP817Px#i=-DO+ZL{cmrXg^B3kKfvVHCW6|3 z;=bJ`g1K#K6G7}&+!*FFBF(VdwH(CG_|xm0@#aMTbTXWwOWkgvuO$)zNTD|G4H5S%-fc@OeoHkYNA^Lo7!w9fZ&I5grM&v^4IR*JmrF z?oxLFNKdw$p_gv;mA-}k^=jIV{(W9jk5K@wLlK0o2XxTgf~TVJw@he|ggm?s*am@Z zK|KlBm4HPSh6EZU`MXo{dFGFdonitWMCsXbgRwKfPRYd0MUlP_p@g)t5zj*i(s3>-7@ZK=IXW+1s{+rw9`P~aV;dcW&Fi@AuE`I|9u@VJqcQ<(9)4zLN-ph-M-Sh1; z_QytyhXkiVH|2=nya1(pv7hnodN0mezPll>VG*R2RJl|RIq>4hA0H!haX!UFmwrxrGo&N&brkEg--?Q2^T(1 zQBPB_#L6@Bch7Es;JhD27v_IK;kpPYqF^OOAS@oWg~bX>Sga9Y!4b37LS~(6*BTkk zy6qdmmHeVb?nX9ozlc}N^&CfevvGE@O2YmryZy#wgDp2E7qiL6iVOx;R?YI=9Nlo2g6QKc>a|{p+}JFTG+U*Xt|9< zVA-=Uk0}XMN?L(RL+gm%0!f>q(x$XS>z{$ol!*z*#3;v*&>|90+#V%bb&wdck`3T5 zjrc=62&!={#E-Es55L6=S#QZgsTVCqD~TIih#OqKAKLzm!;%shJ1cp;1}RE>cpoP| zl92XUXcuaJ#!Vru)&3f-Lx2-yTADu#!MnHE{D_U)Xgwu_+HZrGT;1?vbd&aXllFI; 
z(sXEYebe5_o0@h`-rn@_Joj~;`+~3G++3&0 zMk})Ec(vUpp>N4}sUp9*@QX2Bl_$){3!e(q6E1w3qMj1tT?V?Ev9w4s&m~6Zl{Op7 z_^|84_*7LCvkHt)RW=(@kt10cpQ?6baeU(NIgC%9ZD|k96J@+#GMO)dwW^ZFqwv;B z43PO#Q};uWe`*@n`If7iTvbT$)PUuE%M)FfH1I#uYy?Q!6qVK<4IG~sk}KRTBBlN& zT+mKRY1LIp$&UP%_Ta~;0i$LY*ML#8OKPMZv=A*IE^r|(aQS{{`g01JI*dp3|Ki8n zAr0eEtE~mXFbt_8UWFL=sTPrTr%k9y(-Pkq&sJ;2Hq zA*ORdnm-3Pt4NU-3V+MlOH9Us$CP~H&XvwzBb_+QE>0)TvP;sZG(m=?;|Ce|MLK?w zOc}%U4(8BI6TbNY>diMMy z!wV*=TzJ>BP$9Pp0t^odw^fi{cu=UVg6P77!fX}f79JF0t01)SpzvA+*@XuStqo;& zHLX5p>MQ&m+n?W)pPqvTcONM1z_@|46|SN4*gJhh&P@-CWeus?HiaIqxb=N6jgM0>uc zQ1nofg`%&nswZYPiWd5yaKQXgz>qz^V$#eFG#$#p&)&(!0pT?K@}B(0U*1oVdj~6< zMDC}~cTJ*BiU16Eik+zm>{v4f^%1Fm@8lAxbgEQ0Q>v3@G5JsqlMm$}*`5bBr3g4g zq8{k zy|JfroA!;5V5)fW<{Q-`!X({Ey^YVf;eZPEzznK z>od}{YVJ2nw2Zl)r(JAxgxP^^%tmEDLS?Eq`N=*1vE(C6Iw?Q7*WD!_+ijBalY9JG1ABu_yh-`Vy=|8K*S=Hvu~%_J ze(sdPlE=%dlb}z(I*I!9tCNbMPro{;q#K7j)$ANmvvZh?xu+Y(ALpLtvQ1n=wg9R4 zhKe_TtHaLzti(}MPj^m)JH3_J)%TpD@i(S78~`fT8ASA)nauWUA8td}{w>k`#HJZGb7Mw`!_nVNzGELzbF1nz?>xb=D8U zwQQLG)C4~^Hq4gtA>@PB46>6m$YX%BI5{W=Ek919h(*vMQIMSZr!#(bfxOV~Txc)! zI~Upu{mwN7mlEMye~GaA%o1VssU=`_^;!;Iv-Q6}wb%b#?OA$Yf~B4I zkspbqxh3FaGLY&f7O@UL6VF;TPl}WEl?9(|1ksJ8*MKRM!L%HYWMzATWL+dG4wfe* zl6R4)SXiErNajVN;$eA0BDoidiizb3iDX|SDlV2MB$9uTsMt7Y>x){*Cv4m{znmu& zw!yx}@1YNW(6X<=TK>MWuiZ%d+KsfY-AMb|jmo~}Ep7Hi+(90kXzNF?sb#xXAm$Id zQ#f}Vf@4?qu*aP+7HFejq?JjRgEr{RsS z{crmd&^Omxa4Hm`{!~XoS%@JL@nAdszP&@C_ys7E=y$Q~lSuU>K*^m2o{Q=$Y)`^K zY>;g_SQ6o?u4#8MbR6VE&~0ow2*+BsXg&kCX*JuC&w;(0*AUK0RaRTup6ee^>5yz- zO>YQqd;m~+C~rT`reBx8dBIsIG^+*+fsImwQyr_KaUMFrwmH`~VJ@nH$$*w)kl=8P zEzFM8;|Ty8wVHR4Vv2X7tv)PQSbYhtE#8K#cz=5ewdH*!3krOVZz0C6{JO1eC0xwP zo9fY?TOn>$950?;Ij=9yHT--a)VcKYh3pCUWO~`zwr4WWgX5JvsFChOe#lbkr1?dj zvW)lyK*3iDS@`|^l)!2o8gKb4*S=x-D^&$>Q>wLKgvL$XythRz&qn34mgFdcFAV*m z4S){GP%F}W%C$#_WIcTvG_25jNx5uTZoh=PZ;2G9&JF~ynjRg`VI|&8@*FM#a68{1 zTL|Bv?L3)ZhBF;-+UW6OKjM*zw!ORr%qsSBpE;lY!TIzL&ZmEHzVZ)xOB=nBzLm-2 zIWdx}vHKMIU5@JsISchmj^~}!YbsHH{65OgMMR`o&2H-E9hq{moTc!H0xZd2+9HqD z*{MJrA1I+uYAV;PU`ci~ZT_vQdnYLU^ncnvYOJm&61_AK_;Q5)r8+;^7_IgJ{6a!u z``RtgC$Z@v3ui38wajKJq!%1lwqjLh;xPTy~7Cx74F;kHVqS z-TnQ=%hv<-B<9zKxfJ5NCt`}o$+B%wF(qqp`h-^f5LzkkZF@%AmjVwp0H#xy^}eC( zPzDU?sz5{8qKuj>Cqe8{MoqSpKotaK4U2Iw0g6c7qY zR{-P}hGoaZ{E0ZZX7|KRP0@+ln_4Em=kyn{++Bm8nPTtt*8qvx6*W^DcU)Qbe&deI z>i*ui!&kStaYs=dk5h(I?d{pGHhnw^j*Urha7?Oi{Ak`u4e3p-lNy@7t-yBsih2wR zR_w{&twnxMir$a2=s9@yZ`bjS9JqP|pLX}3WZM%sXEyG*c@;ie0b}asvM=<&!S1tX z*!m8}707WAmS`8C9^crw{GmQW~XnA(xeHd*30l2W+?i=h3h(woG{LxyGnQr{Zbsj~ z)kxn+Rk|MhQy6Ha)rd;i5s`@a zM#v13T(rxcUR4!~-H)-s5!bW0V0vw-WS^D@@J2kMtty%b$eThO1>JpFG9H)Xg|>}I z*lH!r!drSopTg>QOKJ7ky`^GO_yVbF=TlFIl$HD$NcTlXO;K?%%cSnbDx?Q*M*#_? 
zLaV(7)hNFe&dZYNI$oyyRf2Qk9{1wtM~5rE}E9z+R%#X%ly1)yvR@Haor@8EBSg+JoW@^{0(CkJjJ z6oJS?ECMeGM!@ChHl7B?qjZ9@CzDY|SH`IvC&U~5P0;@_|0NlAlJHZWk}tTzJP%;o zf`eIJo)m^K(aV!UQ4Ns;NQI-C6e|s8e92Q-s%dAJDGNiID49f^h|BK4rFjr7ctRZK zbX!YmdVAu>%7EtR)5O&Pna}-dKCi%hUI-q9&b02bI!qPoe03$5D%KU%6=SMcH?q#C zhEA$fr-r8YCm|k0)Td2UonB6F+CAyzrmrhD@n5EOdg}}KHNaguJ(CP)9b<;WOlDd3 zuUt6HuU(Z@EgI9NGT>81H+G+`@QHk2y#$zCck=_eXwk1%adJQ!G*P_zfPt9-PsP9| zAc`?JNM5$e6MPl}pOPm`R|0(Ey%Nk20@)X3q7-2}gyI38tpj`h{c+foYI>grJbpEK zsU^k{Hc9g>QWjfc9P@M@QksJjf`-m=L_7GE)rA^km{PVP*Fet^9<6LQT&_^iX|?C# z7qJ19kizfrX~N&_?VU}CujnK`hrE7t8+}8Hd@vAi7+i^}lxQ&08}N15?*6kJMz{67 zrh|+xBJVkddExijx2o}%oR51Y2xxJa2gU7_3dm8+PN{$g#p{#`cu=fPselK?>68k1 zNVcPU^Xto&AXOM$oUY)L0-!Bz{xllXD{+@bekO~QEl>7JLoASmnMfg_oYPDMLo<;g zL;0wgI7KCj0D;HaX7)!t@c*$sE%Fp<(#gSO)!j(KaICK7PRY=PNCen16bsDgS?&e`sXqw1SC<>-?Stf6Pd`^=8~H zwSs&HZq;fg^b!Dj_lPn$YBgRYLwnr_{KN3Q6}{GiBasICyGlh62Ns)Z6%)JS05;p% zvaBBObVslP%r3EV{D{SXluNR;a7uFKHxX%}G!bbcTeJNlO|vUPM_o`hCmRH8q0rIz z;>iaEKSZObLHH5sn4&#+DDR;Kpnk{V0{tQWU+w$z+(4g(h*0SjcP;Ef)sAIM0(PK6iznUZPdGo1u2E|eIRs3 z=_3#>bD>ad1kGl5qvZyc1*}XzeV!{*Yl&f6Rs}WwXt}Y`Vo~c??Wxs{UO$BW24tfB zM#z*Wm`s|Fk_=*-a9&wPJ51k_(mt{80J}e!{f6kipXlEEFrtZ+{pK8?-5MJ}_Z-YC zFyW|#_IU4L#+i#TNrGW;_6(s&$DSi*9LU-wPzF#Rl=7iCorU{jFEJwn@P_ia(G_I* z+(^ikfL<^Fg-HZy$vSY$RqL+aQ_-h5;}<9$8H?St0Z558ADw`^xjDI2khru3q|Lto z5Ml#rY$zYN?$%u7-kXP<<`BR4&A<4MOu45&j%g4Qd{vA528BUzaJ`I1_C)$Jrq`ar zW|xKr;Q{(0jP;wSC(hzyB=-)exc1zj)oy^yQs(jXs7VjMY4rcX+6bdnD~0ndI)RoT z*XZBOqq1QnXshHS_L;#ReK%7gL+JWX)4M(+3v+x#513hk3MptjBjsX@%;Qt>$O2wU zMoz({#5D|&5+%kkWWnD*3|a8^51Z0-D93ZbGW;4B)Z@4Kf)@Pty+99qiFow`Wsh^( zY?1CAmS!0FTgtLxCnE*U8n#Qv(U|++GSasQhYw{JxFD2f;RvuI+K*+P(^;b~RRk)GlO!)GlO!)Cl`{ z1WTdr%Xl%mBTf*CefJh#`gZpsyvm2s|KAc`g{B+$@GYt9!@03pv13SXa2gW4;a2bd zt-kb)Lhdu!qNMfzOT>u_afmst7jdGQw{26|XWl@oP1JSl7LYT;LQe8}kVthU-uJwy z$orBa?;01$`=TQ6ONzW}guE{*^1h_VyGF?Sq9X50io9!tye}&9zNE;zM#%f3BJWFz zylaHKFDmlBq{zEQ$orxmXa;#(wb11tFFHW|j)p81jjM(thGs2CORJ*YopOFvl6C<4 zrS$g^EO}~34LA%)4LA%)jqDLMjeoeZu4Lh|#y?zESG=&k@ejT_V_{3;ui%7Y5Br61B1ai(3Z90@a^C<|%Z2wabjM)`WwKQSSJw;d9Cv=tlldiJ2 zX!e0|bp!lm|AfCR$>;-WfcQr+{m_sm*gubr>8FN+V2Oi?Hp*G(MDA~hk(c?&rM4|) zUr;?jhJ)_eGZSMetCkqJi})TUdOX>7jgMGeQCva0h7ujGPZ+MH0wLe+!?mp3ZX^ABsmuTixQYx@2- z_4LC9`My6gx(_L<*%B-pWp-PiEvYTMz-d|S zs~b?=SY1>%6{eu-kua+kG*%CVS#?T6-n}vDJ!^Pvx4IM0PHsP|=V*I9y(v2B^`>~m zcK(Z@l@eJ-+l3_hETF`QxN7XFi(E2G*{}Q7(S(}2+M2o&_Ey4XkEsw+yyA>Q zJ5#H^6BSViE(fSD;;Kw;A4yeGCzIOLx+smTeOEWGo4l%IHD|c&;<}wQwHB}D+y=ni zVuoF<_@ZvF$`hU_scThv!VgpH4l!@)YT<=kbYL1%T_Q<}=Xc(dLE{94r1XJ;eEuw26W>h|nAJSczYgpDi)}qo{kmg3`r!9BaF~wbn=0bvgF#FFIX@zXpA(GsY zc)QUq*b;w-nme>fC}@Nm*tbD|6M3TAhu@j2*#`e=);eN!;|?@<)iPdYuBzw7zp90o z5v!UG`3*5IE!K#I1S zB3Tf>s`3F+AHT1POFb!d{}J4(AX{ z;0TP8O5^a35TrEgU!eXA-Vf09CB zEXrDxkM~gg2HmSOoeS|Bsq%*+`3UkS3&R%**8f=k%s>^L$sa?>pJK|N+aAcm5QM^| zKL+IwbTTD>Jd{7o6ZvCM{xDC?N0dL%x|IBJp##?fu!H<*0V%-aE<`ul z!YDiIK7`O>2x6012f=dH&Lz@U_7B+Gg&vMZy!J@_lEh-g+#0NBBAc?RptJI>^)>^2x4}EH@ zwqEesUD$;L1R-1m0Sy-=>k{rH;g)>g@0oMy;wbhq;@mxBFlyG zo6_5}1@*83zIy;psXtyQbX#dm4fU~oya;jl8vD2sPa)~e1Zgxj0WMdU zM~kfXW4{{D+CvAP{SfFg=((Pckew&$x!5nlLNDd%dZY_|lqc&E7J4X8)gvtQPoAhp zSm>QRO^-13&8TyzFUjtljmtUCYMJ}>T_dxnl8R5)dv1By{$qx?P@beEax^6sByuz* zO-(G~pfW4`)f8qW7bwh1o~r2mwgL2e1S*v77WM8D8ztbAfcq!=>FfJdq~~Gp19H7{ zzUY%>$dTtekk7DO*iS~!$Qb1s#DsVwk7ghy#2dM=kC-4Y@-PNsg1pF67>Eh+MjpRF zOc!&p9?jkErPzD&6|KR$o(Bu|^t~I4HAun!)+=9ZYpa+)Ag(l>#|yj|VA!Bc$bU%4 zF@Qu0z0n={EjLuscO0F&MSRBr8jhY~he~)PD3xbGG=h_HIM4QR6{bP(G$xnr;|d0Y zB8YclgBrKwPiEpH_^~W7^zfp$Gwb|ym`OLz 
zu_f87nOYs5kuTffmv9Iw*W!6o2u5lu=8H{vcftwRhROqqqm4VQeL*ty$wWiS{S#F3*twmWn6se)x93@}kGjiWESF&UIiOrLLO@4KwO>uB=;FEjI zUe0_A>|Qo;FV4w^927*g4%0rT>4GD-v3vur3bJ}^a@?Vq$;(nN!i8l)sR_SaoWL=T^@IBn5*p^Yqyf1IQ*1~tVB*=CL(-tT;5j@4 z&)}bc1m$M{v<2)_;ElyTMeC6l7+L7|+ZO!7TYTGsj4ZFeI5#H{$S(YpX|*D))?b{J zv&rwvdh~?3c+SU`^vy>C^LCE!hVu3vX#xL~@*YK<*5WU`5NQ2HTk%O$+~z-E%effv zJ=$z8K7~43j|9%{T+j<9qVG!+#TslcfjEm*6kvuWx*q00PQjLdcQS5BoIA8dc^><# zpQ|mNbdxK|TRdr~E2%CptYBC1)}fX)8%;>=ndB42EPmUy_g*3|v5UN{ia z|MS;~Jn{bxe@*${=C2h7e_i-p`74GA%#NKf*t|HwU#Elf==>E-H6gVEd=tzy1y8|d z`}Sc9jtwC-z3xfLW`}%?0NLyNZt)!D!!!5=*le`E=^s5mbpwCASSLy8fuE^8KpO{7 z-Qu@pPpR|UtW$R}v?nmQa0i)gonX3k{^HD>odIj%KBnyvX?y&|gL1y~`vyIF3XJ&g zmh`X4i0?6@&H^xE)(ol?U9*=pdr|YIcIz9kXt zIVH@V$6#hfv*#P{-V(U+UZ3Cg)4S1(pWf>=_-{-cJU1qu)GiL*8k3*eE)E_Vlb_Zu z4qh3P5B?Yv2TzQN4{H|(?~BRLX%`2Ni^<2gi;07m#l*qCV&dRgG4YA*;^0j&`GxJ` z;6X9@_`)%9@S2!7_)AP2JS8SRt6dztBPM@tI9`MAIkg9Q#NBJiC~A>|quoEfXM=yt zeTiEIbEu1Bc$Wz8iSyes@7orbx1b*a$1o=54&6YGaTW@n-*(qMTZ6|Ck{ly!ABy1@ zDpV|+tH$QZGgPtytA#q*^H9IwO$?r~< zk(O0w`-FO??W4Fj6Uxu9=J1xrVbSx`Gx{35z_CUb-V-aZadL@jTC1A&iA(BXN5#D; zNA0+&z(ypfB47n4jKAt(kwg^nxwYQi;A}ni{HcFUZFB3O&-@2AHn;f>oBg95&3@aU z8oOtFqJQ+in!Pxq-Yeg=!B%;oPoDB_o7VGlhcB35+uC5Otbfwkvj)oG$8oo|*#ppq z7j)yAXJzX^)zv!tFI;{Y!W24$zR+v=S;v{c!6ncBCX9|Y|6Pblgm?T;Xynb8KSsJ-F-aVO?iucb%Qnb@s@v zv-7&nws)PacAc$tojtMZ?1HYd3%kyq+I4nO*V)s$&Ys?N_KdExXLX%j+I9BauCvQ8 zo!x87395jtg0r^?74RnT_qjvQkfXnc%Q*eE;(Lw;|BjI0=*D_(EI&6m16iqH=e1+rt_=<8QM@}wUj^^Z zb8DN-f^m~S{6FT`so>XT*wPfktI0)Uc=dJU)tXweJDOJudtf_$O?zXW>M0G-WQ}%? zl8jnZ7|p10s3Vq9qo-JsHpl3zE|&@ZoUGd$E7v8_=185$JjYirQ?$*hO`H?yG?y}Cd=NBxaYFzK~?f=O>I70X|^|6mV|y)Qe!@XQEhmz&zywIQYdV4t`1IeG5z( z-<5L^F zZ3_H4bSN-y){&z|;0_TuyWoM-VArW(cCAXCZ7|t&DNL&0zq=*y{dH%7q_;HLuZRAKe$~SI$KPBcDp!qwwU~p?cx|O zG5Pj(ap-I@`C7X;bheoMf_8D}Y%%#$+r^98#izB4LuZRAKcih7I$KPBX}dUdHa-80 zn2h!w*A!1$5O?=6{@&Y!T+EhU_nf4z7N+QH?@RpJa_?V%%EsLS=pwYFq2#uHPXD>=Zj!oQD9^BbjMkA3gg-QZS zZ&9B45deh72CuPPNL%ah7eufG<&_OI_`}QZO zp20SUCp+U-&&ZGLq=x`i<7Ze?dCfh~2P*Ft^uFlYndfcok>@>X&WmHvoL8~V0`+UB zSc4-->*Yb(E)NpdHHcd2Gh@v(O{so>`WyARz_n`bE~Sd+sj_N=6Yy}534|A)ps%%C zZ!+4A)h5$AVG!HUFqT|^eIH`&i}r za`FQ#6LA%4{|r!an8{4P!jroku?F=H(m=Bc)8EzuLk=Sl^r9yHd=Gd;bKp~`URB@* zf#Ny&8R`C(z(<8`*d=6X`yPm;TrmTOJd8F(&qMOl(C=?6sKKs+id8F|jveVsFR9R>#CXiivq+V*Z#|O-$_5 zm>AaNqFTE>Cblakwl^kL9}_zi6KjZx9f^q@kBOa*iM7PUE=0vf_6SEd+;hTzP>8Z1 zb`WI`&g*+qT)=k>a;v0s4`Q!E-~gyx5d-b+yO#8>;C4C-Z0n;e&W7nib8U3EcEWS1 zr>_&zj#^?~q?)$0)!Sfac9yhw%<_QBiw@H|+Ce|ofP9`=)C!W?5k3=Un>4}^{`uptTTJ*%z(TV5u#Fps93wq*(=)_i$Xm&-?%;V5& zK6u~8vcB*?YVzW%1RsZyzsYxvQeA<7!L)R$yNpXB zgZ_POZKCwUos_OX>8H9X{mmi0^qn0`V|S`2Wf{f_ZkrOrWh916Q%4NyzSZj((XkG5 z9x%XQ!>GCh{?p(dQ{aDHRB4-MI*fl_82=i47ZVUtp1FmjUTkE*7CYa81l(^l`c{wm zI=ya;(`%Hc`T@ws+bY>n4ZCq57#D6`mylKaz_P5mSpKcNTbgvy)tu*T1HU~6e#;>E z?RxUtX0^?HiV&@rg=o7BM0Qe_064D!R*St9)ej3+%S&jl)pE}&)wd!p^o}arJNZ*I z%!8{Qc{%`Ff_yC&u*iqK1;$Li#HYD(uQ`szc@VNgac=GK)tfI?AWumQb=SN978rbxha3HT%d zZzZ5O!st$D{I$`T&`i;o3wWS04-)V;0*bSX?gW1raE%1qCIRyZcn<(W*oB_(N*L;8 z3H5ggbv2=;5o#Qu=7ymjlTeE#)M?ne-+zcuRzTerhMFj$?vqfP2~`LvTw%NJzpz#A ztvRi2(enMmBda-PT-zbLXJbp=;kgNM4M)mw_W=k+)ui6cTfg3J)c}R>S8mnBYRd=U zZx{!@1|KSg5UMRad&?bT$12r#C9=di6-A1SB1NAhA^1Taej2vUZf(Oc;wvg!5=;A5 zw)88z3WMUrLijtqQgu(Y7KMI-_*uq#AniRL62q@B@fAT{ zA&4FculI@4bUq(dS`2tmdNZ~W0)GR+;6eb36100pl>l1$`(6&0=qBDb!6?BCh((Ff z=;Ty(GJ89Xfv8Tw`d%74?haIA^!Kz=sZkw%D?U6sJdGW$R39Fn{5 z4q=mceKc-gMg995&VV-P4J#XmO_Sy*8~_H0Z4n4IWV~m_sZ@Rm7^7M&vorCdRQ(aP zR(hM^e{jx2!O1=G+N`!%z&W4iyYDySlP!M`^iz4m3df{}8-5AO;Be(zh~-f6W{JK0 zWK-muF=+T=ar`and*PC2e(>%Xrv%Tz1;-kCZK9Zjxd?OoFY3$v%Jo3c=Dr)Y;ICAy 
z)ixZ*SmkSRi+Je=bYS14x!@WHP@x|f_HMKQ{)AB(#EZ2_j@)%~mJ!Par~7^j#QXlX z_2&L45a(EL@85o^7$5IN(L7ETo7*fL2J$42XRbbFT#pb96cqUTt|ZOVw&~6n?R&TKSaRqJM|+$j;_61kfQ>mfgC-C2gp(N!z4#B zRh7Q88g3&%D(%*ZS+&B#@Wxb7^|P7M`-8L}5QPlQJxdZK_2Jz)=;eWdvl$Y|@0*uw zADDt(ABeix>jT^OI_UKE_^@FDmG8C=jG8Y$K=aYt17&Ydj2dn1tY7HC@Odj7T#%`ycQ{#|D*+R6$w5EtEDmKquq=9B z9hT#JUb{-MQ#=4QuF`LIiO}DPa5#Q=`*##F#$%5K=4gp>j0bL=WF^i>7h^GzN5r zx;{TG4Hdcb6F=k=_t=-PWTEEHBGZF~t^#ev9Sl8T@n3eUyL6}pl&0YnMmacMUq@vi zL(6R}`$n`wR2jNfUA+rq_j*)^%WNYVr9H>Erw+wXxEn;OUiCGAlMMvjM1+38PVvIX zE7Z(w4%1!|GuJm+bq%2zaE;@-pLh;KeQ|Cl=69`9bsk~OeMhSL@Y$UFtUan{z{+V9 z;f9|p!O90*1V~OE%8liZxNs+{&e42Ab>ESudhAJUYV#NBpfxIVIak>*N9Eca9N<){ zcskm@X#Q~f)hRGG<-%k1+0(wj*;zG5i%(v(oH5NG zo|h76eG(YDCnmXPL8Oy-0y{S8$=?7$+<^{Q)jbPG06yM=H%jR?etK)$x@xX+(#LCd zTv-|s09Nt>13^^Xj#M??hAt4-Ug!vRQx^RLg+7$M(4=~1pliUFGeTEmy^{B-NRo~l z$8yx%wMu0LpdCm{_5TS2ZkQ$!9_=m|rjB-(4r9NnuAq+cr_b>#tQYX(kPAb?zavPG z|7B~NZQbH1te@=^>v8e+EcifK%nlAdh%tuOXWN$KCrgq*ip2g=9)$giPI91@d{|{@ zOixSIY9=1|$RofRNDmI8zrcuthfpQJmxjUrDZ%q3c%lRkIRP_03}cos{RxA+1scA2 zNK_j9FQ5j%o5O%M33v)B5y1WpFz%L0Yq+0pM<^qNBbNyfAAm#7t#0jVeAMB^vd|}@ z5ZkplTy6-;g>ZFM!rn@wd6g1+nS906FlaY`;!`x-ur;5ciQf-72K}6&a3VxlSIZEWK=$p7^hVI3n4mPOm(=Qv^0E!aRM>O zJth$nkW%>^GBx)TNfSKQcW-r<9|nbOD@|zd0&0xgemDifU|A|N(wMPsf@j9zl!iZX ztU)uXZ0T9{uW;sHCwR&ar`jrSwv@eObB{HHMf}?Kkwv_{F?0p*>xQAO(ht#VlTtYo z-`C-uU>WBgIfzy7Z_q@oW6hzRm?uvpHH<_YfmG(&++!@LZ@BFfSzlX7L4E64-?|(} z?q_qLYu%cPy0QrUbweD=qDW{DD`+_R0KeN;;m;J@kA!96ZnL|eO#Be&v|)GMf*Tz! zaFAan0a1%MLKLRo4lIztI^1v?bpw|^Xb*rgoVmFFBouFVg`^GgZreS!dvPKN!*Fbp ziu(jWhu!v?)V1gmZom)Mhx{}oM8`AQ#dD(L?y-sO%NzA$rq@`Q*{=TR{4}*>1Nea% zuog=@{8*m(SR^qoL0$zuT08vvghZ_%AYQxf2sa1 z;aCfrN8^5|a5?t;G+9~Pl~vHqo`W2uhL4X`0s-U9!kS_0ncg<~*>sm0+s>wq)- z(m}EF34;voAm~48Ar^q?8LZI(&tS73GyI7#A3)-E12`vGNZ&`=LNf;^?>B(|0p9W} zFF1hv0p9Y9oFtjUL)99uIA9J>RTuD%1NbH2Ex!PRPf7SCSg4;HUXS~I&n0&UM_U`# zS7d)ae(C=F)BG!};~;5VG1%e9r78OuoHj0rj~Nc2aW}3|5lQG$CW%MsxM9rMByt=G zJ6HRSKK^^O53jEJ7<;-QP2RrU@9tADWUFDc?P+Ji^iC1%8!YGw%D8_s0j$-{tM_z0`$A zD1?b^Ts;)}2k|u|EIh})n$rVY3T)UZ%nKD-G|yuCX-Ms$UU4ltq>{^I#p4ey621EcyPP<-HG z##ISd7@^+DMyRi1ga-6BLh0W#LPL5Qq2WD@(5M84idi4qDnh+YMyPKank~`>Tr@)I z=Z(;ivqormvk@9~1|iXa(|TaQDLpX!BmxgC8>_tcvgJ{T5wL7La^Ant<}bHYn&k6& zj4;OkJ6=BD$7o`FV}g9{!w6#h7)}k(zX4Y$-%>MHdG}?@99Cz`fnyfiyhk<`-!?z< zIr9hp%B-|gX~Xlk-Td*kG|lqY_&{LJ@dST-79t;-`7E$(N(j6&=L8Zn5&5dS ze4gmR{pEFmA$#@)(pzzCsr(mv{I-!e*o4;>|8#!ltNowx`-aS}GY5vW?!eabtM=eB zb!5$E)+dC=k&%&8>afLb8itE4`_8@LqA9Ng`rr8#3)RU&tS{vi(V9}xnt6|GlXyI3 zlFt{>0^;{S@$&f*kY{|ej(5F|_sIy}I(o6#mnY4zVgp(BonMEs>HiltA&_$C;izVa z7XGDOGt$0$GsK~*uT87{pUzt@}b zcfA>K#3_c=R=pVq^=6#;4$WBcpEjf7yElVQxucr#wBC%D^=AA-Z-!TIMxEY_LwYls zF58UPt%Cgj-%k*>;mh!x!VFKm)C`{&+XlDZhCk?SctdZ4PjACEy$xUMZD{Vg4aDPc z7!Q4Jo4cIb#aY=IckT+8NqMC>%Z`N3>C@w}*e}(we?`asO&$9+I`-Rj?7z{mZ|NF) zfy(|cDiVEx&i*cllPL8?xRl<2FFJ3)6R{2O=neRz-hj9C2CUT^utRUaVZ8xoze58K zhEa)Xz`@HkAeujjChzUM0p+m`ct&r)3cUgU)EnT}8?aMvK!e_ZbKju>?U{o>=j+Qg zAbR+V5?^-Sfcdcvcvf$~O1%O9(i;%a8}PZ_fS}%h^WUKX?Pq{Mrz`$eoZNm6Bwu=z_0ZN zys9_g9lZhT^#<(L8xYbPaPd1dpu-#>I$h2IHXvpWutcYGV5$WyBg~SY)3JX|$NpU% z`x+hlFLdmW=-9V@2kbjc0OHc+1R(Y?6M!W;od8oU3!||AjgI}Fb?o2Mv9Hy!-=kyS zsAJ#u9kA~(`iV&Iis^%c3aEm+P4SMaO)#j`;>1^SwIeM+N37mI&hQ zBUn-GY# zcPQ`O2xTPewr+vOJ-Y&5x9$#HfS!O&;nq(Ad#NlWPgxOYq?XXwx)phKNUcNaBx-?K zxAlrHW)$#tRv;N${@_~({_o&S-vygG)nZ}VAVOh#R`B=lwnf{&X*H^BgU_Yq0QVik zo_Ak2)dcrx<~FRPdlLHBb{875KTl}NefP@efYW$#{|U7E#2cx^TWJJvwE7p`HO0Dw z4LpP;uo*XY3)J#F?!h~9j+z2H_5=evE_@qkotXs(+Lnv3gIF5jK-=6H*l{L=)+2KF zYHTO*VJkWuRpIE^AN>c#g|o*C;Z}2ALXEt7_FTW3z&V&14C_s#ebM%SChGp}>;X4N z*#o#e!}o1`<$-#9+~;Tyc;#kk4|o?(X#Z%&RefiViZThn9?+{sw+Gy3WK^IYfEK2o 
z8mViy<7Jj%2Ds9Q=7}0}jA)vOX33~-2Kcd&$pLUxU#k)Iis%3%x;ZlBz^L8`NPEEdj8N~MMyPKOBQzkv2&Kmxp&=$CG`tP%619xF$dIrHoHs(f&l;h= z%|>Xz86zb6M3fkE!blr_%m|HYL`bych#nXaLLh3q3VXnPvPx%)*{BK*RWsZNz22^c=>$CET6Rr@(E*xuuQ5aR8O%0e45>{YBOD?Sc?MIJ3kLo#l#n2gSc}W z3)My}4T&!Z_O3K8F>a2ozFjkqVxGKC_0}TK+*a95)_?JX>6zk z-rJ?6K;TV-M;;9vVKh=+(UAu-9T|GN7NmXm7W^`{1(fXd7CfuB;8nc^@98aoElFTb zGY?xJ?F=1T@S3nL{HKlRZ~g9Vm>$~(3io;&mg#MHO>e_$y$zr0ZJ^1AZIC90j%}D` zt^ZFOFxBe)?k(T~dDJ+fbg#GI*Ln;7thWFbJ%RBiy#=)Pum#e(&{+#s{O2vG`0g!u zD7FO@@AVcur?=oQdJEu2Szt`F8!@Muhfir(=-2|e%jQ38L`sK=EbNROCo<0fM16oK zBk2D|M;|6tfu=NhY8IN?h&IhTd`g=_d-P$!=yVEqlxm}KdVEKF#4j;$01Jfp|2B6+`6`V|-RD;#E50H0BdA z8jty;xsp#B%lM?JhEHiIXpcCRxlYD-^5w=j%f*awmbklm>#%5btD+6OAs%I1!H-<1BHBF+MxC$GLn$glMkklLlfwX`bYh z#xXu=n&DI020BB$lQB*-x*X#~JZ6lu#3jbKT*cC*G0nmS6Cy+dJ)g7+^GUNLpEQc` zNs|np(k##!;+>3fqS56TC*m<I-m*{|J0^XUJbN|EkSW>^%uNI{P2O z8n8iF1J=MAumXymum(J4SOZ=~3Y0|`{(jre8gTM`-5St16dH`Ney;2PGM|TUH z*|P<@L@L(*{sa@iQ&4&9c9&2b{+< zNAh8S}8AgNA}A<$n_t{}d|z ze+gN{Ru=s}4P2bwP>6f9<|S;ESE!%sw>5AQSE)yt0cej98v(8|QLz{Pqp{7^(|a-; zs8hYa`6s%a=C&cynlT=Y0Sbi!a*@D5`be2Gq(C-z_zcn9NdKX|J^1ZLE`(ZEIJTR= zcvOB3Il~Dvv>fPJqw?X)a(>pm-HYd>XQXdz2@G%j)bC55y$zlwhh(JhZh>t}C_}Y@ z>?xlG%99hjlUr}lV_Nq-*0IT`Y#l3`+((o(D&wMZ@tjc^>04W{Fe@}dqcW{~8f)_g zKAM8{@8t6CMd;74JG1JWK>UPFC}I!##al7sA#4xgIcUQkgfW3}u~;>=WVLa-#DAe5 zIVPUP6E>ojchS9HKSs0Gl=K_)4%~)Wfc;0|@sVxVhsI6G7WSbgjEnLcy9eH0YIHP5^!r4#9Gi>mNPJQsZBfZ^3nC z!_94hz?qAf8*sSoT)kem9QES>s{SsPBkBLg(r&z5&jw+HUP!Em*Y|vhPTsjTYDk-mK8DwH=AGz>uFw@_WkIzdYN{0R9bhn@@Rz^|G2C3B~J2) zKm8x-9G(&2#ze&7Sm)v|8u8Z@*zxdDJe%=%D6r{a5cm>&`}==}`TOK`F^)!D z=(zwc$br=j-bT{!TizoNztiAjzo5Vy&{ZoH?KF5PN{OE>A&dpw*c*s_knv+LZUKDzuFD1^b;_tXSa^r0W7 ze?!=3#}X>rOl5EY6Z8R5o2vz0N?UF3TM@xEte(EDZ;654`x~4jm*e3U<-MuB_KIC? zzhk&3!r0aJJN9`UJzY%g^(7zHUi_WW6Zc2nZ#l*A$ey%`gNQoKdje<9U}=C01;1Lg zOH>KNI_89|lHn<4Q6;2_yDfg0&@G%?W5(QVIv(0hvEr8M>BXmOX6xIm~cPy6kI_Cglhw>GYh(P!|zm6H&kgkKx`qg z+-$@5Mvo6gjY_kDV>L5 zYvnkeA;xj37{|OTVsF7Nj%D1meD5v{Xgy%p18{J5)h<(D^}T!GcRpclyx&&@u*dtY zg~LJknEv3N-F!W0*DzcoYK+^aR_ePPw?`0?lf4Q(6({ANAK_e-q(^BWaM3=D23Tbu zkCM;6XMU(Nw9|SZ{d(jClkkD$c;+x@()|}{aO6s#L!SySMvz9{?%>6!jn`Z>NEiqx zfYGKD^mq+(7^^j(PN6FzQ64;PBib~$uom&{#dDk)>DyWYuNOAqt603>qc1z%RW}`T zgVWC+)W)n@k)>bMfvoXxtwLW$7#4%PVTVRdz~X0tk6Yn_cK#qc^%%%E!;o)xfLt8|`B50s z+X2!a16dP>{Imn)mKezGVaQz_Aos>V)`uYvb%1P$fjkn1l>HdppQmFWTf&eR+SLiq ziTo(+__}+7{GfRM-aZkBS9l0$^;;vx7N-vW z_X5>*(tqH56Wp3KB&rX3C#69L#H~n~z7Mhr7hJ@7!6<%-JEGRYREb;cn@T5WxlN_x zc(>X*Re9s2>N`5*!6uf@Jg$OCZ^yBvKaV$#O}z>!2=c<)cz`0$n1W+Vqizz%oUR&< zV@u{N9`S2Zsy1PgYcBZ`hx^?MMENfG4gIR&X3%(~Es0M1u}tH_b{OtTnf79ITD(kK z5uJ7n#0B-OicZ^uG+X6@iE(APE_}hDK2jQ0$ar=TL2qZb_G_)XW7sa zS2K>8t-^tI_Z4c%8qHO@*5YuFpNmtJ=%u*0Hm@sQ#TlVr*zQ^DzTK?X^FX+s6x7p( zm1bE_5$n0#EGj|=V2?M-kHJB7SpmB9PN&hG1rTJ=o%iE`?p%vH-hWVb=Y|{5o$vfa zoEp0>3~!b2!zKJfgfEitzaxA#;DtOj5l5qxDky~*0>|CsC$`mAo*ktu{?LpBPu~}= z$6HGhpH@v<+xDyOH_=4Ettku!SU``vmx&ML@)ZU@8QW>{FMru-a?%r>CZEMgx=!jY zhE&=~^2Xmsl8M>*KgY&@fv;^86ZwCOjc@#WY<$mKvGLG*vGL!-8GUqp4|`+dKdy<>@`TvyU5-j?SuJwhswt87t=|mkj_hd5;e`DQCDPOpL&tGA|6I};xru|ufpqq z8txbbT5S^x_=be{#v0`L9mx2gm+SBVc^+E@a=Rl}aJwJl4!VYZKN945aTvZ@!oMKl z$KM9{GdBwO^FsjtO{RdK9EQI`!apG4H;n-NixPe#;op_;*N5TLCH&7M{2vJas)YYN z;hzG$l!v4I!7e<)JA75wHq||TnBCn6z7syVHX&|;yZ>g$1au$-pM1NO>et1n7?1Og zw-0W;WX8KJ3ORlR-EKtZWZHPd1C{ruF=qB&3$Jo+5n(C0c5wPK^&sL z1_YB(!Euew&o!7$@SKH0YxwfH2(07&yXro8sWF9i0TnuM%EQr7cxQ6YMW3p9bjcvZ z8+V?nx~k-T!2rNpXARTbMLLjX#k3&={VkN}(;;^%9J9mU0UT-6SlBxFdKu%fo7!Ho zHMh9dC~s^vZL#Op*c2zOTdA_H7M~JT+u>4%Wr@vn4L(iKOyM_6?h8?>kD@j>4@9pW zLdcnWPO095fI6ZS9(}8~Fj$G(@_)lPa#6 
z7H=={Wp0H)sy5dVp?V@@LYFNU*IZZbk7w6R$EZRF?&MAAD*&NE<9EGnrEG@eR(6=E zt9duSS`sV))yTq7uCi9(IW8*hMI63_n?TY<&xh`g-QUPpq+uF3Am$=?9N@etndk3_ z7y7e^m-?oP_-DL9mBv=wv_fdAneb7i;nLYzO4T_Kb`2=7I-(+S@!Z8afxak+XN!pI zb)x?Z*?RFpjG(z$bs;rI9$#*)VFBDEgExwCVzOg=0kes!iJrksRkBLcQu!*}CWGPd zLr`>9EDEFZMd#syYIa;)=>z=Sg>*|XH){|5E2V0hCV%y@AHN~`fN zTfKNXD*YD}VgC+p058|vC1EO;2O{N|Wt+s-?V^6=UWW&|*6pPfmdEMuZUt z9#vv=kbq(QJq%*^1fa(N2(F3{0E8fRGliHydZkodEof+y&13J=_EPg%t)vDR26RXd1f016rFs+s9II?0 z$k;H>9YiZ+bP%HUTue2^(~rYed1IQ@;((`M(9J>{$=O!58NWJx#cfI;P#jb?ot}ZO za8!c`l3KYwLl{25;-2AY!4Sj`WL!MWhaBvk|bl7v{P`Z<(i&MZl)swCONH<(LGBI((3Ik#OO=_E@! zHRJFQ@5FNjv8@v`p1q$MnsjM;<`sBKDzB3?of(3vZ4sFo-VUBpfr!J^s8sdh#|aZQ zd;xx`UC50qE#~#u%MjwMUV$h4LV@rk#k^lIR_Tby)U3~NOfPzId~%RZ&oEORxK9fs zE|2t#FUkBuG=?wZr;XQu%c7yg1(akpQsB2r^*k=97{0d;-}p0fCr~kdbma|P+b&4h z;}H@@PN!$2p=^YHfuG2Xj|?IQ7FlHwvaKZEbJ@IFY=m*tXY#vSrb+X%9?Or@-3wu6cUcS95`Mt zV!^g1SuhP&$PI;Hw!FmVhnjwcm2B2Yl| z^c{mGy*s2^_&RiBBXi}7F`KlV8mcJvHl6p@oTt0nsgWY?NL}Q8A={mvNQZ;9gZ2ZaKBO)YZbaq4aUvV)Idwm$NhTtlRv{LMNNFnbXousjj|~BEMIu8avn!SzjC~vnifV zgSTr#JS}9H(-EFrd$QJ4-6a{PW~xR5Up?CIbGIv5pP@BO+r7)zKYRJqAE5DI)j>82{nX)^bP&8QI z3cj)VOI9#3!VTCJ96bg+_i%azGU|;anOYmw(<~lIM6EKaE z)Cmz$5Ldl>Ut}*iT$>%0-?XyM=_K7^c2WKSJe0}A9u8BjYC^a3i-5DR&sEb|xYy5W zBiGvlzITJvU*T`x0cVPohL?&tfTEP)TlgA?;^Sr#lXR$)70Q}ZDJWw zc7#T``xj&hco#`CecE|?#d9D@xmp9#0#hPd1OVb9{m4g^-jMq^SG7i>P69;LY^v{2T_0@&*9x*TFuvOmJ>@e*{Ik?CX zrHYWZz?=+cEW(N36!d_jZLKz9s#R609qbQ$E_@Y`nI}1M+t#ZiB-7!ChDd2|$@20m z*98XvngULEzXANAq5+})27iN3#~{z>j{L^uuMIS7X-+XH$ADcfBPmteGutBp>AvO#8FF15cBX|xb7 z1esXDre%H(nhupu5+qVEY89Yh`kbUnNeVXJlL7%C1rE7Et}u{m+162d`yd?(;hTOR zCB@Rf@aj>&gELQZQcy0{wOhrE0pdZZ_czzy3 z!obBa?FIYtinqXsz=nwGbb3W}NsdCdqA#N7GjU^vaO*5J8>b1Q?=?XIou&&$Np-|h zYXtxW*;1+sK-F-;iHWrFO(vrsR^h2sZ@_O>t*vQ|pm5Vm|3F;$3h>qSr1~`kC)sqm zpA+Hu3O48Z-s>9<0S^m};GG}-lzib6gQx(f@}JjB)k9QlF?vWhyxvv4;0(9pkc0F*iHIHgpvRx70T{ z)W0WNA=VdTI@+}n-L?M_+p$GOaB2lXXbewH?<7(PJHfRq<6KKp?96s_s7fB$E(pWTcJbg)|fV7hdeGu zxGvJb{D3*ufF#^;4MV;LB@kBIS%#Qhb=y(W7g>Q$C@F!ou!K%s=D)zuDx{zlFh@rC z8x=QxHNJ!M5cAluOOk~wL6QTnh4#+U;@v_l8ppX1|GJoX)S*`(7uh;7z>}4g-sf~} zraiKp#=;ra=?^S(xZmUJph8N4(lZiL_<@EU$UJ0TY zm8Vw)o2TrJ^#F;=Phvhs3S@L#b8(z^GtDS4Z@BKLA$vb zN@!WBp@8NI6|4+vVut!9l(M3-+u&{wR}&;cbrr6--~T~e-0G~j9uNjztx*>3y$mQkyXj*9M0(PM6-?gdi4b9%2m`B=zV~G19zv zD$l$W!Wcqw1!9Okji_#xds5`+{2yVhMBIV_)1Or0y;Lpc(qxBW{USR-t6I7l0HLqK{DPxfyn(_K*y{?sg&${Dn4kcF zSW!$X3=M`32=t@-4-`)r(p#V*Y9rnjpHrzyfqXz6G)IVmP<^DsV=q*nGzbRlN>rFr z`fqH-!GvWE&>?1_fAKxVrD{DiTqD7pB!{a(q=R9#Xf6tY7io?+p?f};*Om#mB60s( z513xpp#4fUPx%`ipj3}R%;*HAy2upix__f}l$$?C+eGgLuLrEIF9?0d8yXPduXPS= zgVlg6Ry@}!RX+x-)3p=A%^8R3OQ~K6tMV~{6hxTMG}twHEgd%8=1P3g987=zTHF_x ztK(aiZ{S+?V*}5!>x|Fa0mo23$q#`u`8^Sr{65@2VN^g`@je(aoF*uWke@#nsN{6$ zx}S%;?y8usvqW`W85q1AaUDgq0>}B2ChM0)?b5 zbffrQ@cK>Rp#jx}4cDeq?2;C~!E{`6t%EwiiB*pTrOFEAG}kGh^s+4FGB};Fk|`}$ zBsc6dK`FCf1~cRY%sQ$~Xcr7)f*3{5XIkp{G2q8rU(J;CO>>>sK#Z8Bg^AJ47#RGZ z>=#Zs&rL!xfrX!KAuQ0uny_)7fnF6U2?0bOLC={UvWMxBL=6;WIxzs9z#e>MWaaE6Iy6AL$4%9D-B@-=PEJ+|WtbvSmXLrwc zW&cE?UUs7{K<5!&zAbEjQOmM)5kDi!Kp=4r4T3$vC=p7bd>5flxIP&C>OV#?-<>@$ zf}&D=3fy5lb-{lc#Hu#<-!d3{97IcgiQvr3(=SY<`RPH@ zqh+1E6ygUb$I_Jsy^=$M%88()YANUyloQcE;?oLajMV44sPDzg)F&;^t9eZnEzb!9 zFu)*$(?0Xne$qin?Y1Etw$H57GiaarfMHCXRO60~5?l`gTMhRri^Viuyf2ZvKR8bz z;exL@GB;z+K$sIIXxcK}3&rfJY_%xA5zDj|Tqem2!q(Z+u(|1sTKRPgE^^5zU*kGe zc@UC56IRl< z7Kj71QwGdBZN@lkv@Zc|4qeAJSAh+BU*|w7gt_CaOPZ>E^=LR~xPVgLST6HG=T?aK zE!U+p$-Pw9hAeawbOkZTS>-Z-g?=jMt0v_BC?QYJCl->LZP2)4_JAOzuUjj`D|D%k zYmxCE>96BrC6E$@Xklgz?4QukX!?9? 
z#Jjc%DmI<4>UdA$DqL0sp$GkW0Dr;alRGlAwn7 zU#Absdsh;F>vH@q_@Se1g9>4;a;;g*-BxzcnYq=OxdT^_!`XQyUcydK(lTqEo_X<@ z0j*BxsCe#lc;-<(ZAEx9Ws01LJTwM9^gW)H!W&jTo@`xZL&Ati!n9Rpf?N@uVqpqk z1X?TSEts|{5xS#UgC2>qm%a;S{}Vg|qD9rM>KvA!xX0q>S8?`-cT{}2j|3pBx_ySI zE(r0WUV}8OW;lidKq%r^Pm?rYgJA`A;O1Nmv()g+H`F)|J@-puB6$+uB#dB%LPX-= zMUSi*=h3>yv!*Q^kCm-y?P!G0mR4(DB5IOT&fz-ks613J35Lye##Z??eCW+85}TAL z@5~+N$;O5Nus%ogK3tf!0~M1p^zMVpqw(x}9iBMGGiAUX*p0|Q`jKNYFyvM}Wq2I| zLO2Q>l7pFS)NY1#@=MA`5WnX`0+kw}jxBv23HB^d^E1 z(=H-B%UEyZJ`D(rp9j?i3d@S2^f>Ew9N$Yt1$h@Ciceh1DzawDhe&5pJj6*g8}wTK->WerZq z>Y3O$)q>fITe*h=X=0nEWuAawblhQTlI1ziZsJTFx$IU?3jDG?o;0#Mz7f)#yPvs0AndHX|E??GPTvQI*S*5QlbP*~w5A@s8CQA?Yd`bVIsI*bOzIbJ6+~nFY zXyQ=6EopLNxYoh_0&|n6J^dzrOP^kWU+w8F_$`0hi{GhFA1~ftZb`O3-HO2Y&S3%YInfwo{v`2XT&UaTtp< zQoYzM4p~T*+m^2v`9jQv=n=Au`{TrRk<+_ao7^rUt#wLuJ<=kzNsAp8Nsua}G9R;D zUv${57rfyK?1#s2r?@tZKnSNHY(+RMu3f2TP+a>S!!g7fn+C$-njqCHKqb`n;#!1g zN5r-Fb&3tT#9i1_$D#zHbE-$XNvK-8SYc&rQag(6wWZbAm(YYKL_3HN6zvR34Y`g~ z(Ba9xIYOfo!!&vWuE8daJ|Le1?m9v`jFj|Q2$5J1x%pcPbbF`+7KPFTnHAKTdL>Bn zabhJ!xu%`$l#gT*=rZ?#LyFb)bJIDts)FE15{;2bUL^OqE)ylN zxi&PEOc(nsbRmr@tC9V-w5yqRQoqCKyF(D?mxT}sa}et$2f*wW+W;%XEAg2MIL+mX zm<9@pcpi<7f?Mj^ExCnYD6W(8-a&pYUT2pFz1^?EqyP)ybZrEd%wp&>8$c5#Ob(EH zAnf&Un$C{*WEVQIkX3mf$gqaxXB7*8!lnm+d!4y8O6AyS0*&2!QT9L3XQ%6gqj@W~ zAn4Tj7N=_sXzEUfsSeb+P;;#XbvF1J#G4zjPDk?WFyWq<_1#FGgMvJ-<+>-tBeD0r z$RP;zVDy^9wcBZs>79Z-MC2#bchdu-T_X!`^^EK;J`mQd%-ykOWiRX|;`xX}62|5v zk!bELQI>26DFk{;yTo3d6;Bk2KxJeOa49eB0}0UrlLZdTgP3#wMUeM<^7}ZkhlExG zv9p7+yu%cNjDg{O8h__O&e!0t7R*TMe}Za*c1vo$3Dn$gJG-Esjl@~=d5<*s$DHj~ z51$0{a}SyPreyzM6Vd@8!`)=IJo3A z$dS}_9m7#XRHGS?1O^iCR)`#!kKn+Lk`s>1CVa<}hV74x8IrC`(#^-)Y_2VE?SQkW z2e5$7((V~tw7}d(CEv0_ELv!H;OpZ`p#IMjC)LwGSv7@JPs(jVa{5*^VcARV&NrrF zn(@+COxv{3Snf0>=@7ONB~8A|d8x+I9MVZ?<{$!%kJJ0!o2v*)*cVX(g=`8jG<2=Bhj4wSfZKn!swP}S3z=_+ntK9UCs|Ep zTl|~>Ft1d5xj(?7R6h-rLmQ*$6APzeL&!tewS=XMWf0wx?CJ3BFmHf2js|%9ZRc>Q z8BFs(j840Noj^z%6rBcVA56P0l4iaQ+;5H9Y1-?^{k-fohiOL`lPaz^;9GB3P2YkZ zhUatOToTk@@$y$;{hyAH7|={GVs9ur6jVf;!b)21mY5;?BSiHfdku4h`0BBG&}8s0 zVW(Lp#vj6kYRPfj{7t3PU}BDvvyRMG5a!dyptWVfnsOYQA_=mBwSe%$(!xrjw#tjM zJ{)gCg~hlaqtv&_ce*hWdralVNX!;kUa>LqS_YU2gFoZ2(aWH=xyHI0_lg$6k_i*qO%Fg9IK?v6 zIY(gH)>pmsO)W|leQdJeD&hgfm~Dg=S(e}woK-qa=qMb{oXons9oew1dPZK&)!hvz zFXG#vjhgA8qYW!A$Fy7wZ@C`MZiLF=aDB;_u%lt@LY2xQj+amx?59#M>jYvDAh_Fc z1xkq}@khmy_%M8y<5b$PB>pIu#6<*4;*ZkiAR=N(oXuZ?qHIi2csv^W(M_9qq(yH+ z4>SQ{!4Hj0u7oq2;san(+026{E^-+>3M5YlNi#sa0ExH2|N3cIZr9apK$9a3oBtLC zr=++Ae-&IrbFbvMaC!!hbecZJt>!XWq{6Z68obZrcb%Z3t_Ve zJx9^M)h}o>XNGS6$^-20V9kdG@nm}1g{~)kQo6|R|Ey&p*1g`M@38yFf_gEbX z+sFKC=HFnyV+$YJ9raY-==w=m>rxhrC>-vIsQFn5bexZ=cfipPV0TSEBr>hfisd@)PNTM*w;76`iiTd3d3=J#n(&plUmST&swwqa}YM;}EY8HY=Gr8su526GU19p_#R>{*N0*A{HxKn>g1 zW+JMS?r8{OyzNuPmI%-RS|3Vk9j0|Kp1}tQ$D!bWoY!1}3a>B9!5%LT{Bgb)fu7>N zW9lR*lZQ>+S>9!gPc(J_sip?*J1KJT#_9mMY%0e~`|~#-XvX;)&4pPGdBxVYodyT2 zTJFPsFh|>_vYmYWMC_*`XHVs}BQFMrila&uc$O_@HU_4b!z_;m^#(n=9#5sZfKz#T zExN(AMJUWROj8W|S#ib;d1f4PnZ(I`6T$*jhbKP?9f6C}2QNlvZ06j_)2ICs&J&zv z)x-%dAa0xv*do>BFNBp(c_iqVtqBjW6m{rBf)DcW=I%*_(D>rx${>^vY|ZlXN*C}X`{q` zD!i*g)U8W@q)+-u6>wbgKk>$C6shn!^ng-T!s&xPSi)SL36gLOW8+warcKdEv>++u zh>@@O**)2**xO`xox>r+Hk$qf>EVD8=itaTIBM`_yp+dtRi)}SF=xd-A901d1Fits zI!rf|o70c$ws>3cRlGl<@2Q@|VUXj(^a%EV8|;(l1oZ}OQoV3HN9$7^Zcj*gN;nZT zqUL8L&~ZNcPa^3d&5`8i8Q~k$h$g-gxR6}%JV;cs{rNQfPJ2ENzuNOflJ@KLVqbF%JFwHm zwbrih?k2$vw@+*t6LT_2bSz5diB;eAwzf~p_RE=I>=6A^@Fgle?u1vmcgJYUfw!BN z51FA^^!+;?aEx$&G8_>Ur(w8w8@}Iov8~}^16g#!BVqU%5`Mmf_nkN6cJzS){=gBy zZ<6pgh2cj^c&CK_HR12NM!^4*@PCl-eZugG5`KV$-;o4-Zj&Yy|7C)K|6ZJ& 
z0sf760RKM|{#J?qZG;~s;r|hae_g_VEaA%t|39++YYE?5!Y>HJmrHn$gwH4ZKH1)r zA;9lDEc$1B7+#a`cT4!bgx@afUqkr6OZb6d`2G@ph=l)I0sOyY{m&78KHy`%cZ@#> z9{aSr`(dATA7$}<12|B2uKsjBy@I~FMRjGv7Y3OH1!|>oWdbS8ElG|Mw8!8$Zeud7!$$vCydQjxJAHL$iAj}bKJNQ<~pVNI&4T3LY`Xr_%Jg$1m0#tqUL@y zN6US5n6kJ;#B6y^?5SG(lz_LXhKNsxgNs)parLKdD1^oh0`hm01-lWomsU2BlM#k3{&xZ+%wyGHRy(E(lQ9-qVREJ7fU zfkFg;0gmPdVKQ)zz-4ivf*1q*_SPTLHoAehkvOlX*wRPfHqbw>-2EKLMxreGu< zb3ddlG#F9k;cG~~DQJ>UdHKu~w$0TDYMyCt72(jYzP~`<@Is3>sIrtIsceBetYAUY zW=X{Jcr--t3%RW-gDaO1dV#-E6-S{Q+w$Zd($cFCrbyI=ZE50Pn0~n$$nBfaZ&qQP zk*kL#yRd}^QV^u{2D-h*QJ|i?RbMbob;);@_F#4ArK6EkyrKGA73;gSoE7olCRukO z^aup7A|$*6eC~*sW@`*Dh&eKh>Q=E)+y|56fTIwxmf~>jL*RA6(vT&`4gxRh29vei zvb2ol@O2=J3I4>+W(V2vde!XER_kgj3l zkH_Al2!~AY>G+|^f8a0DzGMY{wIyEs7A&d9Z~rBboD1zsAx@VTYD*zb&nPTd3UPW` zVgIERsznz305c16H?!a?cq!LY)ZEQw|3fnYq**Us0##-;n2cecw7IqlGlCCGbed_m zxS}0=CwuV(k9{}Z?gb5h?e}pK$2nLrz^HW?#Gd~3h0awG9eS`MMwYe#% zUc>2|OrO(ZulmSo!p6!*G_@K_2KFBPvzns$BkU>mHxGg17RpC59wMzlrtyKa29b6| zrtyKaBO>j%OydJ-^t)((^R!Ik18Jv48r&fB9eg0IMWkJjX?!5<0@BQHGD-X2Y`Qqkl_mFC*oi+Mtfxr z@OpA3FX z#!=aVBf(Fl#>IiIdp7XAFC=^mB>Ybp231hqlR*27MUzP&f4Us%o}@Bdfgwlo3K~0E z5Xrd+01CP_0sz5E2PY904Myh(fg1s+y|95H09yjJ7X6@+z75I!Z_Y5aWV0mAn8%od zcGpgv1$Mw-OQ}&1Z-*xj)>qPJ5N#av9V=o2i9FRaTExKU2B>#ISp>ycfm#idQ3(5F zeJWPS1(gsvy7@_|TH8Y6Sz{2-UIv~6{_OG5%&wE!YDhzGx9nu9?Sm@(|V_AAGbR}vA_-@eLWsq zRmyGWFf$TAWN_>`IN4A95EjrxI8ufk6poZD=k?X{{||NN0v=U$_5F+li8eZkjT&3j z(MFrJXrt0f3T+b%I)i5vE!ChDBT%KSZ$E^jq9O&7DC0OZUaDxVMQbgsTE#on)^G^~ z5CufVdsNiKpaLQUH_j$he%kyN;KKrb*&p!L?z3yx6?O6t?HRMC? z_#DU`4sur)@1If?pGE0t>d2>Dkze*Ef17FScQmQy^DWWaOq&_mvfUUmM^+LL0{z93B!O`$tXRqCz%BhSpME6ID2#&!Jk6c8P9L!oT*A4{il~r_ zD%y_`G(lWjUv<=@t3M79dWvpV`DPH-f=IP0FR_~%qo-u6fS#IGz?5SFkz|HAEn+P6 zoW8gtxgI8V!M-N!8F z7OmGqd@MT29{B$k7R?n--4l!M^P?|@MXyr$@c*Y+bm_m1MZ4k8Mbjo!dDm>tz#V(y zP6vgrW={;8fj1r0jy<8e<4ghNUb81mH+=cy0ACLOm-uq(7sZ!`DyL>VJ_A**X@3(` zS?OrzCqjM=$K`5?pA(5CM@6Kiby%|IAk)}Jw|)jt&B5(`fHF1YR(IFdFX^sRi{r^D zXyls$jKp0{MwfNhA?>Sxs!TK)fwqS=QPB-c%vCD`V{l4;veH>#iQieN0xWSOr2tDT z+#oC=ZMDSVaKrlts390Fa0TuODD1-xy$I$b$7b>}u~@5Mu~6fYo)GD7DszW2EReK` zeg?wvaZqh(4AcV*f}Ki0png;-s!~-H21y0jp}}?yA$G7`Du;C|g>~D7#yQkdS=K~; z!lqc=u54L>jRK~*o4^6t$w%ULX3!t9zCdfuv5ga;wUS zDu~w+>iP=yLF)YUkOR2K>ed=uXTa83-AaRld+8Wx3JRZnqnG{=AneT+4EpL>x}Wn# zJNJj6e!a!RJ+Yf{J7E8R; zZSqph4Vt`(=R3}Hvwc%V_cm+qOY78ERdjE%_WoePR&;Mt)hOY&ev){;iV;S`Ju_U!*UO&B?;j0)X!4DrU^ITxJLQ8Dl&4a21G1G6##Ms5Q8zUqHZUv zaZpjX?#CfigXYGNQ&IA9EU`Yebq$GIVs-ELsj0$jxUbA-ss0bh_7I6C2Ne5B3)SSy|P?rdc;HXK7@2h4ZqZwr1=oCB{lp~V{+z0NS4&_ON~jH z4k8Tj(W)RF>@eJD0Lz({>3&Zf#nj;psyGh0_mt zyS;nxr9#Qb}pflxSdPrByQ(Ye&ropLML$tm(WSv!6kGOci>tT@ygz;y(gO3 z*`?8?_l|{p+0e&6E<31ja1Ja!&v1x`@}E?0;>{#+ z7NdzMz-Qe@MRhyySr@_#g>PCASun#*82OcAPZpF?hm2m06XfEGx^<-Ia%ikmt@)^H zgS9M+Tk2F8B}~G72IHHiJ{A*hp|*&jHa~p%-^OkS5&~`P)_)hf5th~syPX|iH;~?cjNQ7oSo>EW zxf#j>8=Igc>=xNOc5|sl%HtqI;jDjw-N0y+i*Dn%N3IQjWF~V_e`AzH;=P zmt>v0n!2^4?;M+DKIE;V?;M$BM!UArci~N*JHQ;%DSzX}f>OKvuB*OzW8WO}Pp7)y zjYWA>uN-~iRma^psBlq5F`fx0>duHkPq^yn8w=RM2^V#%?pr+ir2mQ(z7-mM(vKsB z9r!kWyYM-j6Z=+;eR}qyQ;sQ=r(!|X$fw!hr+=0AG_IKastTT7RDpBCle2Xg`6QRX zSx<6la(=;+?3R;uNRO6n3pcC)k;X7rlf5)Y7&Kh}GjLW%FCwlaeQ$lq9982D>uN6X z60ggG9e#jFcaf$eqM>$_4D+O9AR2&JAM6rP!&S@=DsOk34)w)GDo=3nmQbfb$wl)A zR}F+>ko0gjWbG4t<7?kTf=|x`Insrvze(0bp~@T&<3Z>RFZ9OHRPH|lTfU=+r>FY! 
zFdxUKe$@*}oG_=;_MxAPU0xzoDg4ebMpfK&Oz7X0h;ZF+Pt#WwSQ_qf!;D~>33*<(&$<5yV`rR z$YlGRy~J#Xb2(r;eDzIilgAuow#h?QVtWfcW465sY54IDewu^-p5T8GG5GhM1Af(V zgCCrRFL&@~Iru9D|FMIAOz?L)_=D2$c@F+42YW zV;ep@4L{7mU+Ca37yKXxzgX~d9sJR0_=tl)!NCs~yycl}YP(tR(;WP!_k;J{&}{D; za`4|0{106F7YY6>2frW<-{jy|abSE77ks&c|Bm2~a_~2#;Ts+NoeutjH-N8j@cRmW z<5GLy(P{W<2Y;!9FCk`Z-me|}iz|Si@8C~N!%d>>=EE-qf1!h4{a!FWD;L@Lyv2d{{q!~9Cp-941%HBrzbg$t+riIu@Q17Y zT@L;b!G|3Dgf#qk2S3fh=LkN__5VGVO53UyH~-V{gyHYtN7wWw3%0h}yxMGCj{jOL{@h*zK>Y-sl2;gap z)UiyvJssR7Tg~*JB1&M;By@B$?r=jIUL|ju(>>GE7(K0{IqkvV4LpmU*4o)G=;=@C z>BV+7ik^<3rz1G?J2}M_)yavjs7{V^MRoGW6a)Q;rR;>mLO+5+B;4>;9_?^W(72te zKhZIP{pkagBd?>t7%1Bl8U)|`v=$x4La9p+;Dv?|0y{HXx+;;kXmTiA8TJbb<;EFO zh^l}kNe&gs%egMbE_pfECCw%;=em;A_%8m=pcG9$={_-1R%K#^Q(p{2aIR}TGMebn zR~jEJS%|HuS}c7dqaICFSb2uZ==)(u3za{%8*$^zTy;@liG?+i3NbtS%eY;cjM4KB8u zbA|12VHza4SNs~ej0CYV`!num*tny2axNC-fr|x#J#M9`-cw%cP5P}|G&t84X*6O< zZv)T5GdG&NtT2|kO5PRQ$QJw$Z_F!hxW^A^_)udsEIjk_J{p=jeUj$wd0EY!C2q`z zBtEK4wDC>ilp2eY8o#Kq|)cqmh`UHGjdc4>?v?{ya6g%~Jm&P!Wt#7{=h`_&s@By;vG5E)b zdrfTc_G>fs8&*|Pn#H#6?C2x^+={KAda|m@|9gcl*%*8KtjO@x*4n zyIdswREdOBusw=Xf5|O8u$V>|NJdYeu$U>=2SwKeHcFB+kkUTAk7mWbG0Dl|p$(*J z@k*9Nr|s6%4%Zz3E+(69qNRcit%=WKTM2LJzyrNc&2YR^&rIch9VhZwsvrFJB}!^a zW|`2ccq(_EY_EBQ14$o+KSfpUpT#hH_bZkpUe;FI;e|dXFS8~k)@NZ3eg?$qK0^)m zj0qqe=z-#)_hrOONoZv>mHmWN_3V5?zGak6UZXT!-u3)amX6E2o?pt+ zZh6;B)8#cvujO4Y9hcWAjh1)4v|C=IbXnf@(rbB*(qehnOQV(BNHO<%+$nQnWKy`b zy+8fZf^0z|K14|`3|S)Irm}ODq(^Huu4Hb?UfOMaF1oX(YB>5O4kOVoaTtq!i9 zm-2_tG9}W;S*Ao9JIjc^-G8*5odXG!%D=$Q0%E-{XFdu1rYnjBMqE)O@L*RI3EZDz zKmx1eA|MiI?il6jf-@vS1<76(oTGPFF-;52Pyj6)kVy+2Hij04)GY5?h+^zh5^_hA zNny+IQcfMID53pwM;(L1N(%L)%-ee*a@ z3E@_!waA?oDp=&(gGIh6D*Nzu7CGycMZOZm=$^Vh`Yv}}0`p$(FBpSk!y;0Zyc=$) zf%&nl{P!}=J!zUgBhkqibsG1%PS!m-&T6gus51Pe{r7O77{+=nn3vJ{g_j>(@I!o? 
z7bp(nwf1w>Ci|A*ZjfYTy01^stzA&+E`I~&kP>IrWY8+K&s%+e1Fk`0_=58CGb*(L zLKW-vXU$aTRb}FX-Hud=B8#+tfQ!K-DYv14hU)6^ITgY^3a^w^ve z&5OYIP;A4^fDYhhx7LX9MkB_c)Z%#Jjc|kVlR}HPv`37q%s>y9;>smo4Tm1a*ja!c zeq?6>dN_?UA3a>+ib4+;x}wm-Ij$)5a5}{RJ(Okm8L-=S26|W(oa3pb;=hd^-uAO| z3q8E3vO^Cbg^=hz44jxst%al~IEbsKXiH1k3dt@Xrn> zn5}b2qE$G7j4t2=tV?ji0ZxFa3NQrvoC8|>gP=YaH5_C90RJPXA;^DR97`+&9gKwU z7iw^s;AGnRT-4w~rU+I!R74w3{bqFUCkagqz&i?c6;43jRiB3suG*eu*FO&*T(!ep z|2%wf)h>7abMV3NtKfSrAqshBCk6=NQ5G*~3$-9v?8;1pFeyL?>Gkq&;e*jMiRP&A z#oka(Y2prf#3cEzbAZe5^9+*nNk^Mz?YgIsoG-(p6!^9m0{HhHHu$X^fIoi)@ckY9 zNWq`x;GamtKjz?L{uT%Snfi0PgWvHa@b5ov{i#aB zpX1;!cJLK~kGcLlBlyQ1d~q87SOTz&q>4Y>);P{@LQJxA9C=M z1%I)FU;BJ8J}=I-@p+#E?|Zu1-^amwf4I~TR{XF1bdep`znub5s!Jq5k!_p+);`;NH;Q!>{3)Aq2IQXwS_&23l zt9S6%3H~w%-?ln<-}fGL^PdCnyQKyAKfC^n6#Qup{)sgFV-9|agMU@Y+QHxA;CpF&3S577uvFUKXEzGgV^tdd90z}~ga1hFAL8Jj5&Yv0zBmnktb;$< z!M`l{Lmm9>g1^SWcdiQFcgq8A{yX@If`8lf{}REU>)>0`@QWS%OB@)VQG#FR;0Fo* z>kfWa8vbSn|0f3@6a2dlK3DKtAKn?rlNtKEIdB|x-$ynWEX_hf%P_Pe%yY3ZZ2u@x=?DSsoZ+d79_)PdV6Sqc2V7xW3y`Ztw?4Ub>!+-BGmq&tgJsH zQ0=bOZiE`lOr2wFj8~LB1i64c+|SL6CojOe^~^{t`K#hs^0ZPlpED$R?9gP*yTq%- z@PdKSKqvSCaqWXw(nI1lN3tPeT}Q|Z4_<488UX~oUOdqft7}tj6SZ`gopEAy?W#~j zttVnU-^1x6t~42^;FkKsDD`Ud6h)h!!uDOgbc_t;_GUiinaDaWdo9K|FLglkz1dl8 zw@U;L;)QLu%PYVVIo}}4a7E6iy=JiOZq`xmD+^ z$+>;<_7R*&eX$`{M$WU-7N9~2N9$TST_}+%60QRq>WkvOc82!?VEqd68GjUtas-yt{aS#*WZ=(3Fx zSrne$rW(^1l5c^X5kqXm%gDq-t+7yNoSjG#8{;MOW7D=x1u#5ase}bkY+9$argd8E ztp8Ex*b|D`aYp9=a4!Pomip8 zAUK^E&iJtECw$ml{**%FUet$G*?%@A;v8JjV`U%JqWk*T){lHGrZO6&F+)A}p?&-} zypmn9&~}pS;c)ghJ#7+UO!!Hyb#k)l$Hqr{4JvZk@Vm*AJg@Q=Y04@a6_u1^a@ zNnuvm=%SHs;Djpa%_2~fRYxiveAVi-C#~)w@8^*+2|sX zZZI#gTclH~Y;=K0H<+8)E!1Q;+B@GMHCwdo#c;zYka7Edyrb_%2yppAtBQUsxtmxH=hU0Vqhq0oIEgbCPmFQK@Z_B(~Q8vTD4Et`|Qjg+ZA zlaxj4E*Eiq#}so$XLqTXJT($cP8lFCt+g5eHd`;$5906?sD<+tq71cgu6EJn&m=S3 z9Zfc55~Q8)kCUZ+f3M`_dBTb4#V*H1wSvdUX65H7FtgQDMYF8|S+4vvHg!Ak=yj@9 z;ugT>Q0bYUws*B4DyryMOW1=`_j>FTgIA??d)7lKXjzfTop^@?^J5yTt% zZn*=Lkw8eB#46y9e~GWtYQZji?YYfAYrxkET>VmjuZdQbaW_C3Qzxx zdLetIs8W9|B-!f$d&zZ1 z5IVe)SGB7;1N01rncJ0+ajHgDF}rQ>DznUDBG;@O3j)JY>oyh2En2k)eUSg~$RJ!* zZp6-km?}pfuB!MbNALK|(6(oTwM<3)#Ocf-7Dn}pWN=y|bfhtEIL zNN5p{qcBs2YBBCO-&f}2GRy2Dv!ncFqjfude_6SY!$$eevKts!%NW{hNyvN$S!05H z<9Vxw9@GN@sW7)!IVB2#pgeTmvz}S6bN$Gn(fX-v~ zZ2!u)U#8H7f7s}>J3k>kC|Pg_?euMC{*>v$N3SWcRdGrGjDL)dSxeT}4F1u_M|!Ve zviAxp?Tfuv(ettnD^rlm#scRA3i0v#7?K)`r*z5SwN;4FObc)2B|bVh9fK%3i{$b%8e9xnaInwiTz=#qqL5@@=iCUlwk7j&EP_4}He+2#Hc^6j&nE zfI>P)_@F%8utd{?RwJ;vIr31~WFnW~P~X*T@^Gd-T{RCI|L6Q$M^Bqv3=Z9C z$Kf~cvg62`l~@^XSKqT057^;Z=PC^{ey?7ZC1}R)wUPwPWK{mfjYWeDT*;OD^|uVp zRDbiv{<-GRIvtnSK?TnJ^&p`8_ciE)OdPx?`$a7W0} zI~VmEeO66m4};=KQ6E-56bmbyBBoBkL1|v_$ohp{gB9C z+Ahj@$>zlyme%|N`}N8Az@CvRm}?XOkJWvYWju05s)!n-D|U|pbCGzcNy*ls8c>Wj z1#?X0`zmlGk8K6!4<0!{NSv)lb z9mNAXIhXgZ9OQKKTxz{K>b0?5XKvHNyCl zZv)p;vyasX;Saj4d4Yo%bO*b;<45~uJoHLjzi-6KUaP&Q{V3u4Wbe`c2$=8c+l~3Y zlmpCniL`|0ibWUmJtz(JT?ci#gBps8f8G&-67zjX8mhoS9pj+N1a%}(FyGhS4f9<) zTN*gzF~e#6_ZlMh$eBz)KTc>$Z*wCR(c~`*W62SP3LM_gy!>lW8ML`rDMI!N z+W!kE1rYmjexo>5bYMJrDOhc#x8DZShM-=vh^$>M&Tjxvev+~+7U%bMyKNNumCK9G zp4b`OX5Kx-iz_@ZWVe}f&sMu_0}+3C?9tixC`AYU6b;I2Rwp-s)mD<1TnnXcvW4Dx zud+oJ4~Y6YJQc}o6CEQ@EI-;r6Dj-v2=<%-suKmET&{AHD}$@uwz{$8<-`iQybBL% zdYU=Uryzdbh1qtGgPG;Z)>fk{TU#?!uG@`J0c$2k9YWxwX=PLs{#2?_AzP(KXtkr0 zgviv2_<;^9JX5=iDHZxmW#fw`uXi0c$s^|EkRgfY%EWvm4p?uI#{sbta}OY5?2Kxk z4>;2b2oOW9K=;V7Ep=gPC=!KbjoIeQA!k}eL|a(EGUU*RRg}y5Yh1^vnfORf7+Asl zygMXk(OXDy06*%#BH35$72yO#UP&or5(z17%(+M`&!n1*MbHTa#N&jpROg~}vC2{w zvH7uxU?d2ZSsTpMvAzj=mu!)0$IW`GU4&rAg~l%s9oX$LwYOTZ-=`Ic 
z)fL?zQu|v!!f$K$XVl*ATg10ibbp4bS85@tW8xO{>D{)rj%ALUGV5;BUV*jzErBEh zH33PDKO@C*%yu=81gUx4hB}H8j3)XOg{KQ+3P$KuOZu-nCy$W+>&}`H9{R64FBviX8bqV9BYgeW zG%i6Ar*R4SXBwA~f2K+Q@W?+a z;jrW#yc$iU3KU@S6dT8CMhn)5&Uo~I5KXaQJ7THq`y}90d%gM>!8Z{WN$eyvj!oww zA}A97qLD3U8!MiXS2=T?9$0y;4*8WcbfB`$Rq8;c4pgppl{!$V1C<+Gr4Cf;K&8S) zXj2C&Er_IYrs77Z)PYKiBB`7y^Es6|P^m*6$^WR!zE)M)Gsw%HU3>m?tVnYE_=%EUw__n>gZFav{5;c6AYGp1(ta%=~i2*^8D*J(T0)a}*=SM&YN zYpRr%w(hM~dA3S@WCG3-LuezrMsY6MIY4pjQ4TJ1v;-*bEFbfM;?{JWc~OARy0uw* zUpIW#twja?Ir7`R!P*CWEC)VY<8US$bi)%G4bM+w&I5gZFpcK4FZS&|ntM6U?uUVr zp3OcfJ4viNrgQ1Bx??&&p_-(TC|P5qLlPpQVTbW7Vz3pNtIEFYIEh=;Q6dF3v+{ho zYoO#Vw$T6s;!Z+P))p|JQy7KVqla*QWs^hwLt2`JXQr~lQlGPh!;91)SRJk8qBbo< z>q)UJwiW*OnRtk}>mA5ch*9XnUqixL3}l=c&a>g}K|iKt3K(LANVvX4V%5q1eAf0!&2gpTfko>$%I&g*!?JzH;QMux zzesI|(v7La4$w8fjFW1|+$0GmCjoE)A91x8dL?S24aiAx`s36zAKfd;K{Xf0B9+R1 z(Bv##E!Gd&v)2Xk)->d;*~lg%+%8LZv0(|BxV^HsYS;O?&O$VAL?e!tl!Z>UWRpjZ zB_lI^5vXoAOz7qqBq`4xn9BpND{>g|9BsWuihxm)t={xfzm!O&JXl+?En&|h&T!u5 z<^>}Lm5LZlesZ&`f_4l}*LAu&NC$O=4sM6)2FC7W~-` zeqkDZzJp)Qf%aFa{Yeh~WWgWp;BQRBU+>`Wa`2A{{&EN3NAR0&*u(xIw$|jY+taSe zX8%zAUS*Wqfb(UPM^*g7_75+4#TT-Fpy9cc**qfh@7O=cFpQ}3h?!dVuzx65fb1XW z;*zi?UQ288QtOv-2pgaFLu*r`bq(s59CeTocyh`;6) z%^E;W=bNj&{Wh7o`an_bT^+drzCho&<)7I!9 z?@G4Jb~X^)xJOF)Xf_ZKN{g`9Fl-$M7*t{{h=>!cJugc|$vh35kb3GZ~=P&@7#ze~@`&@-eFQ#VY+g z5#S^)F>G%9xo~bF=8wP1{1JuJcC&wg#Ihd;eT~X~7=%U=5q6P*^4Gpco=t5m?g%5L>S%`3OSexR zu0K*l3>4vO7V^_b!Qc+|47&<8>ylaRH|Y=Jcc z)9fd3*EwoHR*V>`2Z}OVl$gx?OtkC`*yuRXm$F)HaaM~pVzBF%v;QBRbViHH2)nUe zSJbLrJ4T9Li(d{mG*KsHP>9*$M6KyklfB8cPdP&OPk0p`+P3NCI0t zdpj3Z4DdH&JR1u@_Mqk_e;dcuoDS*_PRi?l$D)Np3Ohs1&y3pIyd%HARKw}5b;%=A z`+%>r=S?++e+LtEe6t(B80hOW``(3b2&E&(lh8&^yy4ZF|3W|ChwLYOg1Pn)A*Fc2?*~cna2%a04moS?5H^JQtONP?w^fKD~sxgSns? zks1{O>`fB1$UYrf&QRa8T0)wc7EF&tn;H z`^OBIzP{^wchle3tuHj@u^0)~S8yK`ty@&sgL4#*nD&clXqxgGLBV-KBRLIAr;wuL zi~;5l6U2|0V%Q8wL)c?2F~5RradGK2QTDRkM>I96+DlD9;=tOpIhJ^#Nw^e#cUqw&(HbL*ZZ2ckAU){eY&3@spjbJ7W~cXw)dGZO%7R^*Huw@Uv}3Ww#{ti zeK}nLYWjB7^o`av6M)$kZWt>~*qd-6rvC2w zo=RrRU)p~H?#BXq^DMA8Z+M;YLv_tNMl>~V9qfjPen$>~9K5R?6NK7S zOwiRX26$oZqa?WT-3uE2!h_o{6hoWr{q`S%iQGEPCh|rOOyocRmC4tiiHyzYoNJkU z6<25GTc}$fb^J8?kQ|$N7xYTcJbY#sMZ0DmzOziQXtF|cuR=aq6~)XwtS43-%VX}X zNGx@{ej&6Mst8F*tYE9@y0?t&N87nu(MXo^K0Q9nFONN33Tu9?SrUmRhvBL;r-gZA zvnHC1GM#7HISgQ4A3sHuks%QO#4rf8T`R_?xBiSIhkq{w0(tWi5<9tfU*X-Oum`pc zOzyO97&G@c3Ia5nsEWe%??CP(vQ|E_uDSF0`Qhms=|uDPvv9{@Vm`jkU2C&zbz8XB zwt-RA8##bB2e94&=;pTPG62>)fDH~{y#W520kFXVY<2+e3m}yNV4`>BBb^T5Z2?@8 z0bp`?*N9}i@O16P z*L5x9U8fghT+4XP=}J7@bzjCCPCp{!TE@#w|60bijQ1+!y@DpQyM5w@!?j{ae*mYB zKsXILr%6_7VDA%V2MhLFQ*6POb6~-C{)GiQ$}QMGvR2y~r|;ao7y58C6SWS8 zHQexh5A_ zuqT(LSXC%OU_7PBhzQXy6@_8hFRw@}f$3$1%WNVfk{9q(IAjh0^22Dazi82V$%_U+ z7v3cj#OrmBYFt2idi@KbWmvfocmLtWvZk`C%rLDdLK_WN_vP{SS6PaM=BLDd2UE?!p8r+swlp7wk2 zadKaU(AU4skYiVAcrJ~P!}X6)Mn6=nZ2KDd!l*1SH(D)USqWRNwv#Db*-df}PAUn* zH&gK?y258d{c{HR6)Si0RFFRyI_f;rVyP-JE0AsRy*>n&c6zBPef1!A$@)$vJ=5nP zAtKpzB{8s&isk(d(@l;(BG2Is!}Xt<3SMy1vf*Q;8^k-yu(l~)bold9$J>gM1kciNeGx})pftnMvq$Kp7e z|As4>lyPOiA57uQ)X3juDWJV;eown1uiy_sn<#JNW$)E~(0(w$ZUu(xU(s6Y_GQcY zvi7mUGp})%7)a2PeehbV@xSpnPSW4_d`|RvX{ucC2u1E2Q{%h?GQTl)`~$T~<8kxY zr>Emv0sYi+^S>`QB52rNHzq&X_k~i5w%MZB_oZ(FlAPOdsPt`24W(NNwJ2#U6QZ={ zUhFYs2H_)i9E6wseB#bY!^kI&m3_wdg@Q6<`o0q`Z6trUckw1=G4WE`j=;MZS)l7= zOZO8r7F}1icRxWe=(@72>m>JLzZ|Pu#OG_RZM07(hW3oq*q`9WsQnK;%l7>nP!Z8! 
z&yqjoG_D)YCm}(TB_wF#i-G<8t!@^*bNRtwfP&`F&TI0ML0GD0FdA622Orh@gxdoA z^{q(`|8o%jzZ)dc>X5`A!4GYXlY8L*D&x|g$PV!TOO(1v3RPYF8cBCR<>pxbDnLF~ z-{y+Sf^}$bn{Z_J(F?Db(P6c z-~h5@`{CF-ci82d`814fQH1(C1;8_#8YEHL$6oB zHKNGrWKUKYx1}9F=vq&*n+%9)6HdF!jC^dGtlSvd{C$`c7u)7tiSSw-P{fVDu3puLtxB@DhTh860lkeTa ziA2w%E@}ir+Sl`v{qUUF=?xsK&uL%sE7D6O-bMDnDG)cGenot?BaA$C%@2}UD|-Me zk-psA@_I1yTEkb{xDhikSwZ88mxF!d<*)iY8>7FmFj;K7VVhO~UwYwX~ z6S9xISo=}5WEBqOXsK7RGH|fMIZW)G7it&0Tz|8&(C>r>FUhuY#ggni)#0c4pA`uu zo!^PSq%W_Y_?rOzl}n_n z>uS9zXz`5+*5bz;XmPOsHww@XyLvng_^1P1>;R_;aFYQ2u&bGA!0Q~~?;POu0^98I zC|vnVsgTnAMTUIf=mXT`pHT%k4Y3EXj7?Ko;SoU2Viq`CO=Ojr$0_V2Dx^Lb+-O8@ zXq0_m(3ouh26_O?OX6d9=)QRWnRlfTyW>4T?2h*+9g;vlb+yComlUBkdZ$mk~1X3ZF0dJbbP_ z9B3`HK={143!mpWeEuTO5k5b*8$SO8WV&foi*S0b!|B8_g44Iv+%Jao1aSJ!J`*l) z6egb|d_Ho{9^9v0Gv?E-XYihOEzNq`S(@{-Gh;j{^_|n5?R5Bjz3};~pNr3jgU@|N z*zox$K0X%cwC3`!2Y7w4(y0Y_y#fR^`I04qF*oH2SI$2Ry;4(7WiR;dj2K!|yAE-;Gl9WrwBldwYQ2oA!d= zn}pxl<%vn_<99;?IMWEf=cMub8gM-J$m7<$+kDKL4>@k(xNG!V_ZP2OGWxAQ7O#13 z^xI(YRcRbwkU#ouFu1Fj0MgF}rGvrWO5^x~qbv=#t2hy)pG^fAJQD&8o(Vy=?o5bJ zg9_KiqKf0ztXfoY%o?@>6qb+L6bH+{8LrP2me1{4?_KzQoACYDdg6QJ>HK}Vustcg zd~Cnau>H0!Z14CTSk#cPJ$yiZc1U+;=Yi?JNWtOv#{&F*_OAnMJnX*=8(+u)Y<$>l zV6vg$cQD!FS}@t%OVj$u;XgFa_7=ocpP$VYFFUH@f*%_vi&jGjs)M|vrKmXa(pU%) zc&gzZf<~Q`GbGUzPetJ(7oh3(h1pSkvHWb$>-|nOZhXgP$dB+Z(94a36a7w<<^kg| zR<5#zHB;e{PWX^ii!F{=N*gEoC>7oChfOUu@?ki*3znhpSvDlGwK6k(C*r|=UoL4`#1IJev{Oo+DW$IaHd;a+oFkWjwfDf6n|LonC=n$ki=#> z{%%G1^z~qt*zV_Q-)#RIki?&eSDb#BzD30NW}ngz{Qty#wLd5*7ge3#>g zqbGb?T)sJ2sdx52arA_pJuBeo7Ii`FGkO9XU1kONAhQB|kXZpf$gF^)>t50P>6+)7 zcdl7NCVQV>Q__(7Y;Y5k350Z68<~8fVzI4jEr1k$je$eqwj0aYQmmo1 zA9*D&Ixpi7WVkBX0Gitv4Q*o2&=rF0!Tpql41{tREff4h=}_;Hy&i(IWhKWv`whwxFXk_3abb=JjFTo2nfj;a3SfWh1?a} zx=mXMS(J(RIfVpn)CNG>@v6by7J;FOa9cd_ihM(Jg6$&n^-v@SVg%JdUPl$r#5}?_ zL7dmZ&s7wAYz1vkbPhrN%yzG<{p2}N=p(QwDO~ivnKHThq9~b~HSm1J*9HT6>^K|H zlQ}S;Rdoz#Jpt>>*5Qmq~a>KhfHGcNU@q#Uh=-8sa%hR@E2BCCV3v=E2_iwKLDwz z85gLOG8!SEUGZ7kYj_UV36s;lCF-RY zBK#og$K$ID4^oNkA*Sec4EkQ;6FC;m5@;FXYm86lwqizy?stODG;y8Oit3jh03zWs z-%TEStyB;JtInx$c}68(SDXg^@S=E$S3ADp=)ZKWjC%wRR&_=5AdVS?z#V<@aQz@2 zPSY@5&SS)5NZYugo9HQgw;rO~6iUk=;z5G@jC6g2HQwm@v@MdpQZzIf?w*INwEOG* z{ZiWW+)g#1w9)uqc>`IH;rh{f?eGH~+Er9f)K=Fyd|c{2Q^MO;t9z=boI3SEO{jMp zIHDN%uK#{KTz;8XfqQP3v&Ir**=#vU}tQ%ZTf#8s?~u zsFv)Nb*CPu#ykjJ@)EsuQ^z@mkkj-^&l?7aO*wzohF*{di%nF%bCp`fL5AK`3lOz8 ziYEXx<+5?UJL?V4+j&9v2Zdq`p<)+{sc zchOVLOR^*rYdr9gqN&{I=350@G)*XSu8Ysgam}9h&Fie~AahRLn+!FAlQR2&mHGaf zxuC!yCCkx^#kS&8{~mHhpPIw@7>*u@pUD0-{ENa$(68=ZUdwi7yQD|LuZeB!Vu|O& z)3raB`+nWwRj&MXwr;MRk?qcM+*yu0%XMeD?kvxp$@!T33fx(NJ1cZ&@>S+qUw76Q z{u@5%nc76V_#e$*UN3`RBq%AbGNnkRJTIk4rM%%OMJnZuOl5J%uT1C~aT2o)|9IY`Py^FB zY_1ESHo;0kgnK!t`!O`ls}q!T4$r2co^ep(yJ+jR%YnL4P|`WfNkiS^pdNQn%LR24 zP;lyfe+EJCbJ5=Oy{+XfjGh+~lj-Pre@cYsA^)Qp9tu4ttZ%913=b$Xp7M(SoUdi- zD6?w94MJ4hMozOUAn4p?>D&oXRb)T-Pq!V^eyxAo)(Wesy-FI4cye&WZkcaSf{2rU z%2#r%uE`e>Cm~o+Q5)kPQl^~;6(?)_%XSY`-0gQ+`%Q}`@u~P}P;tFwPbzK^vNf#Z z-eaZ*ALK%DtEq!0aA2tadL@H6pFwQD9zHDQ%&LPZsvD)Io3|Wa?4eAhVp6JwImgkr z0Z|rqs~e7Tm782Ss#}M}{B0?7;>ii+vE_rS%qF)y)!GSM*2wcisbfwtZ4geYgl`Lb17WRBD1n7cSE)%>^jIbloN ztT8(+?R!;wA<9a`+VIRU1q&_P*9zD(cWSQT0Rk_xiJ~5kO32dmi*Ja|Z{0`3Q(l?q zgtt~P8c*yXOKzw2(@XVh)=P?`Af#Ocqmk2&fj#(VX_*F?5z8O2P`d*DW&`W{pAU$Z zy%Mhfh57&qb}I*n1&`ba4%G8{;!TCHY-&4Y^iBl0cM0nsNZw~x_bt^C#uS_%+2cIi z<9zppneZR?IDa#J&Q6gcU>Z{Qs6||ovI@_;SlN2#FC{KD3V~T`%+)9H#4cvc$4dOm z@Nh}^mOxH~$;2ydbu{3GK9!8w(SDDiNY+gQ1Lz{7#aK1*I4z=K(jtx*`-wO(RuMLr zz{Jfgm)xS+bTS2=9bxYDqi7TBYZQ!0{)*Kw!}6_W9TCWtEeIJQUxhBrt>)*Ss`zt$RR2T zR_jGb1dQ`?{PbdpXGG66Gh!uc;;Bj1nx2ygajaJT2I@~Bz5vqe$B9uX20{Gt6C67R 
zG)9TK&ZB5k+J|Ba7E(pZ1bwp{E505b=(-=S-%syMOLm(@$oswIDaC3E^A?vQ6t#?W zzv~v2Pv~v@KyLv($hi=cp2r63*qixWIUPSr^8*4dRSU8k^Ch*|2Iw4344pHa6>?l` zl%v{Lr^V`E9#W@gQx&e4ixI>4K{WA_nzT`+b2F?dYRQ1EA`@en*gJcqCLgfwoeZW* zFz}mtH=)dGj(SHuPJ^tTVdt*s#y`=(I9=VnK+&I36?uj690@c~S0_=7!}8J9VH6o( zV=>)rd~aywO~#+ext4x5>Q=Zp_w1XBX6c+hs2zdY5CgZC_tv?FSof{8qkA;`@7Z1a z?`+*E{x?`Lw-?P(719n>E!^jY9&ubbN42YdKM05fBUI*9ZhYzNRy}bBS3EQjtbkHU zBvW+`0fTJgq^_2IK)s4o74)t}MPT(dVn|nV)LZE(8d-ISkv(2BsoDn9O<~Dp<8T|+ zk{&OY5Qeb{*l_*PHjF^it!3(#9^uIdtn~-W>K-A>FAnSDHJjY4am9v8#5VCJY8P@C zbmP~Dlm1j6Ge9#>6;0KMXt6MLZdU2Xl=CopZTHg3h^IBSGg} zF6G-LSbJT9wbvzBdnE&7kDkr7f7JF0!5%%~2JOnz{(Ylcy?-z%;1R!fj`4_RaDYeL zSBjpjV?oqR&)$cnp$>IW#SUuqc%Yi#B#pKI;xm#i}9l*i#)S&2=aQXKsz~a}iY(r233Eu z_`oxKC|@#=<7797n}r_vWs<+jVSlf+Y_Nt2pt8(uDx*QE3^i4jwXnn5{ED)stC|~L zoie`yZ|+&*MMsr4Ex_Q}u!eQy~rmW^|&HS*Y8npKpyt9O1 zoYl7H4==*paPIuJQ9qVjX5PVP&@GnTg60oKHQ^0U8tUc`Mm8;a`=_^ZSMw%(**lBr zq@?KVd(6r0-P+#>;}oO!kV-8_Cfta)T0R{w6>c~Qw9@{BTOUW(26Osw&8^hI$8%t| zHfT=2^YtzR{+5~maP!#)*xLb47vMSp`UdNtm9NhWeF*YIIOlYU_PLX~mtNh8p3Ze&(Q###49Rp~bf1o@24K{p}o9 z+}_sPwDm^fAr#}CP+h1A$^>Q{$9n^-7_>JJ)ttr{#y5^=NXNVV50|+5NEA6@78pOc z6bU!n=Pp4k>K#|)-^FacUY8(4<^Ggb)^T+qQ`ptx%l(}M@6{dPxH&u{YsyL$^|-7l z&6Zw?Ugb$rj7)jd-Gw(l9{lAJlnQA$nkOmc-bR?#XQLL&>55tTy}TjWSq%scj=gFAEZmu$dnE@iRSik>OnDLhlDsA8!rVR|PQjq}~lVu`INSIMN@S($j1n8JACHDt*h zm5Db=ur0$D#JXIsQL<}jD*O9n8C81&s3Gr`UOta?&*og05{2M+E3vl?I|NS&6~=pP8EcM75WAk@fh!BPy|;S+v>}c?Zlr zh5U<6dVCg-mrOC6TiU1cb5Wz1QdPs!NHldL?{h$83!?q;x{7xMfa56QB_Tk40X%!n zc~?5jYJ+N$F1N~{mPnEvX0<@A)MM@Oa73CN;ra*6obO;P^4r}hrKK7(-(fXBN#mg> zOKpUs9dkup(wxz&vtuzsy@8Sju-rQ2c1<$=NPV@7sPt*s!8xsLG*s^kH@v5lS7Pqv zT4^oD=5kDsNqV_rdktRBmqmc$-Zp#Yb-sY(w~b=lEiiXIT>o34FrIACyh*9`T}&XA zYFJMhAY3?!>3HmElUaN2LdXlrnj^r2&9H~j7RW5wX<&8PZZ zoS~yUT!d-)!KCAFpUDZnFZ4h^#6vESOD>3ba#3+O^|bX}40rOH7Bzv?I}Y86i&LKX zW%FKK+TO^efc`{N1v#$T$rsE<@7i_z4!?G<{*2eA{TZ*_!=Lfm41Y!f`~rVSwVC#3 zyf)LHaX#@^&ze7D4Mr4P+M_A-RiXKEcY|N1C%M-L4IxLw%3i4L2#|%LMbA%Gpm*&x zF>KyEOWS{I?^w?8%k_wtRDtlu{ysqP{`RK$?TlP0GC7kH0bGICxkdpm9{(Dp9@p^c z@vl+JxK?njf2m+*_bX8%7`6F$LcSiS>UBwHRB){?U+C3m;%JSEI!#aLU%`x^EgJ25 zih`KSme;<~{7D*fE?G2x((z3bzNBE4jvv|hX>LBjSm1WimItNM%F($ng(jm-!cW3}WRdw^HVvcH|e#U_3|&&^}>lW!dZD)CTl4n&?e5EmHI#$H;{9PfukZ zs@>eDa+>A0zj?$+Yjdo%)=#au$EtcqzN&ZR_w(C-wsxtS6*5<|`6QM)1&qM}kblXi zHWu3VZYR!i6?N;_+UuPi?7k}dqESPQzio?~eUdPZtyHNwL~X_Ar7r8M&r%3qxKhY; zs^C-S=Ef3Ah}juSotDd`7OiV7Fr5huX}3 z<>-f$2EuaKD@6_rX(YodGG}jJ>BDrV_G{n`tUYc3318>G4wxq(X4+9T!E`jD;C58$_+hvCryk8v&oW7Z zvaD#-;eFIWqXOMXLN$XQ(1LKP)5w6|P8TI8g%Z%t#9W^YBzR0?^#48nkCv_Q@jq$) zer>3~Yd!82|BGpWz}6HE_&iEb+H2y$J|(!5*X(M~$bep2h3fwu^k8{yBHpmh%-J;+ z^lc{eKnNg*@tE;N=z&Pfzn}*&!J5s4on6+Jg@>j+Rf`6dy_=3A2FI5L_yc4zjgw4b zK_uh|h)g)oNP@%KjsFS0UheR<;YIMZ&?T$xZICdEw;uYSz2fV~ z0j*5Dg2p|XSR(!5xM;~s(Ny-yT7_C&DB{7SV{uPOk^LW}D}Q7koyJp_JW`FBxOu@@ zThl!G;R6s)^{SF72EZ!RkdBB1g3wP-`bF4n$783BOV0m7eMjFXD+Ymei#)QXJA8QI zCg=8Ct^^(USU6R9VCJ(J8Kd8he{t=%(wOw8Fo5RVF}17lY8qEcc88!8|70;SmQhcQ z3sm$6%L+C(d?o|_$Ml(9+$A29_^r@StSeX5i6mi$c~DtGv?>JeCEP_dRV}kip7tauhfZDM zL>J>kZ1O}2^Nn0}vp;#B$ecXi*5tuoN%Lpw>2Cfs_3u7^#`T;(F`z*(U0-^4WWtXF z9P_On{v2N%-$?v`Aj7|z5B@iqRBR0%tdG6omp+1l>l>1ILlL&*0glzZ-<{jrQ)f^k z8BW?V41(ekG^SD*#1bpOL~XIuk8`>7Y=DG7Kil&T@58_}uRV8NpyCi))ya(=O3v&2 zZ}dHdWc=@$2OnYrqptyI(xom9Kn8ZgOv_gn)|dlSxc=D>g*9Ar`pm@vHd24pke67Y zabhM3xv{w7^va^2PrP`S&^TKUkKomychjzy2hs zgP9^W9U;tnDz-Ew8rk(#al~NGQ(=Ogqc9f;BY&3*sPrQy*ZX{fr5}05E}RO52sQ>7 zjQ!y;)grBHj*Dh!k^Bo&gs*+Ao21?2rmI)*aVxnmYsv~0r&@7AH*19J-;@W$4%mZ| z;|4gJ1fBt;8-mO_8Q)8my9>$jQI>9Wcy!FKa>!dQoX8S;>DgCV+3v5jl~ki<(Bb?<{4-bU|%QW&+7*mooA 
zKn>#q#!8ymO6mX}TW{_uq~Q`(mVIJnJat}Q*ocqi>yia*+|mZN#!__KdG8?(Il>%E zO}>sgFvP1Jqr2#(`FFeb6KgS^MhKnn(zoN?;=d^V7u9x^cL|ILTa|5loH`lW>anypBtw&z{8X9>*Ke^Ny9mSq zdgdIeHFBn=3O}G^x16<5E5=y&n;fS%3G_b^^}W&R)*P58Bu0`3PF#JG` z_;p{Q#cSJ}C>g@DT3^z!^_ln;pC@{fn|jFCMqQ(6C6VZ}*JU3d;aQ;L|6s(jbXhRi z3A$ZX&4xW{uov`z!Di5-2D?Ertjwj0Bb(Y1DJWHm)Kl2#pRiDFF)(F}ofm$ki6 z2A#n2UIePC!ZZXy=-sauY9^N8xXM{O&o3Z2l|ySq;G$Df|GYN0rHptkqH+IgW$!)V|sl5|3scz z;pFG2rYnya#*sC3wCsoE!BqOKE_`G1RlUj6^{iRB9?z~;D@BD)J2Mu4Y(sF$DeWgq`WDW6?)LXdirl+k)P%4nEN zAMb%C%|#P^i`3luFqRiZW<(~QJ`ih1ofV2Pqbm4PJ7JX^16< zOEx`iRZk;&w3?=3n>4iL!iG6&AVJiaYfIfUGzR9=W64qJ*vvA2JYx?38Fc{lq1_-o zzDbK3eR5RGh@D1y$M1>sye_0CwL6OoOc|g%&6hqCzvQF4`#2+3I^s@QqR!sOLSfo}s$pE>!oGuYu}t@Wbr*WkVooeL&l9!Hhn>qtC)`tB!W~O$(`12x1P8 zitH7=DNzqZzG}infxf5je_P)pqp8^gg$49@AqN)Tbnp|-^gZz3Uh&hpfP$7#1EZu5 zV~+D#I^GBFOc`0Fswg)OE?)8i20B=KpNr5wI4*Hb>ZjFlLZp;NSDM*qYVtJn&IGof z)kMoqQSFe;@Gg9*MQMvvad=&vn3h>oQIAjgtOJg{*EzN^v@zhs0t7|0!)7hrH+eSx z8MqE;d8$<^+}(gI8*}P~-URDJmLuMpQ^dx0*MJ-yl8FGn_8FUj{t^ z>8fQ^PB_^j=941CSrEc0OE<{NH0oBKHA!A9;^nHfw-Zajsn4THbG@_t$iC7bV}&S) z5q`jT*ow09GB|>^CrPP zv%!wdkG-!S02~R2!|f^ZXm0>USMp3-JoO`Ap25%GEta)p>Z|TXlxOf%pC}9ex$Rmu z$=wp^)StOVIrB9xOR~)o}MlEfpr5=DSau_)>i)9^@0IhiS`yKJ*EXQ8G6w zrS|`N`ZOGb?wIgWke+mza%SN#O%GTBd*{dZBu+U2af-SG!uVtFZRwGx+-Jgvk(I%% zurA6pwJ(%uf1^zMX@0bQ=_Ju3?BH4grGOt$0Z71aHxZ(ALepMpkw5-BMtBXgaW1i6 z%5pccU&?Yiv0utrR))Yd;o-7&0;uR2LrkC~*;)vln4uXn@NTERr$^P{>z;+M2&398 z6GjC`iY?Qm5-vl6)jX5-QWI4Q#W{BVVt`h|4MVLBf97}RO5H>!qqmw7&Ng1l(GZap zW7XhF16H&_`_s0c@{!Rx>q8hs5hanwF8AeKev3jPV~V{3xZcB4|Se3HrrBEkSU zog^Y=jToYEuAhMcEf7l_+k2FK1JM$3w%A>LJFrcM)c>JfLa1qXzDQKnUP#oWZX_zb zPJAZLq*>=97@ddI{z@EQhbUuiCXt9N1}>!31L)k3bV$i-aA2=aJ~~) zQUtI!HB^lqRZk+(W)Ydh19bcmGf~`LG?g2}^*^#1_A)b} zMe4>Jv*mMU87lymXi#qaP!Ilf5Bjrqa1|irAi(RYbJ-w{0+L+}-h19QUyB z1Gs3gwOVyIE~k;M2yvQdpp~fU;`p8I+_2h24NbZTpVwv|+$~>}z%$`$$JYp+{YZds z6@L_^0HY*vlKJNIE8WmdVGnd;Tib&b0~b2W>YtVvuW0|0cY&-m6g8CRwaf2LC5qsL8DRS#XoVKbm}9 zvnmk6oSSlEAdL}VnKV}tF9eY%Bhj*0Vi9}uw4Glheh!T0DKs1B)7G?t6cgz<&4Jl^ z9*j-d3zHwz%NOdaO|s|iwS6t~Y?}QSEn;o1R+~?WG0;23>K5&z&lFYrIp#=eY(8(w zo-5MVL#;iTZBadk8#XdSwz%04vH)G02u#H4Ts3LN0Pzy<@U=(62+Ct^yi}+WS-Ks% zkNFv%$Yq)Qdy85yopf8}*EB4Tqvgw3AO!Mno&0qBGUFF}|NIBNnHjjF zfBUD0oB8ny4wxU$!wG5L*kS@u`VcqhShvhc>@?`ws#5cqzOD@BaVcG9LdgoT!Joec z%wLH;%wGw5l#4}xm@Leu8r}>XB2NyN*MXD|i#i5tQL<+j?YjxBK}34;XEVdfUAmtV zvvH*wjVi9#&Ee_quxx|58*ca=0|8bH23|&?C#b5q4fS$*oC#%8?xkXXQ#rZJ9qC3d z!Txh>9nIK%9L%$D>U>dW`ZJ4$qRw4T`)g72pw;*jm!RQgZTBA49VxyQ2KoQd2`z2o z!IvGgG*GsvX4}0mWg3ubP27l~8wC@Wk>StgTB*%tf38*8?Dpqcbu{E0I=do`<+r4T zSEz;O7C;AcNy|j<^*-+v7?gsSyDIi?yclzIS=;JJ_ZQ32i{0(N*e%EKVz&g&N9r-J z!>Y%k{ZAhrPARYczuSScGCFY0fplQD-+@Eafg3TOHw+49dEHlRmfysIS?&=KIq$d3 zax=1DkcRrPgZhPo8ltuEGoUcCKaV}5?H@<{ll`Bjt#1*+ckCMe-qA$=BD20%g*8pG z*@|&op1yXDH*g$g_(;91z$ljZPqBaK79#EGRgnlu8F+ZBmNqkioW_y-{#j1Qt@Z@yY&n#q<_wm ze~<1*Scqm#S)=2ytSQep8-Bgxwh1b%nldk%%;k;FLc|rvle~<=NKB>4QwGRZKP184 zhugoglMss5*qHV4F`u%JZ>$6?59#vTG1W|cQ8}Fvt`V)80R@WTwl7W&IV_@Yp!@}t ztBp9lXy^#xaJIW-0es`QL=wEZR-CB7$!ZV5tFCp2PA73{7muvO*iNWM30RFu++~$E z&F=!7VSAsOD`&=1n8GXa4u0=ToQ%G_@bGsjc*$5f`39qEaez9RJdLGY@60jD)2zPK z>aV2!DQ7rJUP<&)_Kkv@ZN0~<32j}YDrQl^%7bFZ676IS_=fJ18aJAnk{e5n%VkT5 zYH4zX;Jnn8)zQ@H%tD{BO`Sm~qqA(wU!j=$hNirv-5}J%cGj^})J}!hD3jv3N#g4h z))ANdPK#Qd*6=>aF$NZe?CAZZxoxz8ysv0HM`(xRGNd+}WfyKTJAd<}lG}?Oq(Uz> zhdbM^=7CNg7eCXYcCqKfW`V7nxE2k)9t*uJrA3pjLEA}mp|{k;Pzy41t+h+=qmDr7 zXy_QwIo5{HUxjY>5jO#f+`{gBig1pG>kkoSOt>d%h~`)qn~@RP^c)+8P@6j4sYh96 zK{pB-jSXrGF!$N+D~jrF2i~Sja>V!F&B&wsp1QNxAUCMtR)cgM!=y(9keW0)qpQ3| zz|Y#7>$Rv~t7BS#@n4^M5kx&eBco%U-$ffauIF$r9jOMj%VHP!4)~7Nz-_R0T;d~b 
z2gd@m77=bOp;v}{izkZShi%|OzI~JdR|_7;PPIgRmhR`M9_T|^&p29-d6OI)5&|M` zvX|3K=TW7gPg-eJYkYB~n~Ec;;TAV6TtDAF-(prfb`kfDjJqISRjrZNQc3pgGci-E zR0o**HjfmI)47-t7OP#l$rh^SgT0S#2i$$2n~F*ee5JsLQM4R6cH4cfchsxHy2ZW@ z=i_b6{C5M@kJCl>iOjg*j{+Fom>nA$HLu%%v?1dnbexydIC3&xC+_or#eB#%y9gTN z-0@ZA0&DTBr*drnzwEsWd{ouB_n(1@1RI@+28~Cov5jq_)P~l!q|!E#0cUhZu}y1Y z(WZoYXiF6#v8aH7B+7Ic#CoY$immOzUex;93Tichj0O}1M8z8hP)rCaph7?(|L^Zv zdnO6m)4u1t@BjV(KcANmnZ5Vg&t7}&b$RaVc^2{G^dX*~7XeomtHHfuK!BdC$`}z< zq7`0a*VuCGsJ-8gzfCug?MHUU7(6q=Y8wU>=(EW&inU5|6U$2P4-*z1?!_UD3(4aofF?FZ80DHW_d4p72+H(+ zQi^@e@U*^C&qr^0lirHnvcp|;+68HlbZ5I=C|5HVJM2PPnz>+7<>lVu&U@^JWyXx& zvfnN&DJG+*igWR+W*haI`WxDoS1j9+dj7)Isl}EX;8|&9*N$y|b9PKK16gaiMbPP+ zzuT)PJh&KJg?@G3V&^S(-eKn*oDYAH)8P*)Hcqr`>$H_)%ZLx>sCXcGyM6yn#zIKa zD&+eMr~S~DGpF`Wt;#ActX{oo5^vgGJ>@~we!6OR%~0|FJQb6T*WMQ{+avp;=Qjc~ zSxRR^Y3e?wJ};!tS&@C;v#T24^L=^wx7cQJ@}pQ#xQBVQW=ml1JQ^$Era?!TCuLhM zXPyY@gzfm4hx-pI)KwbONoiWHfLFrq=2pHjT(&FFI9*fBtESMarr4{d)Ie2@;(r+x z@x3B5PT4x9AUD}%7K*{&;geoJ-!6@4EcMo0!Qdc?P(-}forOYXe3yQfv< zUQc+Jyxi-x`KOkRkSJ>kUb)b;v^tdwg^0+1HjV5@2M*Tu%_6r=~f>UT$;4H#xGy%8@72%aa z3-p?saAk_~N?ESLwnodIb@_2pfAC!4G>gx2vp9KrFdZKN`@$`YnJoI%xu%bPb*@RI zU-p~d!YOm2aXIsMvp0VoM<-;XnJI2Etm*1;w4xuPH-Tg)E?Js@#k!lB-!)`W28kFs z=r{jdd$6Bd8z$M?N9gUYnJQ*~x;1TtwI@=x?$G%iXsXvM(L)zv?*Lze350zt-%nSy z?j=W`+P9#EtZLHs$zb$h`XgBRFfR&Dc$hv27CoH$pem{bURBBZdb4T*>+7|uA}u== z(>QP41D#i7%Pvy3v!0X8atkz{4-xLV)Rdq%85bg?%7g={JUJ3ybFC;cMg^mfP-C$2 zkruuG5o!z;J(4<*7k!-KyvoNZ&YSQ!#d$@K6Yi^!94z&O99}ixT|Xobd)F5&SY!6u z5!noiG*izVZZdlZ2W0m4&moQfzSyMku}33~mjTfP(LD$z)E-^A2<7GE#W6Q#Qx>uWjyM@zVXYpo_ z8M!X9XIFRrQ=y`KUP2#XzUm&M2)?^AWvB!AAa39aRR4Jq{4h196nm(={#ad?gQ&$7 zQ!+JhQwz<^kCYMcwvHW_v9eCB0Q45u+ZBl8eollv3!-Hyvd!x%dGgf5m_8Jaik#tp zEztZrr37nbwdshBG9;Z zP@rj{aD6iW4mxPriP>3+x;2H(%xWh**2gN!-VZcQF__%H;d#)W6G`|#ux~02Cy%&7 zEvwD_OJAJcK$AF67pphELJ>}{pG|?KM&=vVpkKGiB>q3UUH#hlQ~_@Z2SPpJ#1M_U z$pn0^^562Vab zE*Uz}0lZ?<#sEG-$KOp8l_jE$Bz-i51n2(4rRK!#dqX%7-9Q`nhBf}$bmAr zFu4E_Knza|)`bLuL%(T-Rvj`Ih)I1h9s^@^XivDcvrw9>CDf=D-^sD=2pJz;r0$mP zPmw9Y9#z&2kwrnGkYU2u)T4hV!>e!L?`7!?*64m~2iU|(yV0a$3N55OCL?wrN}AIM zsB0E;%;alqTn-X*WwEl&f#lctkVx|EfK!ITxLM`&DhP7EC&Pj`(l!`&GYAI zWcCF$0}8^VJlMc3eLu@*1Ka6D`vpB~UNpWb(%2S^lrhhiM*>lHYqV#C(-LO*X2MZd zBAO5yYiPT=tS+B(brExZMfXu2{lgnwJ;mH-KdZNIMAHttM09AUJvUl53Z6S4(SyL~ z#MvL(Vt2|o_hOy13${J%#-&u2+kN^%SN(_a7yCb0{}iET^x7SY9h`oyzFkRN z8SlXTX)1G*r}D9!Z+x0vFgV;dX_2SgSkvdzQNhfhzF5AHfPnr-<-G2YWe6WE`_tCvw<) zff&-7nK=wz6VEVM$PM#2l9D7W17BA!3Q#TVVT8f)nfU-y2tx21pq-gSYIF2XYX`(S zdQN)kDL4yjrKa=L90fsALs=+Q>6Mg5`l2J_)d!lJf#l<`JPIC*F=??1<{uCbw1gk{ zjIyFMU-Gp<#+QUSz?V$>G8VvJ`})U2JlD+c%lRRt^nr>@aqon?a!ube9LDw z?&d&^qXgI}KrfZz;~Bum9N=mPc)S3c1gJ6O)i-AVZ*+k7IKbH$G?rZ_KrfZzlnmfc z9N^UsaGC(;3DAqgI3@#F;Q+tw0MEx!XjuZlZgf!DCuE?GcTgi7RGF%qFQ~JD+Vh$8 z_kNgV-+P#Y`m~^ug2FtIy*2~&oP+A%K(7oI)B>Qe#*7^*{xACw{%k3okT?w5**r6c z3CMC&mY_2(gj0B%0!0gTaX^O-)$OMVG|H_JT*S)G!Q}C6&b+L>5ct+D`kR zi54{o!CH;0tl_FDi}_Hsr!=tO`~Iw~ zvKM6fEtvHk@zo&p2A+wK{4Sa66J8cZ2`Bx{H9AsNP)3D?Ty)(_ zGy3E)44dJh8w|S^>0DVMBJ_D5ItI%*blhDC{SL)D9KX>Ir2cge>k46;_Y~XBIWhMM z$f;D!b+FBf3M~N4&vX?%h_9u19&xVa$o+O}ssT@i-#MlR<_op~ckjOTMiQ1dIYRq{ zwr}N&BEFtjBB8m`_M2G+e&2b?XkJY;ajip;tEmdsZa+BJ6vIhE7U>i1)k20t0*^~4$<~|aT&fIu@^akX8EJBBVC>hrGe&O zNhmyY{-PILp?H;(6pzb6O0-@=tA%wJbF5p{Z+M3F4bPJzrw;#T#)IHuX$mU#YDV3# z%(xxZum3V5^%VU)6FOxbiTzvzM8h;5RUhfZ=v^3G&ao-lU`@8s3G>TfQGy~R;(L*p zXe^Jf*F+QnJgRs>`G=1k`HoUwy7onSQ}hY}n_|Rl!tQ_s^K_u;FmYeN!=o6P6V=n? z^cX*q`1Bjy9qhu5C4QS3;34NFL#Nk7lfws(>?Pg|qx7g7&!vlPl zD0Jhw)QzVdg1L{{crLZ^Tq=<}T=w3y-&=AgNj>vu7>cnQIhYS8Z$4TYC>$pJqhqL! 
z`{$=sdn#!UhY7TR!zA9xVFYifUs6fqI82~99465K4kPT@>b^xt`2a`^30->dACar_+`qhRhGZF8@@H}_VBE6%-@ z9n56ftlw(+E6ZL8B%S%jHcE>_4}XB_dwH&xK#I>^Q=RMk|YToA^VFr-w}uoneNAA zrn`1nW!8IUzB?o|RawwA@zMH$nLKnAhP26r2yd_V`=KlOyD-`Lm7rmGOB;0Dc02o6 z*1KlCJao`t+IJ5|^{S{6|K|E%OI!25SEwK-(6rw?mbOxx7YD$%Q7UXdiVHMFNYjPW zX4oua73q6Pr$oZt#X3w_(xStpB^^49Skm?z=F`24b(nB(iw=|S?a*Pwy#VCt76;wp zpj#Y9+#6|36~@ZCnNKWu&+Uh)RLP2TZnY&JMw7V@hLa;6MiVaoP|~HhEoc4^6rTC> z7d*csq)gx1b&|J;bK=k9PaJOC)YE;0@|lb9H60<{ln7tWw4;zr`}ozys!f7HFQv1e%Dy7!?>%bpnwANkwX>2yf;p5rq7&z`}Mvtftz3>IhecoCe< z6Q>ww^V2LioA2%4-`m(j8qNo(f(IYZJouPuf9BxV3;w$feqHbW-p;=}_|r1*BOLrU9Q-wc|DJ>Y zjo_0G{$N4+!}ssD4?n`ee_!wyIQVITzsSKqn}L7E!N1Od@7;-$&$0_0{F#FPf`ea_ zfxpwi|JlL+N$?jr_@e}WV4r>Or5X4s4t|zb|53nCbnu%G0>9e9e=`F=+QG*h{A$71 zI`{_#f0u(lG6SFM;7@SyFMJ00)ee5H;C}|ZG|HLv-7kLsobBQU%!4SL!!&Tw_kSZL zRh-QUR|J(0^6aGluRwvPPy<-l`#Au>_d<%q7ApRN-B}Z8{&y*}&{V<*wuH;Zg(2&L zH8(tThl#vs@`%yz4b zumze=)stl_0?nV{1bdgOE0#Fh3;O>lisjji_w?04R?7=K&1nIrQUP*W$mtXAw3yR} zIPK@tr2g$=z{S9b2XWoz8Cij2FISYkHtX?7qETc8lu{^J zGZ?Pj28;SaW&X0xPSk;H>)Ge)+$GewL<5pvAc-JKVp+&4pm%)IaQ><5X`$khHI?Iqp~ZFi!! z3&PCo`Rd(u{jRU}FXR-jCbQ4%BOWjT2+;#+)RSdG z-OfHRs&Lx(IPLd+Uj6HSJ+$pzk=VzySM(`u4TrBc)c}Oy;V{6QXm-t6-VOTF>)i;z8S84RfAOa zRG_IE1TQ^KW~|82`L2d17%6$tlpCh9OkT#Y;l%8suD4<7CDDh{&Cc-xmo_dr1Eh$(jBou=4t9MQ>hT zJzVjYLl)*)#q_VmTOMxlmNW76MyY?v@77ez0G^%9C0u5?_3s?@?;I%*NiO$pm*r1V z&s_y`6sc_s*LH_WwvZu^gp(29yJ4dB8>3Y>MZH(R!s`*2mBUF#cn_@LJElBL^W(Vv z3kqE*HpK3_FL^w7zaD&5idWKtGY2dVYQb(zYv!6ws^|iNQ zz|&odH_>llbs^e?OVIey@sOhR;bh&hHDTYr?$3S<|G}my4ZLrkt*4{0Ey}8)89$zr z0qalr1VbhB9YZDa9YZDa{c+8AvtO!MR!pI;;by-iT@m;s{uDNvx*HY>e>|f3{fKsn zlpSDxD{C`6*x|u6&y(}6@jSY?}gUz&GA8z#SX_oQYX-*((5= zhV9V|)G`OP%0Zom32xbIf|7>q<_y%04(c8U)uflcDky2#F3Uju%t6g|P+ygXE%#L8 zL$@$_I-lL?*poi`GJro>!kjn$d zI1g>yIS}@>cJBv}@$aK{TcLXdi%N_sO$>%BUb>C%Kf_NRq6WLOKb*K)yCI(B-so=b z)#lGdp%ERr#`HW#aqcl7Pm!23$}^t*0uJP{kuWf#D(Y~+f}KPP{m zaARX87~Trk{PcC3hgBr}McujjMJL%LUGBZ$_TG9WYEj<}ByV~w@jWG5T03#qfl=Se-ucGhtkj2BtR#JXYwE!C z9fcEj9w?sHQ5a30a7owTETo;;xhql!E*p01mcdyQcfD17g-dk8)5SB}+qV2{(8`vH zyFM%~Z>dPkF0KH#wQZSI*6K^`rM|sW+uzj*9f>Vzj1*oiiKFRt#MtE$D7)$^2 zZLND}v$wT7n*7G~yf#)l`d7v(ni-BvQ^9aNzo_mUjo6-Ub{m*hG?I8&gqDvX&cyvI z`Fol_Sf6%plv4+x0aJgYa-t|>3%v6P2TqJN-OSrLx%uXFUBCw6u{f(Ou z$2PC!G#UCPDV+|Iwu!2J?M#K(Q{=5m%EzH`bP+1eNu0%;&LEr^T_hXAjcS+6^fVss z4xeinrqtdSClrO-w(LCydh1ih(i4TIH_Qu^|C=GL#NmoI)%_FxL?E5Y2wD4 z>{EcVvQjaEHMUR2W9RzTo8r7;0>nmPesY*F0!^|~^z}>~X=gAOMfU9L9(u__^$VYxm?+o8c5=ur?WY8j%@Kxbj9(|sk;zPM52L}C0 zor!BerI~KJkg7iK3#hB_uFC z5Z?2GJVG1a98xzXmdrh#I?hX;zD&Dt6toXT=i2jasULkFExl3xdSWhb=o`_rpfa&%A(Od;G~ELn8C`n140^ch$I;!n?+eOc(? z#X3YEZqcFg;SL>!Kg{nnp@($qn&?Bibxq|%x^>O)hcJ=h_q3?@b-N%&oPqGn_}xw}z%E)HmKX3Uw|ADAeNz2WKri z4tkL$WyfRy^Bv&H4luhHzybh`lP19H4;bQpLY&>4{Daxy}!2~{c79}r%nWB92nkD9F`ov zuW{edKx2aC?nFd31jyQvm@ectU1)B4p?-?lroP}KCUiyOJ3)9toE`iPkfy&tDwyEQ zE8-tA)jj)g*vW`z*N$GwYP@<>WUzOV5!+eNORMxi5U%8agJ( z!wbe4oMAMqKBFQ|EgHepe z9^QwaXl%rHP$mna@-@B1x13iy;t^iI4e!h8Lq@*Z^&85}f#dAth*MGH+P{pvh=Ba% z&_!!>h+YJfHLg9?dBM7g>t>bwGQY_#aoDy?|9l6uLx%|$W!X)_^xDm$`F6?HTf5A? zQ_AMbMT>O^Eo#wW(xMI>auU9BCe5>p0YC`qKGElpJlqRCk`Y0|-lZ#GpcolS5TA|FN1jSRb@020KXvv1~HyRkEK;uLFDrf;( zZVCywz2~JU`-REJU3YxipHw|_M`~YQ=q_ABR)k44ihp$PkZIT221@YVRl(4rHywbx zkpFY&E^}rMEt3Cp=q__+4lR=ZbLg&I2PXgL&|N`yAqVKtUHR@p4$z^y3UuLLWas1! 
zU4aVe<@$wPpJG%@N|V!jFN%O*Xz5~pv5@dvxs*8-9Bu}Q&=WQtqEFa#sC>ev!|*4Z zPvZXils4XiD+|Gudxb0iB5IPzKGUDZR^^)wTb<7VY&Db}kIOzk!kFd1Z3DgBy>)-@ z!h!gOt!o6si^O*m;m-`yP4b<#_4@~OR~5X@&+DF&9xGA7raf7hZx=J2xUwL8#?>&+ z^X&=`zcZ%8IM24L3gUo{fpMPduAJs>F6^Jv-2IW_XrfvOrUixF&<=57Bb>r+n{$S` zQzH!r_M!NsCS7&a^HjP)a&)5!aPn8vpf6c{I2xWYh6WYcOVbTH#_mPnFXbnQoH^zW|Jc?EE6x`?xwX}8wo80gZvj){&Ex$Axld7BDH4JF6CDyj)jWFc$ zWMm4z5iQ#gXkM)X2?J#n@ZV3^Z3Ks--M9PfRp;2Z$7=oVqcOS518k1>(J+Ut?y74Y zk7NJUn=$lzuYwEhjFxoyr6z0B%J5S!KpkcvOoYo#G31c zdf>^yLPm#7&~lgvQUDSn;Li|840=Ys8clvBb}{Jx-}>n%gadppM$0gQu7M&&Ye(P4 z_Ye;3DU3#qJvr0PJp|wXX4aSemkdPK*S5{6t0JlCoSAw4|A;iU5xjg$J!#ZmBa(96 zqJhtk3p_mH5=(-5AHVh#NculymMZJ0-_$jQ{^?_I0?kOYOrJ63nK2zoK{WAw34hpq z&@E$vfs#u^t2@HPGt9XZbzkg>COr8vD@h1*Cl3*&Zbw@3Jc4r@VrFvpP@Thi!*-WF z4`=l}Odq1i<{cwPJ%5 zSwXgf%j6TX9=7YV4-ovZB6li)yGaZUidUoyb z#q`evM4;q|PaB>2qPD8yDJ*A67~CggzJt-S4+D*#=NVZMDGo+_8>7hCjYm@kixF=R zmGs~hhAuFskj0Q51(5CI7w(3>{5{LYeyO$(aG}_y(4jX`;UNTED1xBO0M=gFi$jmH z|ABoBk9PKZjIue?szOU`vqFel2Dn`ip{2H2A@nGlA@uYPS7^HxbY+DfxWXV;d7(#b z_eAK?9#@#}3T^j9=+XVIu)r1C?g{^+u57r<{Erg+g*i_a>~m>**KHCJ(*6cvW{m~> z*zL(bch3xl9*`w87=1vN&|u{QvV;bQKfwB_3XvL{#iZ2Ob4Y(pYV0|as+1c0ZulU3 zEYCfb=N`)&{(xkKSlRpazav1T=pU{<%~y#2<%#}%0-uvz3Zv^B^iO-$DB`y`KoLL3 zQd)Kv6tQcgO^WOx8Nh4@c#H!)fc0_S z@P9bKHV3#$fIk+X=Yuyt1NaLEc!vZ0yZ|Et^nCDYGk`yIfYTh{K`eI5ej-562k+|{ zz)=n`;sAY8z}+gqPA#!Sc3uWB-~bC9;HnR(ahw1>AH2?w(qFyxW&7$q9Qf+H1^Dj* z^nCE1$N>J`0Y2*h-%%SY0W`7dwhYv-9MlpAwLwtd6_muP=^3cY9n@S0b+@3d6qLlO zSO)4G2Q|S#eOXPr3MhQ=S~tP;{ONVm5N6hAKmKRVc5R?B{owQP6OCZDK=Urn`dQ!v zaVQ^sDQw42K#DuWgq-mcSdKsrXN|LE*ZYD_z13lmSSyk5TCe8E$G(IzgEt>J}w%BWlOUrS*3r7=YbDYT08Osuus~a7|AjNq_ zQd0~ibT&bGCZSTU!?uK&6b{TyK{;&8Mly^ywC4|;%yl-wWd5JAc%Y$>=`Q(Q+??wU z%aMagyCx31i*z2;L{HXb*$wR#0csp79xwe`a=IT6tq)Z98lJ*EZk*2LN*G41w1K0- zGI9EV6|IqnONUMyHy;e|!GYgxz8;AmnFNR8?lB^;neoebF3AtwtbNWYO@4q_W2&@4 zZZXx#o`PPAh^}81nlN8|e5kz)m2forN^AtXbes_$j1f_H8+cUTve=He(5s~~!;-CP zWO_exk8bMyaz%l(U@T}i`)AaLq@&js1e(5MJ!vLW?GrFnCT5n?2kAAcfx^`p_h8C) zP$rcO!k0Et&=2#hWVS!37Io^8RkQ`H2Vn#fUY|LSGAloqR4b`AbgL8 z9FuG@Rv9(Ks>JC_RfdzpbL8=+7s@-jK}QQc>-nSmR-BtG`a7je6}PEg23X&ok-MYu z&7EiSUi4d+ipt%M^5{`>e1?Zy(4n5@O;a^U?oA<|-&z|j*$ms|LMuy?B-g}V7>@%l z@5g_c=hF0~;;|fiZ#vqUm;QsLQ>Zp;PRVPpg`<3=+c|c_!u6XUOW*MjqSF5gP0NxAKD!m&tFPt&6`C6cTxLZv@N*6#U5wks()x`f7#r?L9JQm7(Q zoYSM7hPLoAmwQwW;P@C6&kjVA-}Ms!<1`*R-g~GJ@9$#QgE{GX6eFOJ5u_h4^)F5B zogF%LKeTpGl+7*0fd!@B$d&WxU_H96bM*%LtH8=})PxH%Q)IDHcsA}N5*9nKclp=> z6S!HaLSavOgmnrhu0w_1YQL-Fd)QbkPuSA%o;S`-`rn}ovruWS))76XH1JofLREj@ zvJ)0?M4eH6^55H$(cpD4TJCrj^*T40Q#|1HVj_=d7+f z5;dU(*Xi0x{R`vI!mn;7Iin1R@dI3J(*y~ht8gJKsN#rkHPH=YW$)TU(@!K$DG>&dg|}dTqwjY ztU)h`?+i4(!R)cu_svIwlSa@|3g5$RwYsk>CM)WT+^Z+Lt#1)}4nv5bmH8 z{(t>89dn9}uy$D(;tjH7)HE(*%n?$nG(OdXl+XI;`i_hciR_~H(>%;)FtMNo{W1r5 znC=5G3U9(Fr1`9mcBKLLA}9iQxC49$gWj^g3ee-T)@1ba~{fF7T9K?d-<4)78O_^JSx1898K z$PCmM9aOo4Y8BK9L5a`$R0e9OgZeiIby_D-DM5+PdTVF8SGGK7z48GEKK6a|=F6T0 z3O=hEoYWawzaJc4o;4nRs{Y8-zF_u?D+Xs(bw$7DQ(xSbspk$?&!1d9llD_j@fWO~ zJk_)R`GNeJ)pxj}VpMfyxw_8J)ODV#tH#w;pt?r6x>7Gu*8|kmkKb$DKG~SPSD-MS zv)V;EOwos(IsoXjHemOekTcHY|>^0kJ_R^n&2vZAbg*5hJ? 
z3PbT#jhoqLu`gD;R(b=-P7npZ2u)64!4&w;v3Lc_MS_Ik=fIKSY&JGd#OAljSNU43 zWPd2WvT+kmC>RqXB~MqNa^uEWjYA1Ex3~Fv97u!yPcZ=?asZ4loH5e=vN3@&YLva*GlI z6~Gn8!@j-d@4NX2!<~!oo_Zox!8%40KjuqusiSe7>2Q-qI`l~}bSs&pM8*pE&ra5z z=1;wR!J7)Vd0bll2BBXYX>1u3ljiguV1>2o=Il?cx!@h*yFNK>c&hz^-4y24U2DSr z>2p*(J=Oh99(~6ieMgUe*!Sp&2U6=U{E$btc2zM5X@1XZzLEZ@uSaQB@ z9_Ys}re3R_VvaQW)j3-EJpJk%-8|=nCa4}EkDC#mf6YtNR>JbYk1|L^8!ELsQ!&a$?|ld4ABgXiUQmvMK{0R4 z^xx*{qQ5|)i`*pIg?|rQ&w7>OO$vli*{VSDB=m7^<+)YW8ZBAL^kdUdEdFe??H z_Lm=L6%l>@RF2{+a=M2JpGVl2>Mfx_(^|}}gkiId>=7@_5n1h`@u!)qt>-4Ahhc>? z3tEOBN9j{tc^gU}%w2Zi3f+&cbl?gVkgjy7&c96;`L~f%0TbBs(^_Nkk6_FTr+tSL zxBfEqRZnm=OmdHout&XDxm$VeR^AA!Pn$5XH$P2g(?HY7OhGo=V86b5T6O*+&6@m0 z?9_;ry+Xbx#ig{K>kvE3Fc;WYH?*^w)RE$aXq0(% z)s@#R)?vbREjmoPu0w|r*GXq)x7c)`TQl?Q7Ml*PWBo%7mg{@lm9q1;jJ}R9tfjg9)-zZ z^2u1(L}!aSn++gLFh;KpBWXh)fBRp2f4^#>?fNuTEByCg{2mZ4o(hAE)&!cz(V<Ho*Gqg0-b3NwOX#^^i_2F*5D@D+?Nwx?5oih54|E~}#5fg1^OC!DynkZwn=k(2*H z_vSYj$1)Ob(~919ZwHtGv60}-oLir!E1@Gro_{{?C^TZc(z zf#wv~vBZpmNTR$PQvt80wy=1kKiZud*)Z5g1E1G?8ztEDuvxlRJjk_+WOBZ>q8!lH zLtmfotSs_wWb| z28GGISz9f{%|*|Mz|DQ#9+t>f_9Xj*xr&v&5onIsW7`AGcnVr(#RTcw3KV)2MM}h2{M|s) zKCGXTXYjt5XtCH~T~i$w;5WW55`SH$l1Vp%?`H<5+$^LXth*U}KQlP<=B&247cHG| z)8e+d-(6aHQ%l?2*wW}t9c^>RF7@9OVWUE%Z22?=Q(8$_CE`t0wRC4+vQSlSnROgq zl08+esgGv1=r2^ZL*8)ezd-U{H4J{CsR7HLzGl<_{%Bt?_x85smoM|*fhje@?y+~o zhD;tZteab<7%3%V1%Ax2cdOa_CgN7on z@&*k>-h>-86nRBAsJmljYwO`pR;xRTv+9R-E#(ypeZdtkKe?u z7R>#l=x^?Q?A47BDQ!Eu5Q*Q1t#R56{OS7c>pc2L7p?3*zq>NaU-Z5lcjW?<-IwdG z%+?pZFX*n!$QQjY-(8t?FM3~ryE408^uFQfM=|Vm*2mSM(~qW0x@QmMS4EcFt*ab8 z=KF({)+xaW)+xbB)+xaeH>9>tSetrt!po`GCTvQrpU~wx&ic=R<~h*35t;t`OU&(~ zDbA|)szEcehqi9`BCr>{dx2-LlFn$_;p^cZsHcZvX>!jw~ zlUmFdUhI-u^?1EnrtY=(d3)9qfG zAJO-R-1kS__gA{_AASS3szX}3b=6TV-6_?TTDp^}C$MyxHtsQ|6!$cxxHmIzc&0CY z2OI6?lC=W|5V-AyW8=FjN?u_VNA|oxfD^LF45|A}zxl*6kYt)Yki3!Qba!}VurMLqql!YVO;5(0Pk+QIXkvSwEJc6nSyJ)5Q zzSQ}oJ(%nHq`gc+_B0RkI|>yTN3ap1ykh~|!tGdXxC%BeD2Ju_A1Q%5s+8gigqlF} zbtlS`rmC_cQ9eNmmPl=RjaedPf@!@fSUyR=;mJ(S`7XyT^P`mT_gDeb@e;ytyz|W% z7fnnpk0xe>BZ;YEEDK|an&MCbj_N#2qh2&_zF{&SJOO=3^V@JPcH=v!8=w$`7VvXVtLY{7j9>f0_}**Rwg z+n@+nrFa6d<~IWie)%apbzyo?qs3S4dF7;6Mz)}Ut6die5uhH^N;rA>U>r)rfe21Q z4|!qIZ*t+i%Wksx;eiD)DzOEK@wvjzG1bzJgW=Z4>=DvDJY+43?`-Wl(YJH$UhO{c zOoz3t$y{9XSMH{J{ehcEkpXeU|I%L(xU;1qXH96H7c{x|x$xTev?WP7)rtAQ8(7e! zpmA~1A>S6^QWHW+L6iRT*d_l}b%t!toc|p5@GMMHVj0n=okJvy>5=5{F*50iF(6bM zx3;Q-WZ@%z@|d)kg3pi)aql;eG^=m2~NFT*Z zG7au2Qp{G{-0B|D)a=xIeVvNC)nTVy;<}~^(6>#@$_~{{x7XP@Rkpy>ulUGcY&P~KD1Tc}>&ucpx#_t>|n zphxfJn^ggOq7))+GN%fOqc>>@R@p7Hn5#))_#8S%4HlEEZ_rriv!aZ>zLZ3=5v<5G zyjTw~@a0`ns1NKfx}_Z8IAQn{)tFqH=>d_iIrAOU( zDg6Fqib#^EA((1+KTwCYTPgYZ?z8~mF{SFmYKh%hiCC6$H_%jp1x-T2D4VRBHyda! 
zlVniarbkX`A$bp*zL>+v6!=OomAzQ{CA-2bPr{_mZHO!}*EW$oy zuE}r+rcP!sl1Y=rYt>ZK7o#xyQ0EPti=s^S^#Gx+;liK#=$Gy6#GY?4+9&S3VrbA^ zW?8?RcL#$(Yc+H;cu2BMp~g=iu49AUbbX>Bmn^0^q3*J|VfS5+Y6Qh7Yh=~glJ;h3 zcr^$*inCQC!CYAP6|$YI29$MQu~2hXr`iWZ4|eDjBRkR8JV)p~o z?p-(75re9p)Te1<5bw6i(Tz>E8HI%&?Pk0u88%BnIZ5z@GXwJ#Uyw9Wx6_ELoezJujazc$aFYE^F?1MJ`a*O->@7Cf7knm_LLXE=+Hu)|1?AG*OL`j|Xnc&`ANeZ&GxIpX5k!v6UFBG<4&k5G z@5P1<99d3B8x6PsqRlPW9M#nc%c@A7VL8RV$^~Ix*3d2}>T0eeV;2gAHBcnnKhKWL zM7caM=>7T0zL_NTh^7-%OL$KoDt)C0nCl;Z=lu_R^TzmbCf7z4Y`B02`euu!f*ZWJ zmPFv;eEdkhkw4N+o?mH{ZT4JucG6$QV7@|UPbmsA9UeuMP@S>Aw5_;u*l@z;uX9sn zjqWYCfo=gIcj>l2H$9Sw0r>Eek4zir*?S4eQjY}Yk7nJbC#Q!)au@*{LTH5No};Oc zf3Bx%ft#V1%*gpVquMnCIYIrofd_@=3nkBaF-qyF9lR*BK4mDSchqT}4S(~x%S00j z?zaL>EvDKeWrxq3#A0T=SZWqaLp|?0FR9L>v+^+k zFwneM1a(ipMf3)`eNFHsJfKLL3c}RMxsnnTYp~=J)?o!`ZYb~u=Iw{PZ%)tSW7G+U z&SS!-wx|-h6DkvFnnm~IhlC3%iV~>Dp!Q)a^fA*)||E((2G73yG`FxFEiDgqa0b(qzWY_S0QZ#BegGt zG4;<+=pMwQ(72R}-6>?4yC}mTpO{>ksG1P=1Q>N)Tk9bLjJP3h-S-JCR9~*zcu#p) zWV{flJ_%-OF>3PCP~yC_6f;AWfOVV{v-^{!m>Q&*8PxWeG0`T?w1=R8PMT>?OEW=w zK~l_WmdCL6yFDr9QKqvPp(VUYrnv>rFpJ_Zr)8LJPKH?(2w9407TZO0i&QQwBP zO{-36Vc|6y+fH{kImwPlH6FX%qgr8QRV$c+^i(cdqZ%?fT{;@9X)kte`Y@*TJ@yUgfV;{xBqx^JOXumO?3<0VQLqoh={r3V z!+7Kvy5D~PA?W^ivYzu;nkI>ML6b~H7)e~`dLHU1EI5!hp+61gF41r05>U8D#~m@E zlHtc?P#ag3$NBB`KRfjGcUnQ;>(lSl>x|~;1<@tu8d$j`S7QU&9PlzA@`j(3d7CaP zPl9<%L0KJFs$BGhVs31Kh;*~IV8gVKaAS6w2C?y4l%nc30(|L0bt<$)eJ>^Oc#-LF zpy^Xun~L#lLq6E0^ip>zNvkF+6mM`31?ku7qe?wqS61tC zHoP3dMahA@P`Pi_2)T6z?tf7|bLap}g3*PMdX3i(KO8>Txa>7tRUXcm+E=(w>KfxQ zr=$FV`OBcA;l?H-k*0^yyocRIqkIbR>9ZO<>(> zlNd%Zv?Mp;nEWWNwCK>2Jo7RB)}>q1zBv!+NbnXhA-1a|^T~FK6Ua>_j%%^Qo{h4& zT#lZoIpcvc2WwBj7t~`W-^!O9Tv27!tO$c59Qs->bGeo~H?-6Rpv6^gN3R-1#B`M_ z^&Et0l$LvqJ>r@sJgU~F`=l$bkIc|#zGWVFEAZN-l?w&vBv&`TnoRifzVue^Mtt_A zlKAXP*&3i{#ETj_1RulyXxPEWbeR*&?HeZ$?Uf?%^9&vdG~I1f!y!O@g}vMd*#O;x z!c;Bcg90%$34ai31S4vBi?Vn+hSt72ExO|n;SRo7-y|IXX3aajOl)3D=s_V;@x7pZ zhe2iBtkF-ov_SSjbY-Es(rFl+;Y^~x`as79>QL8~+BF*#FaonD;z#Pm95sEQ{7+(G zrYq0rIM0p}?dg46mM8^SL{5pIqqUG@mTCgX7Po}{K+`$uH5#ConAA_lQJ;sb1M{zT zg19FoLJA z<^r35IHv~y9w6U8+4;pkcvvKCSR|+OVpq`QL~e&!4o{Z_X{^EdJHKjWLDk$Rkn}mRF-=mIebH{tRU|8Ge-@ze;#Jw+&QG{~DTO zUizy+;^g&47+?<&*xioH{p@|j36){&dJdd+2)+@D*LZgQ49VD1V?FZ$0}h)}7*3uK zbG1!z?@+TbH*GYvO^EXy*87BW%CJ^Fv84Z7!hBPS*Rl|Vhl86FJ%m_=0oUNW6-yEs zGMOvP$LQfi%Ehk;MKe+%d_&N5O~LMJxEOs}0T&UUW&7HpsYNteURY?QhR`~#6h5j) zShB-zvsnJ>RpD8Rs&j&}t&gXh;u}0wXARTH*wbW&Jzb1?O1J^FEPV+A4-efL zZtW_pJ4(tZmPsqto@<7gzpb2+KD2_sg0SgaJUy23%~=DPPeO|Hx5p;as|_mP3)Gt; zQr2Q7g7qHjqtwcsCR7VRUk=u|$EcYie>l92Q+3AvO5RlfmryZO)xfFGG>+w;ld_B5Sv^XKFz6r2^s|nj;W!Zvqn>9E3vG<}N-v>0hi_ zqml3^rbKE(u%Z<7Oa4MbLk8efvdXdx*JlPZiZjxTaqh3TwIa>rQgNR|qfStu#YsiM zuHlZvW#-pEN3$&TuItV9A^Yc#r_Jp3cbb{~S`L`mzqt|v`tC1+#BHS}vd_%`MjhZq z4)D!~0BjJTx3%cx4B%%S;OP!bGrSMR&SUi~Qtc%o|TJq?;T6$}51 z8L0IR>Mag5@DHl)Fsd_efd?{B_c^F19MmrbHAGNF)b)!D)I0}un}eDnsChu)EpU)2 z(Ao8Cyj0WpV}O6EjFh5-9U48B{RWQCWneQFphg8KUvylb%fJg1N^B8$MdBxwV-sgr zjZIuNK{G^;%I#A`R`Zy1&}y3WU0%*Ln8g!>64S8wpNYkPVzFoKH?%|xgx3Zd{9}nT zgTg1mt(=!e!APZ?Ah>{mBZ&*kxjL?F%v)-6E*Jy(D#uB20IpcxvIYsVS@ zh{kuR@2wfOMcV88bu7Lr_~5LWe9py{vZ`V`Pr6mY6Q8I*}lkpuS1BLP@_;#|3J$lBU+$c7EBqYmQG-6R-XWORbo-A3Yf_NvI2~w9p z(ye*a1fqLa4iC3dkS5I`vqr0-X1uh0r{c)*z2Zx(B{)!oWAUwo1ODm&{dX^tv0FA~ zu#wvdsvX}dZDYj^R+B5xw3|kT;yt0{3~AjOc+boyIjQ0#klbQq!Vz%^spHd0LD0Fu z$eWJ98vl?Xa<#pf2dB&B>>gvzN49j`uBuG`=GF|U?jrH^{UG|}xk*KYFYtI7CetV* z_LJ`PsL^De2^Hmb-e?7o(Ohgp*u$9rQ;Kp$=7l=aYB9#Y!!AuXhIL5hn5Dx!>rmj~ z&_HVpignaM%Qnik(ciY_*ZRJPAg{(R%_1BnE2Eh*+7od8Y`u@wLWp(YeB15DW%AZB7Itv zOj45wUSJa(yumLRUU-879znt9?0^!>2_fnRQqNaOMsML^$%ji-2}f~tfhOB;#V#A^ 
z_-gcCIFH5LA&MDOxsFtVSeG=JHI8hTW}q11&Kqy{x@(j-pvx)HRMqfhlzX1)!M7p( zew@cyF^E_mmFcr82@7cDr5mX5q@|vX>B*x#8&js)huWGZ6^+u!zt*+|v*1^+>oW^F zHcx$xS@4APjTSQt(%+$BG?xD9x)FIfvxRd`n@C|l6Vst&t|D>a$H<(Gjgh$!dZWY- z;lh86H(Q#vO-s|ZrRrqqhm5dgY(QzkwuAml6SgWx*c|rZyj*mx(*Ul*%`HRQghBdf z+bFI=u zC*WR(zT#c-wS?p5U?*Hh0&N5_7f=KE;9V(-Xzh;m75OL@EWeUrPtGEkn8OC8UZ!@-j6izv0CFk$H{ag}LTo131M zW|@6;B*+vlEz}2D_z(0fo|%c!h9!BLO%J9cLD#CqX$?;|kZ#VgMkss=Sh}EV4(*nH zp@AdedRf})$i6;vUzYAWeW_KzY=#U{f!2!Ft(KmVU9PM23UKT^%GHD**GBr@My&v0 ziTbvR^w~f+Wtm*-hSEr%S(so`iXGhp8#rR4kylOha#xys2z?d$It8INFl5#iI!J9H z#GR&}HAfC}u0_ZtSed>&kp7`rVLeH>oa!;V?W#&x0GdWqLPYYq@ z)0~XrB#?Yc(}`iNk#)(C8~m8&7NpM$umh*F)Ee}d;X|=MrqVo4s9%r#e7C z;Yhv3c*H<RRA^OW-8MXQ!~+J$@ObK+O@kl^NLXa(?}7mjbM3B zVYIzbID)Rx_FL+7FNh?Xg+r7}))umFLo~jZw>bS+gyy*0HyOdT4CzcS-iZ?-b>_!|oX4;e@88Kg^`s@F1qidi=3E(i8j{AET4NO7sE z=jj#n-O#-aMj$C*Mhgo9(J^vrQEu!nAgGd#zDndaIULu^K1RLI zk;_~vM}f;ygEWGra@Rr@qXJuJ1j%ChmmLVqA89kpeZFVTdqCd}dPR?qyz2VF9!_I5 zY@V9UlRc!lR83Owk-sw7T`fX*{bs$J30()^Tw%wG-Iy@Ys(BvAz=AJ-2nvCPkM9uv z*dhFJkW>NTTl-{8Qam|kuUe;7ZeI8xO4S#JCeUr8?DWGhu6V); zs+tGXA~u-GP^4C4TOu-q@Txjf>%Ws4#qxCM8mJ9+Wd3mMx|^H*)&srJMlp%m`xrp4 z2Jr^mGSbP>W}qF>D5xp)T8~}B@)#V3368?-vLeCjZLy$XFwPIT?H0n4#Lxp5>vRZ#cX`QDSxm7QVuc7~pL9DV%_ip9_TX{Xb zGE=HBgIBL$bGANajLz?eY5(0vfX#z zp0;2)ixbf&%;MEx5b)vD+&$I#4~dKvc=o69x{qCHv3Il@X|*}y>vclX)(n8c9*~-; z@)>q5yS%eR5xf#|AyORwn=jumyrrzD5>O}0E$n%L7sbjhn z9|7F0l`b1qV8K@m_h73Lan<(xsp{WbRhhJL_GNUuFu0a>xklPr%zLb(nOyco9QB35 z3D&{DsT(9~sart*j){M%*#}_hs_FXir#TrPLWLlfdrE(gB`PiBgP8cgpL;<+vkGJab zOdFy;9IR=OB6?4?LR!1V>^6MB+%FZ%Ov8iL8AmLI^+xLIcuI9Tn8v z;PZu*H0f{EZL@Gi<6XSn@%icpA673xc31`G9U@y-gI(TaBuw38=B{~G=sYhn=B_D{ z2nP0B>s5IZj)3y{OER68?lYOZtmC>?LeDeynST}Q8)E6oa(1t4ew#Kv&6;Dzrv)4^ zD~)5T{Sp|t$VZ5DK7*7T6A6tA|#hzXy>zq*HN2QU*{rLZmFa&o| zkj{GIh(9pK2MJ z`e;PUGKNw)x%RZO0sx)3b7sK>+25%foW&d2d2kBT85bNT@x`rdb@SH8a0g7EXD$ky^L}YS(^hzs@E{ zYT4*4Uya&lbR31fNXPMrDmgmIK^!2xMiZ^p9F&P2VF2%}@<7u|^r*B$k|#$AIGoxh z+Z2m|T^QX#W*g$B@F}{F@pV}7btxgZ3KZ5IB7gCd@(F*|rq%GyKeISUaYDeGaPo+T zgKRI6<+>35_g?=TiJ|@x(KF0V3QktLx2D$J6;1qE>yMR5bCU%#3LBX2gCsBW5updI{J3 z2f)IK^o;1xjChKo^o;1{{2$JU7kKACn-LY$K5<5TorPc>+;2wA2GpAo$8-CT8Sy11 z+*oZ8F5$$R5ufJtUzid2SlW!(^syP?#hW{1K0L)h{5R&qs6*$&UDxVkZ3-;E{9dNT zC(nU<|K&OG>rb8oU*SEP12-;vrZ=E`*waXj&^Du=MjeawhDB*K_2NuJQ*UzsP5t^_ z(A16C$gx;Q!BCxFo1-zvUXp?Ty@P+)!GH5+;42;caKR68@G~;-S2+0V9Q;pi2mZSb zetQ$}FEv>G;SBsZ2Y-QsKUeVIbMTJ|{y!XiK?eR92mg5o|GArhALiia3;yR0zVoj1 z_inw~zIP7?zW0^efIr5;PZ0cB4t{9{{%;QcDF^?!;E#3iBLsge@E$)i0Dl*I_`8m! 
z#fUyfw}jRmrcV;tvB!a_Dx9KX(|*JuM9`NEom6zNrrqV?eKN?lnv#pakr?_lZ$wJ2 zhGUV`#6K#Eaj=>@>ZmpQIWJqt7RlSyG(Xe2(USR?WDga}6gMm;Z9^*nkgTF(n8jvm65FbEelu<)iBu+sF5FBKZD4U}0bRwE%NJ@FE$ zgq&P*VhA&6jnHNG@@GM;&tam69*-&FEf(}|EFwq^DX(P_zv*oiK`48xh@xeoK{1%` z_%_Vu6!ZKT%=U`mEG(gI4biS?Mu`8$dUrZI_Pyy=y*aRF)!SaN2XCiPaSZll z--i8_J!Y?1eSW#BHpIpWwcM;Y)ISx}y<TIGO5Qq9oL0xk&+9zRj zsc-C)rY~C7dR{E*L2;-*#t0g^7t|(cVl2l+_x6~w7R8h${UB>DeApmOKV|WQV?O7_ zmQfsPWG#(M!XYsjqAR&j_Gqc9DKvm%-NiC&Mi-PUk&3)#pox)O8c=AXO*e+ts7qoz zvc<8;N(9-|R>*$;jmX~E9apn~vB+d6Idpz2WVd}Ivb*Du8EftEiy|aix>WGh((!ay zOiSB+Bfj(F@aaz}RA#q5BFG+Wg={&(*x9>nZGBuzZ;VCuq#(Pz6|x!Mh-^VTvNOG6 z2J|UGRtK+)Z|23{h%74(nbB=23ZpQ+(q{#q2&3`%)?w~BdrRwf#kKU#*vdUG$Oxm| z0@>nkMD|2HvaYeno)csug;Fz1&G5q%8kt0)wDWV0b{Su*f^?}(G=^acSq>NzLw4H@r|YRi!04&Zo6o6)54--SiQUf zBV~(T7GtkI0Ok-XYgO#;$bR-l{&i<;BaLDoyTIz@o}unySdS@rT>dbv2l-$86 z*{Wv0)|tg(t@|URR8x|{?}X6$kj0vN0Z0rjXDx^=w$9e7!Igq(xD@+%^zC%@_CvgF za{4|B@S`Z8w*rih0wh{*Vn~YuzNY}!Mgct);L<1{Qvtd~0Y)p50K``MGNaO(uh)~O zkTrcBSKdZrRiGkvqW<0smG)LyAW3k=By_xpgzw9IS*}~{=%f|J0dgOM5StT^+Ncf1#qHn3gn$HjpvC(Z|@GiZ)uD5Ri^@^>bnhHgs8# za6VADBx)kL^4Qu|d)JdbmpaN%R1^-!5!ZvPvaOY2*)9Y{uKqXGjnVyOd&RF<8zSwK zJm>2_6Alrx2W_vI*CJ1v{_AZm^V~`vxi0Vgvc}6j9m*OnvsdJ**QhEY(1+d4ZKed| z3%RKLd}#zdB(+*Z%W_LjUv7Vrz!e#fI3!Q7VVtoi6+HR)n!-bKJEThJ3Y!FUZqfQX zcS!4vwLyQL1`?RqWXSf4>D(R>xqs3?~*SP3CFk*rMs-X72lfvF<^hXV8i5IcWv zW^cO%|6NZ;{SCT4)RsbGGdo&m-XfWQBAGXHB~EPS<05dyuAc=i5EVj=m6Z0m(PA2YIosDSDz-Ld2yQ9~0veM{aEj?L+VU3`_iBkYwQAFCh2 zeEA_l-!ZghVoY*YL^~O(y@ZK3?G_4@gv{$XB?LJR6ttaON{u|npA?@*%&6kFhu9_8T&u>+GD)9)I zXS64c_T()|xmF^3n*IRmWz+v54rUU{Dy6-%q-jFY4wrw7@&))f&%QG0{1JW)Ux22x zd8wnPBzawltDVhtHA&Y_2*2wrYrMoWfX}`~?SS+Rza(&Q<#D#){JbT3*D6PVCxLbd zKg#Dh;U#=_3NPmKlF$QUu_flDT=O?=v$Wt{wSBDl%`JSzBClf}2$N7d{Ogo$hFImo zG$)(>KV)Ezlm^6EAja>6Wo`A>YQ9&oFX|Zw$2=crFEy^DCL}m?WjPg19r^ zDqNmER<&bD7U#Y7`HKFATqB#SA4<#pL;A^crFwfvisTt<S; zH?WDhsG?jFeDYDoM|e@FSe|7q3(pDPAD$VSA&=8TcL`X$14BQQkJA(tz9n>%B;FWy zg-5l-%lW^A(ADyBnS97KqaiV1hkDA#`Fw=W3kkndJrHgmP7bx|Hv=HI(#+_DMhb@)WgZ) zNQ;(G!T8DfTbT=y$$6C$kmD!kLNdr|HVx`0WI!G(=54pK!LiBh8(7~Ur;7+FOf0B@ zL%S;i{tJ{vtl0O(uBbbqEFzgOI|h=v5)H+izN^7zBwQ*9gRF#Ctb~!0kYy$O!b+GZ z38Srq1y;h7l3)q<`cx|+C<&9SH*T~NQXrX?eXNA5tb`ttU?~uK4=Z7uB#g1%NU#zf zlZ1&@!k0`#qk*4Df@URrq!NY_B$GaCV@&!7t!~N#uoAuB?51TF7MCPM+qT3?cu*2< zvEG$9VZJ2fSP4ZcfsVD&vA-YVkiM>j<%_Rj!TviEBCcu$Ku1ClV+N@%MRhWeg( z77+D22Yr+Jy{r%orQ^z;kCrlwM5^#uH1WA;qQ^*_ZB^l~09Ia024m2lR%0fFwNVnH zG+JUMq+U!ybpG9KCEO+nKeSMdwi2F}ggdQ-fmTAjBwS@B^t2LANkY1n(AG*wN+%)8 zlJ-UWc{RDse!hZC^nf3}4owPO4KPzC87Xt3DTBf74xE6A z>CU~zKx@7yL`jD_ZX_M*cvN9xN}@#%eA<9L7=aOGdEl1@Y#K1#IrfZPZOyd{(vpkV zpN<*ga!SzcaXEe{IqtEVRA^Q1Wl6}l5?ofoE=hRMO31PjhC4{O$x1liN_bEb23iSc ztfBd(B#f{UWc@bk9gu`5F@Hz`!rXySM6~@((x+H2zHGhNmtE79*INlsSP7#gAvzHs zv=W|?1WbzNytz{)us}bt9vGebbJ7`v4(p|E#v;wpFoGdX4E_8gapBENAYORcetb|* zYWR7NS&5Vxy)D~Y9jHHYY>mV*;vPEgiA?bl{-La~!@Nt-QLg3kQEf_6DJK$|>ons= z@M+vvaO=#_0aZL_lD%yOGInNYdlVv86I-hKjU&FHPV{B?$*c@L97ss+OvNRN5(gvX z8#7AVnj{;K#?dxh$`x{OkDGqik3-Kzu}g8&?@9>08V7Pt^V~&Ey3S7El$;H{qZKNt zr)yf6@S+jS_uMyYrh3OC?_jn{s2SeJSYLm8>6LE3)WueH)}2^&b#C1=p^n zB#8x-tDcCBG=@*NV;0w1Ng$h3U6LH)IsWmpXSkh?a}GZtvrybW@E(qI@@nW^^{R)9 z3IndPm7<|0c>1|~Z;Efe%l8ZP5Nul;J?(iG?|r9lG@;#_^{Rws+}MlmaeKcfw=&3> zlm?{N#mb<eePHM&70zbfPUZ|?Jsd=f)n74T_?-O~u)qNtrA~C!*(k143 zJ_wX$KwT>0_Fb0UkSe;;cV5|v zOFYEeKIY->ozC3A-Zszsy~sIa<5m#=kdsJMx-STB|B!?7ya;4vgAYHcN2G2|7vUIs`$=lgY;CTNqu2!q`baBg_ zp3vIp)Ntp}PIO)goyggVE||}65IBErwAx`d>BpL39Ox?>WlLXW#p8>NKPRo>Y3?Z; zW?H29dk3N#6aKdcyk5?}a?bx_mO|V`Qo6EjClkaVrp0NAqdHN960OQyrteNAA(c6@$&wopMpA7uN?QLSzzp=fo?_;?@ 
zFw6Z#U3iE+Pb=~9yhK^yZbL?!ix?5*B7vd*r&1Qjpt|iaHl&Z7hEf-z=_(2jHxkH& zS^OY~juF;}01rfniG=l8A%o>h%a;h_z8h75{0CgbA%&aU!rXD5V8+hTB%F_*%v{S- zbg_Hz6s|*4kr**8LZ0>dlgvb6?=L8X7fP(VI|U;n9*H(YTwfzL5Xd}lg^lO zg1fy|_7LG6rfNQgqgM+Y>tk`OldUA&69&nhIk(CxpbVW7)`zQEs>F{tUB66`#s6(_e;J`Q{>A&t=-GcU z@^8-jN{~|bm$AvL_fP~#{0EuR$Y~F#Z{j~pij8k03xdo0_4`@$r~Yu2WQ|5NnngsT zSwu9t&XMn%Dg?oYifz#cB5e`g;s((FQ{e~hf!1ZDZHR4*wh`CJ7|rNaget7Xq?JYs zYI#E+W~>eR4aWS^f2Y1{^?nm$53pPDJ4{jRAG)9ICu;T~HJjq{FQwdnOFUyP>tbC0 zPscOzM1P9sg0j=iMcs(kpXwSsDs9eH=nWz#6rB?u>GU3uWqN2?Q*%)c(k?kh zrrFEHA%l~iTta*sPvHUZCsz69F`^!(IK9E_N=bFw^BM(DHE-LWR3Q74)mhc{auHxP z{~Y8fnj`Tp})-xK}!0TC@X}Q5k31sbH8>Rl4W- z7Wzn}w@%S%)q~dC%MVkS+&xD5W<}dcy~2~uhT5t@IM+B}&!|{1%C}eKA=^sbIin9^ zbVzVhiKsvSyJV0XYs}tv`R{Ad`&!N?_J{w*E_tgFNEKR)^a1aO9wV1k4{5flLZ!I! zFyx*KC_a`D>=j2y3W+n3!A@dhU0BVvN)Spj2`#jq7o+m1)4ULt_$5S$xYmr~tz|Z} zxGwo489gouBC0^Dx-+?~vCVQ|2?V5FP1J7IsC`7@{JzG02kIZUZ;jZ8pE?0p_i^H0 z$k*n|Jf`j!Cl(y>8xCq!Unc9v-VvS}ko#S~a8>P1=5mz*sW*C4m@7XZ@bw7dO;mgA zH~I$A$>+iytmdOq z!_FA`c6sv~kTLmU_0<~rZq&TF4TP*{q+JO$G`K97uKBfeIo#~dA$7Hd6lr>D4Rxs5Of1EyNlkXKoQHOz;~oTxveN!e|&{d69R?Xo9^}!goNv* z-HCfN@AQ@z2B;Q6#vg{)p}DO2BMahPZ=3eBh&-z#x9__Dlu`Syy+7c{o5lk0!{w~C zn`sI1fd08!GIr|Muw=;6vGTUqwN}=Le`&ovpE8t{%3N>zz9mET@3;^3KXZRTU#O*Z zyuADWW^~%qd|T^nu}~#FTdsO|r~aAxuGOdh;B4#df9igZxeN%Bp5ozCkkb&>xNj)j z;(m`=G>ll{=mFn$zsC*a;shVJe#PJKF`J%{g$e1hS>FTo{S)_lO!|^JTJ=V}epR~P zaPxT5@f@70^h5%j*#5{yI3Kd0NGzDBB&ABHMaCn6h|`j9wsWlM6w!;G;{^p79)XAh zoq`OEKQ_3Q|bKz98qW?)NzCZ8)OUZ6@9fg+~r#^vQE{ z-dA|2NK#UhNa|`Cz~KMwfQL`z%oKqZxey`w5VvyfuZVX@M$23Aal%^Pbnn z#CmQ7YuBX0BeB@JwJSWLuoy39xk_sjFjtF1T7lNg#keyZV_<`0wZYG-KH@s!_PENV z(vp>8vzEZB{zn{@^pIP8eZ-C@uCRC)t_e0~VlyNqcw$l&wpk2>+>S<`69m03aQVmO zM)3%e^(K<3`b~mC-YzlTmNz4>PgSpvaru8)LizC+>P!sN^-d8Cvsz=At}tj|sBDFy z!Nf2_{{Yj!s>9^g7-lF8d0&X!eS*uH55hSTCkr){io-FB&R(Qskc*+IOH4!`st?@_(g{QytNh>^) zO*~5k&sWg6YE*-IDW+qMpk@kBhV)D;JhMzZvMaCuA%dr_H6CNzp8j!2|1eqN`$uZ% z_8UfZePRTUSf}F#+Q^{llP>+!3eQp#kL>m9-6MD^TjMdD0jN(;>7Q13o-^@?K&F30 zvg)5#TjP-osy^Hs@yp6qcwRN}h`WIPu;4i{DTS?pD?0rh_bL0sZPm!7UcQN6!Hp9) zX!oufD~xj^p2^QTWU2&q-`V?FM=npP#-q?~y74GPm_^R#6=MnP9^d#6mgDsa6sUF95d=@(f1s{Zs4&alp+i482gb|)XkktYPbEo7u+{7 zAVXXkJT?9iM~NUhHC`41tLx~t;MWC&)Tis#7#$dFn!!~D_cusD_D%~iHx@o|)Yd2I z(tUwa`t#D@wu#)*H&1^a6U~kO376iY3cp1xQral{TT?F`BX@?5A+TTefpB-uyO#x1 zT{zR0o|cidmlqQR-f4g9l;9Wx4uWD`fdSQFx4&6gcxYilKfK|`%QcV5-DyEln}=4~ z-x|!6NOI`C$?mK?ki-sKFq&00Fi=%o;C>}tzLH)MNLNSj6czn?;#wMu03vkqbpbyL zsue)^Lh#gBa*SQe%?3|8>X<%(`l6h*yq#06K*ECn4Owe+4+%r6WBX`PdH+iAhI*h% zvhyXvPSn4a_7OrpVy_u{E0!9DzkdxP~8wn2{Dy=bYGu4ZoW!ZDa52Q|*Jz1)|&o&N5VoSZ1`_#-ey_gD6W z;EH~y1NAq*$VC+2@5k*!t`}=Z*k5TLk$A#YwyI5j&gXp5pqo2Nl%YlQrsgE)S&_Vkd6sm#CLOh!MEC6o)Zepw zmHm|i&cuDvi4fex>cT_CY2&)x#2V%FRy)h;+m|)l=A;Dc#!-juig=B2X*59={SLFg z*}n%^_XK`-&x?*{_&K9TBEJt%$Ik>dGpi~!mq}iqX!q}K7(aVEPj^#WH{{^Drf_X zY0o>-3c2&07!FjP`>VE$`y#mQj&06kRh%KYi=8{htec(9f0%V8_p8~<_d3Ov3i1=!+WBH#OZxqbNxuW0f#imgYjIZ@~Qt0b+Q2m|D>mn3- zRljPu1d9t4+Uw8wWW-S@;~^Bvq%k|aQHBVRdH)9_0e!yS??B*pcfQy^LZ7eqI~v?p z0L|X{A~Rc&O&iGsaIXMoSo9f}GeVznIV1ELmoq}2aXBOO8JDxCl=;unr?h7=?RiI9 zVbSL{?p8DCbGt#GLDA+SborIlMJ98-{Do$9)=8M_l@a~=|3mco;u(6UY2m+>K0DzR z^#9YRns5IAefANx^qc9^cKTcCbN2ts`Dcey|9<+sZ|VOF=<{$a_x|6>KP%86Ec)E1 z_@@X*@K4e0x=lYJ^jR6sj*%Y%RkQn@6an>A@GwFI^68gqAtSI_72@ey!vjQ#r$0qJ z4YOE^czReQ$|KIw-G)TD%8)3Jh(viL&%2}Q^u>W;V-9bZ)vSXvC+tZYxujeJL4?fYhbG}upJX@$DE0Jp7*FAtR8dtE5uVzheRYqd-*U#qhU*C zbVv*h;z@~~m581Ng1+ldgZn0IqOR4lu+M5jz@~(1XKB4rLbXxC;XHf(<~;9a5kIT7 z!-v7%wL@OcW>3PH|k0SnFScg2i(K_XK68$LWuKgVcQg-HCzv2hx)75R{FXI0A9OVBvy-ZqFab ziv1p>p1_+4!G((mu`kx_RI|Racaf&uyJaj!fZ|>lE#~R2SH+-#fhcca#?=Am!0l@T&f*`g5#zxz`DhLl 
z=D)BGSeMQL=Y4TmA_=3@?QdT7O~?bzyW(DIjdyfzz zSxaQXe^nA(cIE&g##h8`Yg2Y6$zE|ebU^L0UFq_9a|XNI>S=tI{JwLLv^`EA!B~>g zUTzqa4mhkjAS4~o^zZ2aL7((Z=>ONcK-$lqb{n$~LS;>6`(?u=O`JQ*0%O^OW{)Jv zVXyc-9s~HF6~vohmWJ*{XOH2pw|X~~HX*krETmWT(IooI8c^0R3X z&fEcK);P=)Ux=q+t~x0z!xz;j)sE5|%D}3&tlm9}MNYP?%9YL5*k9uA4lEyP<#_eY@=e1vA}0!`!s-*g zFTVY7v1+kIK7T~kjQdY<+4%#DY?#2fZHhIN_>HiR@^{BjuVCX?qJtJ5nr5qBlZf@} zgTg~U?vT1(YbG*cO|A-kC^v(YRr1_Sg{tr)KAQ5Yc-BneW zB<3r(J*jY4v5gYj6~5NfHnzHO!$lO4Qh5JDZ)zj&Vh%1`=CO@kTX^%_gh@N)ojkiB zEq7ExT7}Ds<)xb!CQSOkNJ~*^b&{4%8jJTilbVbq_#3Z@>02f3!ybIlNa?Cl#EPm? zHexm2VWe<>IPauKQmU~lZ!%JPs+25Ac})r`ymPc|(g`CiU8Utl(&`HnC)y%qVQ2

U%2d|gh>a@v`%JP2bH$5aM=|JllGWtoy|0xN~KRMe0N1=*_4BnojB9%9hfrZD#2)XxeJwM(e}kPQy-BteW**$`-4 zDmV{K>mbS96NCmOnS=~BYNfjlO*=_(_fU$#yJgsRvUv*?&;x z+r-@>{UV;R!Y5Ayq&Q&sHd8)eSRf9{ohRBR?59JWO4ljh6s7%x{V3AXg4;IR9z zVVQQmBX~lW@fgF;4U??g?N?6qSUsAnjN0xtYdc^`aP9cL!|W@2b_l-I?<7A0bmTgg zjX!@7sG5JpnzERQl}RW zQF0BPUx{7$14iRRO#4;#hx?E$Du%O}-J*=o`TIE31=!LN6&vm4eF;Kjc0^Cb?xXA7 zzuq4kOaE*87%;jzp+1LPXiLmgekG>1B)0QHd#cdJzIwc{d{@+ zyZfvDpPf&SFZwU+FEyRE5E-566h_C2@{?5nqx3RT_(`Cu-&q+7f|4+lpDp~B4mcdo zH5Z(3^2_30PX9B)Q1WuNdlrZL#52>~1~ZLjv6uf)7|M!%M_Muzi6i_i48@@sO4kk+ zH#rXg?Ujw^bl1I~{W#z6xX4+XchPjlJ;BApWO-0j)Of{u5di)yzgdSmgyef-HI|pF zmac@TniFd(cO?z8xi6huwJ*tAwINip>r9~j{^M)C)n)Z`i2CZ_e^4OdZb*5 zn=1;|pqs6+uUu2sbn$}jWlg(^FGLlpPA6mgd4G@#rg9UOUrebaE!eZXj*_D350`N1 zII?oul9YDgCDwiz73`F-fC?U-SSiZ!b;}4f&cR;<-Dys5-bFJwBg`dzJtFG|&SLn9 zA=RE_#lr0{Brl6?%lRw zUrGI$K;gZ|4>>FMCOJ#?W;ty&&XSrs`&*}_GQWL}9eDf7nt(HnZLDXEy?V8N%c@W% zojij^21<)lQ(lqJvFypC_*SwJzdE>oVx|1(cK??&Pd4J&kXL}I0+5Y(Hsk>mbQ=eN zha7$xJMsnH3KWRVc;Jz3cw=8acxXmtu<2CtV#%@W0e0m@;tTD@>m2$p_(?HdY}u8s zC$1!gOAt}Du)DeOV7G-YkJmSvE6W?HbM3YEUVH7e_px`aNV{>j{r2m#k9|lwm!qK~m_UV$Ig5^x;^oGW z(bDyvi0Mj*iAV=1(j78N91<&#m1Zo7rmIaz7gs`UA@n_6-63OE_dE8kdsx)0>|^gz zWe}RPXg@1rDQQ3KhRJE?p_^*HcRRjB2~sR>pF0S5%|Q>6N^Evyjlqe2SYv3^(ck7g z(1O7@#Z(Qm`KrtmQ_4PuHzM%p{WTJZWDX87(VB7~-g6{qP-Q zDGb4$kkmP-^$hSc8u%%Vgi8zF)^6y-t*HGqLt?$r`@K)&xW#)^+QCS zq#w#t_|`mx0`MJhcJd)l=!f!5?Q@tB_Y^QuM(PHLDr`+n>nwC+M(TVDAq(i^N{B-V zAq5PJbEguL&O(sS=q8cRCMDJ7N}P>Ksw^eMqlA2RSd>+c5IVpEt7$Sd?0p)4`1dOs zq4&XvMf#;H7Gs^lOqaQO0AF%=ltrfrXKDES+Ty|72%4*`pgn@VNvNtA6w6E)U~{dM z0^NwrKVr~wl}1W|?u|k{QB~8VOlDJt&!5uj(>x6Wn0EcUFv5IE#`}wOx=dI|rz16) zRMcupJgR3hPl@#`H%)>vs4(1P z-AAd-nWNIf$E{fCWNVnsDN@DfQ!et!==tOgRs)p8Q)W;~m=}wrE&Y;uMA9)m&nQWk zqUNMs=Q)f~k3_H*2Mjt+el8-c84Fh@IBbhCT zg}G{MRi+2_rt~b7`Ew~OO&c9G)?^ezbBBUDAQFWj z0l|1>&9s+UZk^#1jVDMko0fIOd+$in#}IE`Gf&hYRY7M;_=NWacm(rnaptg(?a$ns zW+gUA*P*A@je6T@e&2BsnUre28{J%<4zwe!+i)YpR;#S;hp zJp+})C~ypSp|-$_^~B4j)-5>ebsl#deD*a))z*2dBIM|y~g%CaNHhJ7{f(W*N2tuf70a3yXP6@?h#`SvX z7-iz;gE6YH8*rlYdz}2JQ|6&OsJd11Bla8E%=0S`n=vr_l?KlpyN1>)(BPM@3vZ;J zFL+D4{#2SI&1=s&io<%ayZ!6*2|d4fM7>)#VNlP%vVipZ%;BZ&#S=6TxRKv;*o}#> z50t&w0UxC8-b`dwyAj{iHK(THZ*eO2pTp>BtueS0lr$#8*l`SW@E+*km=T0v?C8bo z;k=TxPD^|70tuGFU7^k&-t;Cs`^f{t!$sYANjJ6}(Q@crHyy#c?TPnb`#thT1eK@6 zV7IAYwHg8GIzH3@JO0f!_3W)SeQB)~BQ#FvN%EK?@n z&?LZ20uD<8)DiIVB)}X34o?EiBj6QDfRhL~A_;H`0kuhh(+Q~R&Qt%hb4jkLBKCh0)JA#hbMtQCE!;ifj=wY zBa*=XCg9p6@D~I;Jqf%+z(*#5hXnk}B=BtlepM3qE&(5v1in|mM<;CvVFov4z>a=( zss?gGRccyc%ZIZt!pu5Wz~qN27H&>WD+%s|yr6AwjTczwaA_zqjVJqc_G+BF?QWl$`s?XA{*+)rQd#ATPDFIdpUq&EK1 zP!efVZ=jGkeZgcUB#%k0h%;UZNoOI@7rb99^4SF3vwXVuiF`IHsrr-k#3 zwZ4G9qCwwl<@x~9yK_b%v942y1jRcVfSM?Gx??2B)J2VO)iI z&wg6E;ggsm2q&(Oq2AC6WSa65uWywW21|dv?A!MMp_=h5@y%O&@Gq; z=dn7JgZu_Adz_MdCG67oKf;AN83rslhISIao!RrKl6o&seIezHl6tR5t?Ri*N!=mJzL0W8Nq$@; z&*`}V$w@h5ah{~EPO6)^+IBJBOvx*3asB^K-OSwGm#&-ncZea1Zf5C*fpjw~5TmCA zphVq_@x=@v8JmCD(2K73+5X?lGIsr-pIHmL3it-|{l6*DuGD0lg?a`{4WXVXVEP!S zV<=pc!L%{$mES}e^NaQUl`+ega%GIWzcQvC(P?f;ri@t!Wz1jyE6SK>aCEt9(hb;c zL`KP!F+7sYBPCPD@JM4RlIE{Tlra>ic+$Au4at--D*em4kAIB*B9m$;`&CAx4r~gr zi>#n0iCT<8pO$8BUv?GDsA*3i)yn2(sOx~dg)@ri^*r|TmLAi^?^m+^BxBfhVPZkk zG-Rq>KZxx+Kt*SE9%sSyp=;7j;yP7~bY<-|6Jbp~r<~q&MbPU^B=va5eL}U9iJ;XJ z2lf1!0_eDVCJeDtZ9k++V9RhRcKs$-<3zI1c9Q-YN}uwlQ=Sb;31q0 z>!q*CsOXhwp^N_DxH)#^-n6to--3EWR=igZztMO=6Mm!n1HcVO!)NZzwSRs`+KoHI zZy)=N<|6mU;j?$+Ohn)hSTU|1IgS!7O^c>N0#_PKdckCN=;V+tJ(^A_&6R%PbcHNk zM*nmf(RA;!YQBYPc2hO6a9us}F6`xXfI+to>Nq^81~OYQu)-L5?+kHl3oXsEF122DEIKlQly+dIn`eMm_|En@UE0la+=ut+$v{aG~J&($+mG 
zr0g{ZQO_n^zC0-tYP!r@Xe>eI{#&8v`e4>Eb0R%b^%esdd;_Ai*7yPyUD94NJ%v)= zs3?0&pzJ-SLC~aIj2)jsOeb4|cg%x9r&v0`;%pldD zekhXb?J7?S?6!pi(RsO_6P^(~#FLs{3|$_K;^^j7A=~e?72Zo`y|W(1y8S z4^f4s;7vv%ps9zq6s|uO7+NeJ)UNknJHyZsPrt|#`0B(8o$6V|@U_K=YL*Oz`Q>e@ zdS1txl%k4D=!(T%rHXr(f~{D_gsmiz6=cOC<5u}(k%u$PL~%78xUvLa@BJ~_hTQ_1 zf#Od@B<#=hF2{%ARrC_So-G3~em(2#VDkw-B9XYB<@bGoEtP$TPp+b=j$ihLc}L84 z_&5ojkN8msD#vCZLkB%%HuHUNA)EP>`@k?44pl-nB0l&pBXzOsh&)LDzz^vQ1&>wS!7B*lJ;EtX^~UEz5kvygip$y~}kk7l-z=k3PN zHGn_M?h&znv0ImQ=tyYYy=yaX1n3K_G7)l^h1A?AhtDWL-Dll|BEhbTYacAaC$q!M!$+S*!N@cy5nQK zUjG4m#i@^-r;+^^lsxNm78X6uvHr#upocYQ6R$k1&bf!*ZUWGw{0pSi991i7ZY=EI zjHRp@iPctDQ&i8sKUOXKepPigp*qoP&CHpyDbh*Y8$BDc4x$H^m7suaCQLE1X_^`1 z`TFNa((;fU6OmDUIwnG2;uOb3n6X*7@3W3$OS%0Ix9=i*Q93}fQbz{&bT!W?oa}hy zg+u7-a8DJ#%*lQtH*NDaob_8}-E42XWoY`2q(?(uh6Mp^`>^vLUYU(73i9MkzVA7BOHt%PPm36ln#u1z(a`^!gT!PN`B&r9UFOz z!-yIfEM0+>Nt?SktVMyL&U8|W9W0?{pd_LQq+6Xi958MAUKY4i2^^*hOh+JIM~FBQ zf4xGy9tVd5iW;*V4)Iqsi@omUufxP^2Y;oi(7BaeDP%)hucdWc`Xnt&L9-9?Llm-f zxaS|J<(YfBuhsoTmKHqMAMg>>st$SB|0eQZHYUtnb4e8W91d&7TR)DbdXdDDBL&DTD$ zC1kt_){RBiKCv(Cg}i+w?%K&;^hU^j?Gp!qltsG;v?GRxzo_uAneea>TYAm+?o8H< zuLT}h-8l}7?Z1#=H;$7o^}t0D4)hprTKIYh){Y0FH_MzA$asOC1{}UQEAn}Pa9~`A zDG)a&wLh83u@s^VVd|bUtiDd!A=03UH}Z zpx_lU%T3)vL7M|Jw&8&C41P5o|GQl=TGs!aif&#@<-?5RH@KS#9T~wV;KHl!`6y)6 z-AS~~Iua(=$L4Jm{dZ>3Ay9QFWURR9i_qqwpz42x1uHy9(0%*D6*#bS)1t#6`_Ly2 zQO~|cz5}uDBQ8?N_~GxO`yO6&7&LH{XyE8~*Lzx}_lCp6dv4Mo(Q{qYbM)2oQ0H|x zBK1d{K0?P~y50%*mbUdO0&QNC5x&F@RQm0L-F5GC(eAqLo-UlGX5Ce8QM!wr<&E*3 z8Rx4wza{q97Nx%qGj^l?V(d1Qv)dl(uU-Ey^j9@BQ87+ifH%fzSg)iIt$Q%cFJ!~~nb;{B z8qwB_6*I)@@#-ToD?#nYZszi>@p(QE@QLIN^8)&mBWCavI>2q z{ZSr3sNoZRsK!>D&%t#+}ADg}by2dHwrBjnvGg?BoA+q|$)7Qa)>Fd86U*$Mi!x=L< zeUbS9O<4*2crkpvF!%opd_DZ#(-&jJ@zBQrQMg!D(O*QztKk5};b#=atIe4Guf+X# z0}NM!o*tcZ0sQnC*B#~;^z^ae3iQ-R0@!L@Njp9_(h(^s*son5Pfs}TMuIPkx8|S` zr`|~Ljm4(%D%5%900=T%m;tY34A_a`YB(FNqGMGbxDSIdn@4)bU<*t=K9M+8_%zJ~ zt8g6-yQ_hMep!NMpMkwCwmsf((h)nevnEp}eFQGFU4J7jOi27IhB|>R&y^rezyk|< zS^`}_6oi)L5k>JN&ad%fYV2zAKSJM(NH7crt1XeGCINuF8e{ii3B*qQ6zYlBxR9gLfCIG9oS< zoeq%@9`%vCR2FohBX@4Zz_G6NV`%N%OZSsP<=FooTy=lM?T;M<4rqTYkM_sX6s6&L z7{RbRhp+=8G=^!o*!jpB-NGx@{2Af|`9?P>zUvzmU)FN?=pK?h`{DDtwc;cIc^_fl z&}5;HW_nO6^J47{FKVZn|49*PGC2x2U1Y4ZG(Vf9vkX7S*Xi@IW5VhBDIr zbU3ZM$G-nhX<47)-rMZPeVTA*2ULFi!(DQ^1&4BKc(9Z z7Tx*n^u8m7atl)#UGxWmeOJH2(nAH9LRs#icu>v38zN{pCEV=VOv(1s>wZ>=eJPnS>*Ebn3eGnyjlM=nj5_R)L(qPK_Acf>o zQ}8gz!)3NJml3K)8F?BOq#I22>SE~WBITvbp{bPF>y+66RtRELBZmx4w8r~@gGuQu zwhkAK02N;lUSFd$uk!LTST1NED*@Gc6<-Rf%G9w$>6cMfU8w16p&w++qtXHMXCgxs zlQ)C~NrTuU4cR~Jbh6o9YQ&og4f9>Lx=nBJsEnh z@9I-2;Yo|O3rJ_*y~jga`mR1f8-JrR!cQ(bfyKS6i)D#!r&7`fgXR?(NWj6v#QK5>%eKk-#NnH8A4TG3gIpSmg1ZU6teKsj9G9+5og0w#BA7Q301Et zcwI2t0^RveA|>!Eay!TQ49wO+6t@-m_FcVNFk6_?Aqcsjr&7BD1$Q_tMPWuY!sWA4V64b%(k1-K!-?~ zb7n(qp1$wu-GbRD5ofj?sOcWY_EBb|ih|j|bHr?T2m)n%)}`V#x-kw6f7wW&Jmow>;trWfE z&Peb0v_$zKWc1>igCb1g7z(d}vWSBO9ErQw`~UE$1yZ-&P#JQ5zY@S|`})&7uO zU+}8Qxh<3M<%N0CBO`ztDV!-lB5XK?jfoE%MPZ}j!^Tk9q(wh(;m==f;?LiA@aLZo z(KA^!L|K^>C!zct3QH(|5``s{Kb^u*J{x!^E&7#=X5sz!h5Y&FrS!Z=`51Qlm2V!P z{Hg)UUo6UR;pP2mlPJGKlz-?F%C8xq{Q3dP_YYA1YEeF0Fi`tx;Xv)@&p$7v=SAAT zet_~H7@+(|1}OhgQGN?A?^l~d`5mJCLzhtg;{%lcD*i0Joa5wX-Y-#j&KUkaT2zjV;Kfv>|?pyaB(EOZACrL9_oQo6H9agf1w zMpKMquu%^U41z4CfhMrC9{@Rg zzXNg>j_Chp%~;&JN{=e-aC$7(j-p46b`0dr?QkT>(!!C1WW1ALOAEq)_;d$5dy!|}y)t8cYi^zME$h$-2eMsc}fOfrT!6RCdFDUCr zwQLxI?{O`YB`%Kc2PKjzs15x2SRsG@ZYiFW=L6cuqvens!OLNT4J(Jq43|(&izsK4 zD5pb|b0}I)C|VB53%ndQq_A?BY;XzXNF*ard-(IQLjL^SQapKkjz-HNxqz3$1`bvZ z$p9DWpAhnYutk)!NtDyUpFcW;r#bjC8O){9vcOcZPrLqWWNLP9r*~5GYqVwUk7!9? 
zFt`7RR#WDJ*R-uKVs%y4UZvGo3SdtD6yC5~5U1b7My6Tn3w2i9L`KzcBS)|xT=^dh zDuZD*qJ13~md3*H=_*yc;VkT2Tv$2_J0BM|iiJV%uS%PNu;I|JsDj4ek}F&_(veoW z?zDua@$W75ej&yVx}TzV8up4*>%Zvtj&XhcQe3q}26*ctEb#*oWKjxp`rzxGpQYiZ z@Cvi@L&6{3nns5#HtEq%?DG1U{vG`SGkdas)8sN(IuyWb&y(`^*S9M-UtsG2IuUm0 zL1vLiOFXNmg+;&@_VS=La?k(Y&=5h>+ULIhR&0NSPjv6Zt!Oy#z9tX0GWWWzFT(0_ z|Df=zjr%ne*cIO0xPJ&4f3ExmqW`J8J9M8zvp81+FP7|qDbNnxw?tEun~4B?zGs1^ zKDRjZ-Z^RerwBM1y3eBV=hl$5=t86q6_rR^$V_y(c-a;H$5-9<&nN3D-a@e6Pr;i@ zr1$YM`F`37@EPw_pggfeYga%F4JdnE{bZGy)L|5@n zsMFH8cL=K;>h*Z;9Xa8n?vt4r;=^kLVObUYV4fQbL3zg@?9ELGtm5`rl&%pC)`P{(9;O; zrJA1VWAqgM`pQ%zu*gY9MW3K2*tQCKQa*Api^!>BKDKk(=6+e)HvcqKJFe|8_l6>F zZ@AC%#sa!m&-in~^pdoDchI$a+o7+qjYC`dJYDGk*AOb4bKkKe|d>Jhqn z?`kBax4LKPuloS*{L{IwDExO)$-J+7_XNL|FY+^7%V+=MeO*?!7FNkHD-Jgv7!-D# z+YxE zop!@Nblshyq8XY{XtPCg(r);vuDjR%UW0DRX6X1vYd&L@8r{Y!^|r42NNDpC&3o9Z z&~={yWopib-|pLPfBG*1@&R5yus{7LUH9jd->Fb%Ngwp9mcFl8KG#=MKKW~8Q2jr? z+RnnV+)7x;{`m}D#VagS-%O#;Am+L7KZpVU()j8iv~mC7@NXMm)r5Zueq;o>3;g-& z5ZHOUUtm;E4SkYRIk>Lt)A0L^`%@|KSw`aIdx4QRD`s*&74sVu^RhsNeTHVM-GBz{ z3l-@#ui6d2)O8=mDJZ#OPiXV?z!18o8w}ylyla2@RbBT-p(6`4pMe4UdKelQRxs9< z^qsLAeyhVrBY@nO!~f_*N(Fx(+VdN96266s=I*vX1OLas8~A!uC8+PfsirpAbbum<&j)k?a})ygMnU515oZl-Hx&SFx2s2)b#M- zuSkvDydA^^TEzH$EE|pfr83}SrdT3PKJ`_xdp^P>91$$QK6=C@WgB2iSCp3 zE|Ac6nq{8k+|ZfT{UyCV%UFVq=G*L_>p^#{E?GHTd-<6eG_zgER7uxAU!Uoflx~L;6l=$U%L-i$l(%NgmE9HhJE4Bp%FTBHSCLF z>KU6RJaKd!wZBl)GtCzYJ#R^%XW9*Bb-;WAdXA>&^8=*NFM?(m=RaDPM<)t zL{2HuFYzazTIeoUT)H(H!L!?N-PY_j^Mth7ZB-MJP%LPb(QH489r{|7W=UH48O_;( zbT>Owex^|Bc6R5hLa*yE2~5B>c54c-uL82|rJaf9?KDREC*)!J_yXrQzv?0ZV&Ip_d8sPe(-P*Ikq zteE9m!^Xrtl$Ftnb?^F*PES&McY|NL<;Dnq%Sv zaJRh@?b(AGU$1#_Jvx_h&#uts$(r9iP9hFa^6UhnMa(alWWaji(YW%p66bv0!5;ep z>56?+3sgtn0}sOspQUJCVc4MDi+I`hb(z*!~764_-00uX~xqCa?|BDU+V6Iop6-iEN2kk`DAA zE$k4oRQzC1^17NoXuMa;Q&+~xQ|q?wMcFZt3bnpA^{amaYUrdF@%go>Kl*`q&5PE%f5F<{4GVgUi#4Il1`FS~PVeBIz0d>#-%7DN3 zkbM(??v3`5n7$9Pd~l-)b>dGz79`G01~uD9Lgv{{gy_%J*slVucLJhsgvAHKR_A8g z*HHy>iz(q%yK!#pG-E)>U^a+(qo1QjZr011J?!u6_TRvN(0+1ua6WetCvFFk(pwl1p&@z2Rw+drmui-iYJ!Uv7aY*Fy;j2*2tfS!+lt!V7A1XWsUrSSg!y}IMg1{Rlq1*El}-b6BQ;dMW6yF zLZC{=T-AjxRL@npmnKss%tcY-Wt<0z*;kd54OQ@KQ}9&JT!=U(C=}Y}rJIIsGj;aJ z{$Zxzaa^Q*>cp`jw&~Yd3Qk+JbGDWiyu#1vzGBq=JY-D!IPkh9*cUv1Y;WuNh<1HF$~0A< z=U->il(iLKE|<2CO1mvsb-8?7w)t{hz!2FMoSjkd@){j3p}aOvEgz;!IOZ2VJ+kn) zsd`=pOg+*~1+T3B&=UO7HdNl!)|z+4Ajx#|RnXL$v~z)hpRj<|P+D}xU$TXNll82* z{qBYRnj3!~N9$G%T3BckVv(jLjicSL1bx&_rS$byPgW?~7y;S*QdJ3JKY-Z<*A~1~i04ORz~cW#ejotBy{s3aYusXjRp(l5SIY3Am9+FU2dEi3*y6TTEMVEie!mT!~70 zcqL_C$=}@E7kRmL3SLsSwie()zpbs)72j-z*ykyE>ALguXd3o6dQjC)qA5znPsA#I zl2`nssr^2RX$sDkOS4}$2meK93Ld=M$f_N0583+*eV{Cc{nm1gh8 zgn0?gj?qIx`!A*ZR?u5Q`B3d}v|Ii!luw&=3FU86%HK`qRF^=4Q4!(Wv>$~fl>Y~% zd<)7qy%c4EBlzl&sq?epX6IHSVN>vRb2i=YsK$S1Y3E_k|0u16=;mv>QoI13K6s3g z4)6>309WAjf)BQHKG;@Ta5@0#CH0zOnv|wsQW`xDxwnrVazC4V-obgv=dj+y4EE>6<6_oQM~)K>FxT4~#}nJp-YzQyY|1>dFOt{p-{)w7wL7f$*YBfLi=0y(9g z^v|Wh&yWqpSjYn zu)~CFCQbA?8*{Ke1x>C>(M7ffn=^19V`K|%1RJ7VPv?b8+a7s~*B%I`^Ax5S=^Xj` zB(mulj!j_Rb$d8s6_vN>v-M-BKQRp0 z2UnPZyr2kfH;?em!#?-Voa)-w#SX(+_3cdoSUN=w^axNdCvbtYWaB>R8J zHZZd=jG2RVth`{EO_&Xb<~#w5;lH zb3v}QmCn?%4|2Vnj^w6-t%084>D~e)rVFIfEy26ejFH_&py|Q07@*84ZRqv29BW_rG*ptON6tB3ip!VKXVkW1=IjpMyvHaV5W=?te}LYca#=%tUbg= z&9>$Y!c=Y2NG-_y`hCoK!3vHxr1mDRzJfs zG5dUV1Fe2&eR4--xvBL<;%o_{I9ngwv!!iLd27e{*3$;<{r^I=+WI?0KjCt$=VGm{H+jgh1w0Y)&@+6H=TCF z`~1y>Hv(Hhg>}Sywhr*twnnb?+j^%H>Uod!hVU6EQ`?Ui_S#ygEyu>;{m1Z_ z#QT-;aMR#9RVJZC5Z#IiC02@Wg8F3$o1KQE;J_QI=_nT6yN@chD|FKI7x^OoQL6`{4`n3g5O-g`dPanpI+iy&*=RN z?f1_Z?=-j1cyB@qh`%)WnsGsC@GE1LF?e=?sa<~*!IkFhb672=_5ArdOkADk2K#R? 
zbc^$C=HTb%;HSmG4~>yg@8U3IimBl3z(=f?mKYZsmxwiLRGy&nwxE;fNqJICdjIF| zpBVTj2L6eGe`4UD82Bd!{)vHqV&I<`_%1MzB0DbGvt$@!V9nd)KZR3Ow-{@Y(7dQ{^n5Y?)tNVz!nWt4#9UV!|Qc>r^*4B&0g=2{cgG5ZL5(R+%=B+VN!$5=~Bn? zc-;*iKVnfdSDmcOs=>>0hs&2EH`rFnSvA?iq+!F5M4l?w+MM+cg3FE-He@E>?Qqq| z?poRJSm{S~SQ)+vw-!yPt81v&v)5cZ3JkcGyWCAKX@#TOkGf6uI(!cA3P<$aYO#79 zHngSM>U06*7kHR;Q;~VrRpW03ep+Vzp!;q!8#{E8@N=Nl^1d)~vQULhetWrBDHsu9(+{`Tvfq{BiM<>4yGlH<*+0@x7I6MJK1+H1^+PLw>Cd9?O1Sc*exYCDlZRgDZ_6zAm910B zK`}I|>BY6Pns(isPyI<@N}kj|gnPm_^%3Eha7z6|eMJ33eM0>}eL#38ToaxNw}el^ zA>oa1Mff3{5dI10glEDn;gfJkcq3dX{j1Q0(yvN?A`1H{?N;bWp%aeP@a7g$e z94d6I(62(b3cV_PqVR{p7YaWp{e!I7Pw9umcO^XWSu~?BatQ8uWgS*_eI{B^X=S_;=V{R+%2$l+c38;@@u#Z)3Ny=_s=R?mEOLw{dr0#DcnA$Q?r*JJPhl{RMga`6XCm?BUO?2m2zQz&Y@K z)f>ngt_ObeTYZtcgnJv$%ivPQll&KR6j$xscqgZ%qx=T=%=+r$}F96Wm_7<8UvupNV_`ch5s-B9Foye3-j8 z;s5@|Gm*t`6t?Q$&O}=9d=AgEa5Le*@#ozA3{P@3Do3zwaMS+%OyrGUpNaSmo{6-= z{ZF{3;kLoO3wILkiZ{+gu7}gZ&49DOErDx*TLae)_e;2E;X-f+;Eur!dh<-=TDUwo z%Aef#-;Q^40lNooHQWPmkHS3(_X6C0xcA_`fE(O>CUOjoI+u>|*kHBq)I}bPF zE#wWC3s(fU1a1}FV{k9SeFS$k%9r78g?k3>=K1F$GjBf^F;tz4l-+SI^31|>k)?~y zMXtZ=TqJk#xrk&t7uk3Zp5mvB-^#qH$XSX!rNmR7iae#rP2&VWC!ST}=@QS4;`x+# zl7%My$UK^U%6y=duhfS_Ih^9i!YSd(yYgfiQN#qNeidmcDCtiDDo>?e%2TOd;!}xF z^MB%xV3b~wNfbEEt%@8qkbFe(D6Nt=$zhqQ_$hE9W++)obLU#jR%2ylNktW{qNZlm&|{cH>yzAS$th2lS(K_pAm3P|1+@=Q5TzVSv`7jXHUbuLFuHo~UMKBRZo>O408vg`qKsrCdWz#)%|l%d4J zGOn6MUGDI@9Q6}e{q44To2%OCs+01tR@2MWXsg@)LoC9k%Km1LL!N>4T@Ds-wLXWR zURDKiSmhMI3+p=aduRbTRpx1Zz=B-ucG02_ps5_2R*%=|@{8>GLR7EU%d|kmbC}e` z3LHD5AHJ*oEA=xQ91T``ptjcGl@Xy4!)KeBB@>o_+n6mXNvVYna=5%Mn@0g)z3wv|p_o;(wD zLoCYQ#Bg9SF0>YxS}Iu_LtGr(mBza)E>3=2ocsafOpJ>&alklvME+D2gQ%W(&5wWO zl{YAnIW+ppq4^4QBEz}I;cW=`3Fp)UvF;y$&LtB2R^aairP z>gBp@xz_EKXD_q)=bA0D%k7t)4IWN%P)s#D{Ns4v$La!-Q z;c1D(6s}pGBR8s8ObNEqbUSHNm1>=+$FKUREB>U&A+@9 z5!mGRF1Oa${5DlAn{T=AZs!tK8E-Y!Gf&FW8*cH*SyNV$^0Ura9Sy(?zQW=4Io&R) z-eIftvC3fP;_>SP!z9vj;+>M~4f&MG0FLFJg zz3~xiosb#$Ob#yd)(I(vNQHTaNnShWRGtY&fc}|OgI@d&HjF8<3YTGGI&7jSRv{|h z=|W#Sp(ZC1ks&_W9o`(d4g*Y99+8eZj-VP5i^E9AjuD8u3o_MIc|2ife2$zou|}?O zHb9%6Rj?``+nfCkh@X_3NWq$Q@e~T1k;KM?Ib=LlzFW?kTn`B@_IHWs7L4(Tku5`t zB&6M%JF!-tAf8m4CTQCS01U%`%t%gP8d77wf>6X1v;h-Jt<~@L+v=_GTHPLNz;`#b z8pBw$pkY$2N2+nMj*{of|AMN^E=LoDkL8Hv_XZr(D2U!`oL&T(D-h63+Zs($o!8+2 zZ#wIi`R(-qht#yp>31OZxGc{&G~MTK#{R|(xz<^aU5RPzS?#XJ{DAU((_}WF!S8ic zyS;$d+w6||8RLL_;KrA|TEbSx-B}EM^)*Z6)$;U}Gqb?|s!(874Wc-8h%;`QjNJ~8 z)3yA+TpN@r&7NOYS%rR8)kZ>5Iw>pH;E=$vfJtjO)zp9@^;I71)Swx9H~!|*3#MDa zU-T*@a?Tz#fd-Ei$t1VOugC1!Wb@YKD(^I@Mgx3)n-}yh-#%}WHIEOT>L5d6(B%HH zCR_W(n$ka(b)FcSlQ5p(xw^VQEn9?HYq3jZML#>xdFnBRvG6kRvBL{Wv(@_`KHMme zQ<#~A73%MPNtQdB_2Bn_L-yF5UcrzFBLg3SFfi1*flr6a;kDK0VvMMBk$}r`6;Z}# z`NoUEfHuPvX%Srv2gt<&v?un4Cc} z&Ymq*E+~>L3(Qh+=^aw#?G@yzB=h{a09eRbE9T!>DOpO4Wi-%3?v*M^%1ew@;$bZ^ z&YmY#mYNrEUxlU8S~kDJA}Jmoj1sICB$~gV%8CulVySYr(Oe=`mQ+cz%NG$7P==Kz zBPPr{N=xpP%1bIN3#v+_stRLSW%>Nd5@~Kp6|alp;W-zV8t0<&#<|G9a-OxcY>w*f zu9AxRQsw+PRRAuGdPGg31U^Ud_mo_L>Q zHddOf#;U3^;hi%_6~^u0W%R_dFj zl{djnfV(GOI`ZTHxl*ck*Xi)#%sv0HlALup{hi>FFacv9m>^=+8TE0A9IrlSeIWYc zZSX-N^fs^*!XE`LylD{%ULiH>7tiFC%SL4u&uo@xg~riAwGAlA5mD_6n``yT5?dN*wO9k;icjcYXFqS+1@8bCyX^?=CH zA!pr^n_nwiiZB4Og&xb%QG;HgGFf6;<2hXJK;1Ig=do48%E2Z3mpNo#wb$maUM9+s z3D2_5<#3P`#`IXW6|h!dCJ=a43V`fk<>RWA7LKt6A17{3S1Jt(){$Zjq-Z!0;<44B z6zX6_(W2IH$kY3&JrHfcrE_!<7E2Gc2{aM|t4*rWc{F)IcmZml6T`dWb4B$^Q3V=S zO|%}Osg~)O%B)O6r*TkTy;@elv~W|_2_%gA?vB2RrJY{pD+Z3ZSY#n7s`MdcFjA>x zt0^l)mc)GeMWo^=B1D|L*dmn|!{4JCJIFFvg}WW1}+<5QVNeCw4_@7d;ZKCQ8I#kj!0qM+VxFBIu1)y0S@B0>{%@wo> zs#BG2^=4AxTEPTZPD1IMM2iZ6&;_mWRN2Fq-BxIaZD4HZzMHKs4=G`oAy7<}jw}_u 
zUyTpyS~NF-gbCFHBALa9(8l8T?;Z@(am0b>f-+cwm^PHNF7-N8*i;xvf-FvghJ%1P zG8HSi?5rz=l2~YG5ja3-@|>v1ODQZ`^C&~IO@}#<%Hr$6ic(a`3AKrXQOPl$i0xz0 zu%e`kg;ti{RdP2m&=LR*12mCpfw2ofiom^;5ww{UIx)&YjR70f!Po-P5iWTRFu^K4 zL+y(JqN&6<%FrbVX1bY%Y+jcy!gJ*}*W2ck(kd!jB$zFr2|Jx;(~YtiuKKFzl&7z@Xr~XqU&v zQfPXr9Ui|Gst~I|mu$q1kHs6#iisN+;$yLnW03~rb5&6T!YTQMM+#1`0&4HEPVnhL;%C5V;w|tkJ*sFwnEkcM(JTG`|0)`X@enU$(J5s6tkAx-swGnqq_L(LbtT9`LVf+AB2t>%PD zaYb(28;#2egaT_=SXp#{6LT6P!~UvtEJW%PoZ%KIfuSVr|JMUWYd$nlT)jD!w0kp| zrjxvPLl2H6FQ;0_|4?oUViStaaf)V=R$5s7I~-CCmYfa<*ly^ylV#m6IYTi< zBGI~92udFHZY2~MM(mB=X{;zKEt@-4o^WC71JZywm@zsu)&6C8TW0ggcIb8?jWRoM zwkX7U47xa()XGpFz;MAK%>bLstuU6Gz&JDL?UH_qEeL>fjb0O2;>5{wtq#_K6c+7JE72rv--E5uA`-OR#sJO2cL4^y^q#Tl+h}Tjt}%%7XCc%fG=k z>$(1J+yG(fc#|#ySI4~+{~YA^$Oq>l55m#&e&PS?Q65ImXAt%z96f(4{QvuX9!5`U z6SbY;68NM(VYEYl#2-7;!RaXughZ51`H}ydIO;}nAR*S*vAkLk5~Bl9BYUTK{MW%A z3Vq;(C5@3>3ww{z4S$d2&rIE;bcXcIR63SFH?>yf&rHQ)`7={^HC?hJ4rP`8TlgZ< zV6ayDu_ed!E~RCa)+yFf*9x38@s?KlC;J0XjF;YampQ+n9=bIftr{!)90Ya$hc+J$0m5Q|}uQ zy(x#NR20a2FMfY&`3h^bt$G+GJae4T@Q> zOpU<4HTJUfGPu#^b6}U)>6deJbCoKxgwP**ZLux6C@&-+jO|BKcD#~yppSO&_zpV{ zgH>d}L!0-|*)o{eS?5qMH54L)t=_ZDCj2ClP{jcO26#N`843gQA$O=0>tjtBCgoCX zS=Ge3IM%}GrX0fnSM(`$Vn@}40RqPdRL*Kl@ra5RoF|rPn~ob*j?bzQZC1BWiRQ%a zA=#XuX}~IBl0$e!jr>uhuCPeScNw7#uXP7tAb~o?8q8}H)!PMpgb!O>rDY;1gefE< zh#B3{j2&_x_9iJysL{O?r5>uj9611l+GK1!$_TBUn6E04XQ$Uw3-nYG-WX#teH6tj zN(LlObGoW=5)s=B?9d{!<0Qigjx5@-hbV1ByJ=5#8pL6T-0Tj>z5qOan1gY_QXyq@ zFWHGPVUtnBtYK1-Q6lPbDEKZiG65ITvDXTPKFAm*N7WAghQHOGW=XEWRvOACZfo%M z+oomg5Qk+o;|;_tg4H=s4j7@y>!jg@l2 z7(b!)ocnwZA0unxV~z#tRXSG+S-x4EDOJW1cES|7CXRD5|FN;wkIpAPpTd!^B;{tt z0HQT8)<7BT2NO2hT2DwsCNGm~WG@&Iv2;paw8-E!f`JFipC5T;;Xas)xRfnsN z3^V-|4l7AtwC#d`*w|yn2EWldx1xj_{hN#xFjuIOqN8Wb4x}OAVs;u_u;f%7PxXwE zW8@yq!{+k^h(hIC^NPfrM1v#_OS|N`w*yHuwJ9XcM*|}Igt?T~!G6}2h%T&DT-FuX zjkQxK0%4>`Ea0+YEZ}{v4g`DGD9NKS$qc1mAk!>Rs5&_XS|NJG1=ib|(K!h*z355y zQzx%$v?T_5$uhv|@PHc;6V^>i^=oT+R8>*6UPmK#qcH{XxMH?v!E7L>V3;~l`)k?8 zV6+C{bY-w+xaD#aq63F)PGA}aOn5$+=3oQ^^>{IrD58kY=Wxi&{CVydxU6%#y zxtN@AsNLbMv$@H-G%~=1?M(34ivb2puiOa1-hew zwY6l&=xvAvV8h-DoPlv_a+?fE!t;}aPgKH*2C&TuL8dX_^n%TZHiV2U>u!$uY2e2= z;&)&!A5VxIFYDZpEEJPYCM3c3kk1a7p`y|d6FxvKSSolFT4Ac`S{Vw~{+SFA^@n~I z)dP{`2b^T;>L@H<*6}db**t8btgA;gX_t~LHA&&z;P(0Jn`PQC6yDp(AS~uLsQuWB zabp5i$0nmOB{tcPDSk2`Q~YE^rubn?hNVCy8Dk`bSsXRaKtn>Hna1sC7;Mn`CFWrX ziD9SKFCq_1h)A|0R;8ra20)A(G z?lM>?L6}iada#%2a%mYi38PoceEh7C1W6F<4sPF0`$C-C$c~&K15OfbihzxYo_1Vh zaf(T2tEDv+`gMg9$7axP6pdyC#N=kc*=*j(@<~-*I#@fP zK2Q%?2S%SDRj5t9Znr-h>oL{rSL3L)VNAC|oWyA>2}5R72QhUjW=DuK&`1QT{jgC4 zDG}C~tuvQJX$_14lvZZ6_{8-fU>}(7K$c=cxQ|c7u++w}F$l}`{#>qEVbFwE{Y)|_ zr88g3K`*imEGa9t&MsfTN9yPmB2gnCZYjhBc1$*6NgTI^miTgdxp=OSl(iQh(Rl6I z6hM>e-C2`X1t!p+CD5tEv>i6gFiVE+SsZyIKPL%{5>v=B+EPT9oK*2u2X#5D+iPUH z{e-PF2xT?S6;2;poY8w^b9x-iz6V~?w|z}R<_DYL@eYWr}#0m zA}aCkrN&*LS}Ti;7;QSRZVHe#%7LcS_{a9D6G zqVwxi*=Jipjp4SB)sJu^9=<^e7NLg!Mwe5!1?e-em;kOKO{Sty5Jh1# zNm$Ha50O~|vthhA=QV)>~MJ;O>e zWYFeQlb7x~VwR@UWMyarCx{atJ{s0s4z>p73u;5cK&MO*;5csF1?zJRp=={d7~<3D z&cGN7^#(~dR;&~;sgPWrP3sa}ENm6oldZmKtorK7bp;6`^Y6-MJ2 zFgk;_m|gKooOb0YC-Rimo1-E5v0UKg(MY^h{Cs*ym=VP^Nu!OGR*DH@0nUzHXrzGj zH+8DbpOB|7V8;}j5QqZ-71^NjPKQ`A8L99oz$OzkmzD z4Lx%{@&TUcI>~yGAC9(P@Ryi8G4epi`*CDTn&5YPaFnU8d4ghs3DE=Nn@`f?za9(J z8cFYmPaKSdkE^OAy(8+^GqEY*RZr5q5}U`+d)<=07J(!MNLiBM5{J6`Q%l&)Z5xx+yfuU>{_ z3+mngKcAZPgdHf^ZPX%Cjf;msj1#YHhtXC|h2br_n2(2`$Gkq&4I(82a5*1dcGd_4 z;{XV2euVhgL211cU9D%Q!RSS_}lYRxZLM46uitnf}LLst+08JTVHH6D7Sy?VOnuN^>bR6k8@T3b% z@DK-r^?JyicHB-8zO8o3;>3Lq?uUyKc6rG%rVL>n~$ zGUj2EM*u4~v zJ_VN!NFyH5o{tMZ@aY!2a^%Ih&R{Vt^cUk&g2m(D77H5Z72%SgJcllykcC3c&6ROV 
z#!PR{2%wf|ohj12Fx*n%gHE0fkS)oaRBtCNSQNs7e%iJIl~Z*)ZSTj>0L7zH0a~$R zbZ2DNzx@;vKk6}ljvs?S0B#%`#0 zZa}CG0T1jza7K}4aAJwL>4Rk{i~eSHw9(B6m|vLgg?EHV7@b#Wo)HQ8#3L8YuO!sO z5QpIoplV3uAXKW~Qfj-HrS6GW zL5ZJzE1Wpo6E zF=LLzHs|1Y*o>$WTa|}rfIP58M+PAYdE^>ub7VvAK>5_7k_2O=_qN_29Oh1 zWUxL_1YdRaLFj6n2(kx-Go7?RwJ1s~rFmJ=%s4PQ=dgK{m#-qISFe_EP{L-?(g}5x!Zzikd(moRJHLIN2gJ3DF*9V^9KFiPtLJ z=QJ-zE(=uGI|3NNtBBg>5Vg@XXci<=CCG=HH+g+*J4&peX*xxzw2?G$5wLhtadToH z)5xY&IEPM4Mc?H7Tp=o{6(Z5bDv9W_BX&?2lbGM3<_#PdRu_#95;UYBPER)k2!4xZ z2Qdg&J3v-uw-l)J^Vpq{%|CBIq%4!-*v5oGh}1OAl0u>0K_X#J@@$>Cj$E3MRh2SK&YLvxX2VjN zcErA~LmsHa@iP0s!}tqik0E`4%=J9;8VB1{zHsJL_2_z)j@wu)#tKGIMP+mJR4rL7 zW~sh>GntekYDtY*%y1fIw}XlVz6ltwy`t|GGwayY9+NYfZJ|;MN&mc318CfYTrDR@ zxT2y{rchLxc#|eotT}4vU{S7$npDy?)9`WAqS#yj>B!-#gXt`0cO|_lc~;`ctc;d% zx=v2(74Gs9%=1(kMKh_+DaIx-MQN-yEO9Iy0cdtpru7)3*NGa$G#;_hfrrs@l$t|L zgIb176l~NX0|2KBw<0NhOJWD<4T&w{NH`zTpTgAf+7w<`jhl(X(fI+k?X)81W%169 zsd{7eV;u!MW3fd}m8`{fI`y5f62Q$(H}jjDVq=@KvCSGD%S`Pf*_^gH8=!5$-5paQ zu?MghPD(*+@!}p^kv;VbGa#Ztm`r$JlM(Q;TSTIq3fjkhFj+|RhLCz%8O)S2DGd8R zScpT#5bK**7spgrN)lPDF(E)k)mk#`zjCz!W@8BBSS&QL$u+tP;dO{Azhb#Xv!gM@ zRtd!z?o1)O4wffhcgXyPkpIKp`@qLBt^ea^cXrclvfZR<+N2uowoThb8Yyi>+i23J zZAuVS1$6~cK@kMG_ZFd5K@miyTEtatPy|sy5mbd1K~+!$p+T=3RBisw@BN(fOtLeX zrPt^4eZ79K*Y`W^<~{R1&w0){&-pv&oSBjL+q8jQ)=io{LXH=xvHwKuyQ2o8Hv~1M zCKDVoj+Sp4;i`t`6lwb5BHYST5v8Wm8ix>@$d|KF2sB%6wPQ#Ky~7zPC!}^uM{fE;VF@IJN8X{i*I(g9G=Uysayms}#XO069W=F=GP-hB z332tKd8O`>?nLDVS4>zI;YD)i(G(Jy3WQs6Hhxr#Y&5PAts=T3)k!Q=m0=!wXFEF}ypl<`wtRU`C<=hALj+kmf zVJMkIq2ofEqVuE5K@Crv*52$cnLemXQAI9ElR)+|cSzvzmK zjJ88gsVZM;8r$rRIb+~pOeUe%D=MV4PEe~Aa#NzxVx9w6+T`|kqp+W?Y+$t3mt3s; z^)B^G*ZbT^h!QpZjGgwXM8hT&YfS!}#l0r# z2$@eURgsjMqQr(QqC*3gk-VsHK0#^xlL-YSL~mset@Sh)d^}BkUQvV+k7s=FT!L6o zc&^B#9e&~@oY7u_e#@q^A}WwH1|os-pHox+TyFI*Vu1SR8L0k6 zMyY?^LF%6`TKzNPVPMyv9G1@=^t&|d6Aq1V45W0Me#QqA!l3cJrf_J@UhB2zPY&bz zVd2oo*KnBeCr9a@95esqFuqe24vq3)piwRiG|Gp8MmaIiC@%&Y<;Fmx{1|AIBLj_a z42&IO{%3@1VBApiKO-In4vRPcGvZ}n!f^9HBc28xFv9%Lh_``>2b%vG=`e8QDDyw6 zuhd=`_4GHqo#4-Y`Pb|EAN|OGsJo$WDg4p-65vYw2H3|?cHuFH@Eh_I}w9XJj%EPAYV{It1g$EgRh*79`naRjiE;j zE}=K{qhmT3Z`#KN-Wxar`!@5=oO|LNydIVIqMsz+txL<9bX!I z)C+(Kz!G2pxDuEKtOAw+Hv=nyjlgPP3ozjEJT^y1@#g+M8T4Q!=r zJ*X#*#WgSom>}BqGP(xV(6s~KOQUOGToSIG?RqY-5?DglTDx9L*T7wL?ZUSO$Ke{7 z0ZeeW>r3exxDJ>Wfp4Y(tAXC}$cF*#`ea}eun1V_Y1b=(t-xkrS!BDOJOS=e2oKl_ zTnVfm)UIy_)&ZNz9Vqa@uIgxfmk!tfTmozbt^|5w+VzdV0B}1n8@LCU2NVZEZZP}- zgTQ=XSuDQy2TT~!t~UVFhPLaTgQ0gAIJ{lo0jwT@bRPnLiTFkzu=>Du zy&70I65sa&HjHZ5(~_ZQEbI#mB;osU!0d7Ddg4U5k8jrtfz=ZbPhegE@dG9t1pB1G z9as!3I~d=a0|tRXVD%wzKNNbB+x1dlU?S2-*T5EFLrS}jcgl%YU^Xxa%m;c7g+1vS zSVz~B5Kmw$5U&9gm4~{HE(?5iel23*XF}34IYA`VwGu6uvbG%=2~V+ks7kJM?B?!;lU=IRoy)JM;`- z;Rt-w4_G#;L$3msjPB6)5GHl#A`|Y1bm%RFM|S9wXTjgJ4t+V$GrdEv2POb_00Y2Q zU>dLsm<^1Xjqrga~GFdJ9}%mdZ}3xN&565uXi8L$ml z33SatIKTv89WWW#0L%b30r5I|(F!aC27ya~o})VSa$o{*9WVf_1D4E0e$aI$@&lNc z3wr?zfiZItzY}0DU|v4-0}Fu*h(8PZ>H2Kw2bKWK>G~Y#2bL8;Kd=(G9aw!X^aHaO zcj$Y8CFgbMo_UCGVTYawOuGQzd<2$U)S+hqD=&dw!Ykk}8~&d|KFx>zkC7k1z>lyy z;jggo(QxlUegOmCPQ3|OIIL4oItKa^I`tx8Sz@QY8CW>7Q}^WHdQ7LD2doZs>g#}o z6Fc=bVD_X=J^5JJ>BvrfDKMDTsc#1sW_Rj|$Kg7^Q(posEQLN`c6q1nJs$R6-KiG? 
z>(+JZyMTF*Lr*UB)Itxiq^?tMAbhn`k2?YJdb?B42L`sm9aspg1Xk|o)LVg`FFW@GcS0m416OV0o%6nE(>fu40; z`YvF}^IdxK$p~jxmtG0X{-#TB0jBNl(o+{A9(tF)6qq2o^#))oFzyt@XHd6Z3Tz$O ztv3SGMtAFz^T3Y-53B}O0Goi-z*gX9U=Y{{^o;M;TYw3`Ag~1JKNa=|P6k#2o6kqQ z4(irZFMz&@-THE1!eQNd9o*}Hdr2;>ThBNR>6_85uLOG1yY)86mCWwe^DacZjzzq| zm*Cr+UBGI5pK~MlR(zvVEP*_}%~^Un^a3k@dB6qWD^Ks%*8v;OK|H{>0w*s*{DIlP zrp4WQ%tg?DKKv2C1aiQ-i@Ws#@P);QC$O@jTc2?Tcwio|ay9G$Yz5|DjQHFO|GDr8n~}cD z;D0x+frY?2xR(JNfvvzGu=01LCm;TR1(!oE*sT}gx&*iu*Oi^HJFcsNmB0qzMqm@L z9@q-p0Sp3Lfu1ge2TbdRea?cOo^Cw}SO!c3)&X;X4ZwV0wcf3l0Sg^H`gUL?unpms z4C~Qbaa{&nSq#0Sdh}*s!ss4-&pAlnm>#|2O86Vsqc19e9Iymf4O|Iq09FH=fJNuQ z{oo#bGq5hXNB0-PZin^gg}}n8J$g3EQ3J3P7&rpg$nQ#EH82R=OV?>VdLr_z0q8|O zrA_bA8*yC-Y@vJwdXaBIU>Dr;j)ERsHvk)uf0Z+P^rg6N1(pL7G9Zt9s>|%r^MOsX zd-NLc*;zgMF7gLlg8XTi+oLDL9tqh!`V6GM@M!2KyXC;&+0X|p1Ew8^=YN2C$3y-K z$mjOx1;E0S@hlMR)v&Nf&jB`_(xcacuRN_sFUEDr={>p&cC226=ZSy~z+zx4upaJ# zGY~H9SOVM(44j4Z!rgOrkM21aa=^e&7#4S%?<1kU(7{9S`|5<*l$2 zFrmChucYfckPcvL1=sg8UKWydytR7!b+3_VBKn@Zz<&NMf!k+YoQ;QeIN1* z*l<7eUX5@cKzf0N4?=G#uGjbIldpl^4Jen_LGL#3xK8`9M-KvMBpEn0dhTgAY#{16Jx?1gyIgb0$RaL4MJ6h|>Fl`f~_J z%<{#|@+D;Y{L5UoX}3GIa~-|}$$3un`7blhWtbZw?s*`np9i%1bDm^~&hhy(jdP?K zqAmF4A|4aSU#2f+ne!|kMV-<|hvOf)IWqMF*1Z3?5F-p5Te!!JrN&c~F^10JjKSCNWt)|58|tczQ=~g@q>_ z+2FT>cauC0;ue8x07vPjgYu#X+!k=?rlc(keV!~IgbQ%F5^kP<1@(AMx%IPisVmgZ z+BB!Vp-Ww7+d0hfmAYiS_9C46bwNFf;(&wV>Oq*B!5L+O@;wn;12}Y{GLKNKsGMrI zIh|+IDGGvz{ANKe26M7xA3BK31D6OcG>?{mO8|#1TplD>3a$WLS_oGGt{mK;5Uv{B zN^rgqZZkNV8y+3PHG*4bk!t~0W#NM0)>=5P3vG;r^Mk7ZXT*i_a56ZG-|-YK%y0&k zOeBWVlMA;ka5M6pxB_tX;JhS{gSZlKo53xXjN(>;OTb+AfgxNKxFm2Bnacszg3AE6 z1!z+rvwWovQ#m=_Wmggo_djR|Kxu!Yv0^1dftJ2l=f8w*=hXl2P17aKRA2$#AO&w-?+p z@@#EO57LZ=5H(n}$@K2*1(?T=C%s7P$y~2JNc$r2V~B?|@>JG$Klozsvh4(aHuGuV zSAwTFKtA<6@JTq$1-}kFhC9|ebCFSJvf)$$xh}|cgv-NxQx-lECJXbazHWi88mt*C zr7)2uM01(bd8ur6DXbQ_$6#&Xhy4rd{%~PYzN8L7`Ku4=Lntg9R3Eay)q|scO_d+# zWiW?G#brL+l3vAn05z_N+f}lzES7&y3;BC0*CFy#0Y6o*1@))d&p)NxrSh+mQ$9oe zG{8^tmZ1KGO*ns(KGomTL&E8TpXxV)`X-Z~i=>3A)5=e1{Z2)?Cchcf0rDW5WKmi} zII>9|xMXlMNDApbn!_deVz}j68^0=WE#QtspG9`; z4OfjlGJP4Qdh(dVuBn3*>E8ogO<22%#5om?|Bprt^F2Xc@bA*{3thmupI=J51M~OJVJR zE`c>SyRazDd)eJcGuhdL!9W+*@y_GA(3@s^J0JK*c3uEo{>HxaQ<#O|62XOrxfEOi zxc`m3ZGbNC2j(z$f%AY14YLiL3*7%kI%l9m4}91cCe@Q%a7o}Uz&ZCvYOK5;^<*Xd z=E3hD)e{P%7IHOzF^o3&ZP|}75-{MIf%V$Il8y!NTLiypRrQLPr+cmpM@R|=jTcGEU%0d8gWspn6`v20t z@c5n>H%pM|j$a@Bt^j5_$;vmaqJLoN&ZDgKJB5(gt+;P=nckp;QtzZga-{3d<= z=V4SsZYkt0<$UX{W9G7Ko98X?TfZM+c+p`deDT-9$cEoV@cU=BDuP_?Ukqa-{08?U zj2)26{qnD+BL)Ne3i$n_bWl5$2Dv?uTSfWtr{gl`QoHs6jflK|SPoqo*yB^cx|Y%H z19Vb-t)lqA?T^|Is;>Agk?sX zkOjA7$TdJNq>cKcHgU0Cn#p#>&?B%%smgw=mE|+v`;7?GU*wandgxk!eND&jmoDcg zc5%w|<;$_K7n5gsEq&u9>SO)j7J&P!V`17Kx8vKsF;XI>A6z3i6jP(@n8v~t?EY%F zG?~Z3bHdn1j)j{k{=4nNqPWmLJaQk&aXEo9oX6$l?*9|_40b<&{U2-bpZ^E$XqrVS zySvCg4$9LCaJh)n4fg4`^u5~5FzJ`~Jk8Km(b{K!sw;cJm4jPG0ru9Fe&d2mt^H>J z;;EQ8X@TF{`}}f0eY?|ngE8Hr=6M%EmiEfgTx$4mw3|ckqtq-f+5>o53;%&1*$$R? 
zn#PA(f}`I64^|=_&CpSdeR;Q$E&ou*=&(AH5>U^um+u~19Ui zCSgD4RpE4*ZQg0GOWC{&y7JnC`umi=esv?HTsv>JwfPKq%NT+71UlqBSm-z)Q_TzU zlF)e}ZHX~eBJVRw;lBX;Qa4i^LjBv$J2=r0&PN1nW9 zmnSr~qhSv1i(b$j)JL%{npdYVO297ye`UCGY^oF50NbjHNe&yb_uh0$(XLr&cgsv{^ z^T%|sJY@YJ10LL8Onj zGZ8rNi&wwhjUXL?aN42 zxH6Im@2wPH=n8kw?HQ-sx9wlN7yMDY7eH4Lo^|UF4(>tvIUTR9%FNxJfG*QH}CD3r1=^g zvwV&CHR0Fnm=6hUpBroz*etL|GwZycNh$K16IW+IL|*+fR81y(IGH37+pO z!ltx($Hl;2UO?6hS<4EI}5(FA?lPsekY&M^9F%zFtB zaro+7NSX^qbvZlil*@Rg9)j|E2A(&=+No{+S@xcFdhJwY%Drba1}P_0U!EVY~jZTdJ4(cYw?P zq+NeyKlQKw1O5IKtY3WEu8$0{4?SDk{KX%{-+b>JF8b2`rO>s!8P9Q&t#LHtR{^f* zn|A#_)JcVwKhxM$3*wHhce51+r*nL%m-S;x`Z4`wv=jPkx~iYQEBAxByr0Y&W(Hd< zFz!aON)E+(?)T_(23gXm`rLl)-QNe<)d|(!k-i4#Oa8rGpGkRsOvpYlwi)+H4L;B7 z+uwQjJu`bsNF>o#&=?~D@k|W1TlUHIyLUOp>{FG=1@M~zzp{Qo7hxf|8Q{1WNp2~) zG;myWC07n^vV~g*F4e-(^Vi81ecQnWz+t&t9x?da436|+%Bz>7XSOMR#(WLQc_txW zz|lD!go)s?E&5WyEwFG|;PNe89=IhIZV9*&aG_z8f-43m?@Kt`3UH+szt!MYTDZ;N zD#3-?v=Ll|MXm*0m4yp}+X&7W2U488hhe;L;r!s%S-8pINMESmY;bEW+#+xkzfid% za1=jo4yB!!gDbaimEb77p?)`lE3?SegIjLlc7P-Mg@(}zZmEUq0!R4~Di?FO%8yVk z30x86LgSJKj`BBDE(cs8xOr3uaga^(!L4g=*EcXnWBX!o8J+F=7#ai8J%tt@P4n0! zUk=LHwj!bxc`LsO#`<%u|ppk;x`9e!+~bM`QWyIiwuz~23J3_Lthi3uMAuc zo~KU@ky{I{(BGl|ErhE9SBvNE^Fp{S;KbMt{Rqwsx@^Lqora*!jD}}zv4|eE11H$OT zR`-x+%d}%{$7-+z$}Wv98=%1vd>Hl10RgP?S#O~d^8 zXQ)@<;%3$rCIiYH$VRaox{8|dt`e*LP2>F&Lx!3d?PDZOWwaiC<61iOx5Cw*dM&j6 zINR(Dj`~eOlfIbg7@y$19XrC+pK{Z@f%7|iedS~p(w7f?NxygKi>+x>WuD9#8b8f% zTfZ|Qru5Z7Usgwlerp1h_s_$5z8Z6Sm)XZp7IlgrEo?21?bOc>*FIYGUGpdUa-nY> z-cQm@;}X00m71TG-DR(@l!|BpepS#nIkQuz=U{C0RhiPOy=z+s=J={K=NooGWx|ER z@uZ`DI=WMTJzU#lo_}&aX{T%srk}K@Y^MX}!inr%gm88&=+y7@gh@}Mxjf|CmIs>J zR^_1{`jXD*)a&i*w0u-5wEvh9MxW7t#2kh4e@>@X?}*zi`W7H$1mS)9$h^C&R4)ZlmqvkcBx`Q(eZcJe$^?#FW0J&{utZr+$98GH2=g z7Y?-7M|nqOi{4q)u(DI1;Kzks-K%k%;@1>rZn6erU(%PDf%t9g)ScnhV#>|>qQcC{ zmb1Pk&{w#%Q-7Yv)|PP}Ciu+u%e8$FGZSK}-`)&;O?W5Kq2bzA^E}kDFl9BL((1{? 
z{mi?a`VqJ%w6~pUY@%&D*gh|6FdiX&dC<2JZ-375?Vr9$ws`@QKwkhVs!5-(OP3vs z%3EGrUx8IQe#ZcI5mUX}0oj-#U3$IOHf(D-z1BV@RkFSE&qDuz_c{G|sI4xvFXl4v zc^D%%a~UXtzNF*3^oxdu(N|^aU$q|xhlw59r$VTp_o-zSbm>2g38&9vD)WnM>m>RI zk9O>+FhNoCMRrY|jrz3$@3;!r*0!1YqA+bxn?|}ypli{sU3zP{vTLpjVa882ax6yg zdJ}ha>Ho(3xJ}u$jD@u;!D6XsS>fH}xsiT&CLeAxOs%Y;UrEw8-Wk1rj zzk57Zgws_BUDeRF9lAEymuZZ9%=NPyXa$@`k6$Pa4%`Sa5u5I$iVpG}=Zqsed@nvM$@$gNyZa^4a4PE+)s24V2 zSk^?GVN~VIIjFd~i0?aHz3&5|d8q(6FE}hA$%D8V-~!<2oDRZVa7o}IB%`z59`_HMTpLUl$|y6UEIFBICdGNB1!~@R!%rrQX>vPtL*jhKJde z*J;|FUvtyR|HHK{<}qBH?T{Ix zGfI;mk=ycBm;NBm`_>{Yd#je2*VJ*_Cf|Lq2!0EH>Cz`tx$m_DQhXc1bq6}OYbgd#n=~B;DnOAB;m$R@^Lzd?95C0pX z(?kAoAn9TkxN2}8*xS-N{(e53ElF44Xw2_>yY;8{uS~7BEmP1#WvUpuVq&`Wr>Fw= zmUW{{@z&jEL&_BDKczGg;x+JF3_Fd%Ir&}Y#JeysLWhW)7jK07f+M=sJE!K7dk)-R zm+krI>dB|kXG=1Jgk!Hp$C z+8lGgZ1Vu9EV%o>?bgpBcU5-#8oE=BhE|oyBFGj%_S!yKlyf8R)n3hihGadWIG5fo z6M#lFK?VkXDXyELtM%{Q`T){}gYNAb!L@+9p9mRO=UQWJWe?m6|Iw}9X9i~~*Wy@| z7sRLAR2JogTxupYu^FZ!@wIK=45>J%%7L!x8@tszq%amg1p!lD7l5w;eDEsU4R?t-T(YC< zIP6D&Tm;GCAT9x%cTKnHolVK$Jm7{)DJ7QyE)JY4gj)bE#=;e{zNiqnrK~TMD+lL- z--Eexr?}9*5kKBVIxU2&1DA7OUwcP#+rednO9=_18C(H4BW+}tz2Ne}?I1aGy?D#0 z3mI@qJRWwqzgvIFS~k!a(SWM=IRkB-!@LiNS|e2+^P!{Sfo?T!lI0Qa{(&3CcPZS; zYP);it(6b199%(bw|+#pHrq6RtnCg{9&;Djl?)7`Wcm?C#t+>(-Rhdte}R#F*-j_s zqP>J%Hl<(H>wa}rOT*p?OWmY4J`aAE{D}ASTK&@QdVas1^8~Y3R3lj@E8sW(7raB( zYIkGK)NF&Wdx$Bm&Cpf+E6OqJl6#(V)KdY@PTKLL+5ry+or%A9Tb>0ow@Xeu`4XDc z9AByvFOLYNIB>sWd+>;@S%E?Cmn9Lhea6tYP#;l0B4X~pWW`M$;(Cc_(1 zUZ|~ZguWKMFE^9IMtLhXT56nA``89|*Q?#V`=Dk22tEcsFB&Kh+(zM?4wCnSiwh$^ z18xa$8z`lf9dp6eBK*hg#|hT<^g7!y2g(Sw&1<16h#-dZP+7`@dvZdL{xZb_2gPj( zxTfJf;rG3n>-Cl}MP1HQ)Iry-1AFvC?bB(U^N9Q-T`qKx`7fj2rB1BZ-o0Z0tFEcG z!@^m<+_FE4ZucPhSZ|Kj$uNGmHZ|T+x?e7&$1C|c0-}2&Y9|{@e zM;G+ez16KBWuIE1;_$@6>*G|8za_ zN4lD!D=Vo-|7W=TFwYS$v~7>@f{0YuEBO@Ehw(l7d)n2)*xv8 zjclKlhwlH>ITxyvxKmm41>>$M`6@TR)e|&u*39RzDhE z-C=m5aPr|N8{f*1;k<6|Co~*bOwe~P_HOUikEZlKfQc44R?{+^YT84VhdRilrgZC% z!Gr6cO0Q*|0&_X$b);8^+n44xv~N_8y^C=FeO$MG2G%-V7b=_d(^-C7l&Ntzb%?!E z$YnrhK6D1(?Uw5wuH6`UhK?0XJCAN zUbmdPcWqPlTZR{@q92TLB5;x^f=nx9FbyYMFB&qKu3{MhWfhRwTiC517Xg{a!sJ8E zji!8=ZFaUE~3dOA&zMQ^aZV@3Ei?Q$&7>xB_RO{{hHJ5tpQ3 z7Oe5cr~)5U&rT6%$p7$jQHm%|!3Kf48>7%S-WY|agKmskloE5QnSqxRQv@QAn<9=$ z5$C6f0u?9QCt0w9ePh(oDdH^YYk~ZWOGFOk1;5I_;eG<&Zh4t(@Zo54x#`_2XSM-6 zBL)Z+A3VD@MO>UBPDl~gq=-dGU|Da9&q)#INq6VpQp5$6Xr!sYS7js`{t=zaQpA#! 
zn3H_flxRd150Bj#b#h8fo|!?Sk=)QkLn&p%>2_rWO`1$JY_r(-4+AVr5k}HQGSXgg zCen=W)u4z9mxi=sG=(u0EgIlyJWM9aI)oB~3NjfoTOeQYA->~+vg7)3tYItM8qq`p z#$|XsX`wMJll3A)`s;$fmCL*3GpVj8{YJYg{qg#QbH3pZ%_#*)?Zo7KtgpkLpZfb7 z;V-mK8+lW2?)U0!2XC-F;!NZ4GU%+j74uPX(0M#`_TKBFJ|bW_farZ0s1`NYJlP~; zNBtU&Eq6fI4%p*3s@uQY)*s7Sp!Qpst}>s}n{*cHn|X#u9>~*mHC*f^1r~y zM%64TJ9WX|M);dW{r{+5f9O|3+CbBoQ#;nS`$rp0cF#E*CGi@qfgrY*JZRiY>RZ8k@sb3r;N|8_v<0#}c|kkWMw;$>G( zi%g?9y!g&mrL2cZi%}*9_UH$ZzIa=G)_NEfMqdWSuNeCF8u~uAtw;NF{Up`VCvZ13CbB?!=jV=$>VJ=JBM3e-y%(7ji z#YgmQWRo)ZZ^OJGy`#`|ysb^FWqW=Y8_Tx13Hnk;^ynW$w`+v0K8uZ=74}6SL$1vS z3UU7leX_3nJ~F(G!_*b}wp)23+xVMM|NCcSve8E9Sc~~iSq?vhj{fD^k|ymB%V8Jv zEggyP)RE2avejp8&%?d9JD=J)`cBzK%>R8!Hb1?88qMvT<(*fuD5!R>0(!Fi9Iu)G z^LVvDU-4*;*MPqkFKVljFF<>VIZ4_6{&=9B{jFs*Z2Jp!a@@D{Ld@Oa+jrLz5vJbm z?^<{RbS(+s`+lSgM+|HOSWP03$i3DSSYf9Ed% zfBEpY9Nz`}Y=8={Z;yob-T-4X4<{<)Do7bGv=YRD0S2eFbP^WIO+mtv+ize^QuolTYcJdj_%BD9$9GrJ^vHwKF&SJcIJXzH~;Ruxu`j&C!Yd!ijGLoH5EOQMb!qihrM+tPTeA}FkmEg+2S<^xC)o@!2 zw^Aw#IEdQ}&Vz6A&TvUea*g07<9obDdCh=R3%FEpxm2%v+iRn}MWybYfY7Eo`?a{p zJ9rCRiFJX}0# zprhqWeE<4SE}`j-bwKdohk z=~pu-FUz1S8Q(KL4d--B#&0dS5^(nt(Q9){--Ne^hTc*KAi4won&I!MK7SBVn-rWI zj13B^4AIAEi@)zHFBBL0Hg*v>D&}<1*gODk3Ahm$FWIEE-}4FYg?Y%P6l3OGs3?KH z8GCtLjI{NQ&5tvN=7_U)o5Q%mM&;0Vb>J0kyc7UsI4EDYLub>z9{pS*u=o3R+%Y8F zrorc}t~tKIN;g&!Rzzg`(v}UF=gYWQb|BW0D%1aW`2W4nCZtZ+9hV5yqbrVn%PU@S z4EiZb>~I{mCrUi+obq9m*zNScA0^gkp0}e!h0F7Ll-TLY`Y1~L%k6n3N<8Y0dI|Dw z_lsZ#xbY3P4Fg_;A-?up{dJVs8ktxdCDulHe~A*+QQlXhL~YdLaHTK@i{&U>%bk&U zF61`n&s1O~AEPR6zi}F0}72ib77Gk~E3*$cRJyglg74IP2cQp5&Sn+|@ zjkvxS;r=;R?1=-X- zgJh%gh=(G?BW^c(q9@!pLkrn>ilE8ga%W`wK;&ceD+9$km;1qi;t|(MMT&P^n-Jo%2zN)MD2q4=ri_Tg>1zYs|A`bEBE7*#@l2!_A3J;{ z5`#kWzhW% zkKCQE-7`A&`*f{ybo8z1+RbC&`0p`s-$2#a$gk41`mwQJrfchxqHjspo=Jk*`^Lq6 zmac6X7x_uL_V01AAE#^YkB`17U27hX8;SQO#C?>m?VAw!Ub?nE5c@t99u$3Jy0-ct z%&zJO#ccz9aO7L*+RlSx-%QtDI3)UpbnUG};P_;6-0Pr|BVSF|R!oe21<6YBte>IX zk>ajO*H)!;A-C>4{DjY^Yo(Kyg72DqiVz=8@!ors_T?0>p052k#rtcz_WKm?kLlX7 zsornXwcDq9KTp@zO!a=4u5Fm=eFq7c>U|?!du6H@Dz{A?ghp=au#abGKTi!*&(KyM z5sLzL#IVn%Yg>*8d@^0zb3_`3NNK~~o36c-7I$ozW;H#k?!)K(80)HzeDj355ZOzo+7RI8c<_v1%tRU@KS&(wYyF$*G%iIGoC z*M1%4DW9&D`~8nh*J}JrF-{#lsCuUM{piFRl;|;sV^L_#AV@Th@vNDtHI2DpFpAva z-fxc5zCRqs+jscz_37H3lLKWlv{xp35%+DAy@+<}WFNBfp(!!zrfbhl83JRyJ0<4c z>Dt#*hTJ_}`)vxete84-?=)meVA(Y7y{U7Bxci8azf99M9TB*0n)byJLxuSDh>;&o z(<;&epB|}gPKy>|TUvDeH0{&0VJ}Y8wjLS%(UICmNA9FCHO#&Q4IONHuO_HhEOi{P z<&5k+RD9@6p{8NnuZN0ySJc~+M1y9Ie#;;BluMTt9r;26q@i(W6d*g9?ace^22dUzb zgqSx|#g_@uzov>+!{cvH6^{?cxctEb65mP{FC7r`Myl9*K=ig$@vjkMKA0pv8F5rQ z!b>cC`7p8kz)g!IEoQpM^+x6l|`Uo+U@@wlTMgR+oiC|YQ}CFgK^91)6J zN1W~$Cz3IS^K9~pYXc8>#Rdn=Uf~>5?iC%*FWpu7i*?Q}*o4a$MnvZhUpqm3<%oOiAe1ZbbrZx#&eKuHwBf&u7x%g1mK`KY z-QFk0i-iedEQ;13*`JhTk?`TsJW8%YbMZ=v_Hpv<(Ek zHF9|OIPpwW+&clW%IobMC;sIH9rO;z8^^yL6!&64H2Azfj1xP2pdXDM{@FNjPfXk+ z0nriT{bZb2F&Ol&!NcDkC*F#UyC)zv4)HdO6LmvC-xfFg>2abnF7Aea_+qHHW}Nt8 zDCqiO!|xp@9*>X1vv}nR-n++%wF#hqNf@pti7$u8eKtY7c7V4dNxXXi=yfB8w z663Z`5PK87UnYst13~XRaQItE;`Nbn&rT5QM|rm-iKj<_F7pq6B1!z>k6Q;LjrKm0 zB(59lh1qT$>xGq8kM+VR50CZ222YLkBFQg}^&*9DjrAfyAC2`Q{$GvtBD((`>qYF^ z$9fT&82R?6CwD`j3{V`hn z;PdW^Mm_gJ%k|M-c)cx}3WyAEFvcf<(#*vCVTQQI9DFJsvWalFeR9tQcaV-QNs^Nv9uM2qj8?vJCz zPtJ#s&O6*+I&sl9U>Ssd^|WX`DUn@a=Fr>Kz!>ir?MtF1OI#?v)Xa<@CyTxT^!{7$Ime?OBI=l@oUa z7#H2)q|;UM9X+zljfi|)bBrs8QTzO@Imj`J?v5u9{?g%?LnUChBj$GpN{jnOr-Llk zE3B`X2%jR#dA*g7N=t~5qvq2Kh9aW;UDs} z!*L4YjM%>E5N|u&A3GfHS>pV^rlWs3)E!fIJN!?2#Es6xZw88$+OWrQ)2zkcI8ao% zPbrN=*{?#x@A1%m@IxMVwMSHYZURH)rB!o$svS1yy}6=NOTBZRSQZ!iYnFH}ZrJl# zV*Sv#rnzF@u(%y_#m({b4)RUI6Cm^M@aIqwZa8oR&VM{G_MN$6-N<1t&J}l!8v6WP 
zQ9UYt1AL7dde>ZWi$A_+j%f4`ZJ#5)^~c{mS3EI#=x1}p>!ahF=ZKYKhQ2gMJUAxy zwK-zAu|x055+9F^{dKl@B5CN2v&HL4k$AOCTT<*#S>pO}(Qjpm zm&V0DGe^8PF8Y=^;^y(OFU%He$49?ATYNk|cI9mG{rEV%re(u~*zdE%^An=q%o2Mi z#MaFbD+19s%@MB#Vjs#99|oe|nk{ZSDE84TQFTz<16ksWgJN5$Ir_dVQ86)g`5du+VqABY_;Mmi z-jWhqnI-N^iMu09e3G)<0qV8GW1q|tI}RWEV3v4sa%?b5yfZoOWkhT8$Xn-#=cYuj z%o1Ct#D6$j+%&b?5j;}+GOYmKUOy6Rik}}j48*TT4#zz0H1{2|#aABjo*%WBgAJ%pA>= zT;%x)*Ez&>R9xk?=Y)jw#re2jknP@RD))!HV=edl-|kVp&JZGF*dq?d!gBt8EcpiXPeZYd%Jfj z(ui9>I%+Uk6^f<5C^Dt_lLQg{*L zWsKJNUd8-U#_JeYFy6vg&UiQDy^Iequ48`k7pi#GGGE8InekP|HyGby ze24LU#*Y|3W&DEiYsMcL+Zcam>|u;NL&YP3F^=&-#zPnnXPm(}i*YXF(TvA4p2E0@ zv4HVn#;X`_V7!^Jl5qoL4dcs1n@qsUk3h+-VZIGQn;aT?=X##0&3WxSj*lW{5IGR9jOS2J#4e1*}P&(`avCFY)@ zsEKCvFyGE-$onr>=`pSiOt{GGzlQxaTKv~o{Hxxo?`Gb(-aANXHm+y;%-8vr>w3P% zDy2Gd6|fpnIYq@Aw?XoVIakFOyWQlGz=%60c?8%!k*|%^{nn#jLR2evpUO(uzk=yF zI#h2Ygdg{~bg0>Hyq*ioE_Beg13HZGu-0Na*c&!3s1S`_8nf^6z@(Ktj9iH4>q1O_ z&|&cOT}wKQc&BneMn7+a->Iwg7@|FVU6~}kxH-{qyt|(l=j%7F8{Cp zn`)^ChC!xsd7R6*ka02Nm5eJGS23<<+{Cz*aVO(pK(0nRK~fC3mF$PUdgzEaTViw#!ZY{8Fw;%$GDF%0_$Nie8%yNQyJ$n zE@WKHcqQWs##M~#88(4kD;ZZXu3}uzxQTHq<4(r!822$oVE$c(&p4iO zD&t(ng^Y_CuVh@oxQcN-<0i(fj5`^>W8B9W!S}br7{@bCWt_{nka02Nm5eJGRVr{_ zS8TN424lu~vu9892Tnfwii?Y{@E@5nIVE-Cq$}jrq-zdKO-Y@cGU*`2i*uHob@6%r zE6=^`^7Ajb*gt7%3Y^W#ELR$)*lEj^_0WV9i@a7=2o3*?elgTl6RzI=(9FAg`%N<+ zA&maT%nuMozhve;@;lC<9Gz=opfLKyP&bWwWo|Fce3bCp@?KGH$rq)6Q185kS&y%G zykX{X_ncw$(0wi%cD#C?vEv7ek~i%5SW$IU|8k*;A;K8f^l2wM#0g_OW9Em7R37J; z`C-DYAJas<$i2ia+=SloidoNak!@3cHF1C_vgM7@Sf3Ue|GC7^dLN_C;wALA!$Hi) z%{0f8hDT1(mtPZ<8j%llAU>CJe2nqtkvRFIaBGe;>z9Kd5i9ms`0Hdi;B(FLcQM~( z;U8wc$->t$?>WJ&XB+c=3;zZ4ITrqB=1VNROC~&4Y_#z4%r{y1gTa$MjrlyQJ!9UD zlOaj(ILpF`ojs3dJsFnyz!R7+vhW4WTjvk|0iN>FC|9_Y$4!#&U;b(FlHza7PqH`` z;7L#adPLvBvWx%MQvLu@v)#xSxo#pRUqgEO>Q|84%QluT0&kF{9bv!;er+ExTcqnk=JBhI%rQhSQEddmpAU%l>DR2*K>T-&IN(Z7D|tJ zN5#|NX*(?CV=8U_!?BDL@GbL#4Ra)}>A;Gzl+Tx&(Xl4U33KAci}d8KRdQV{AH#g< z1ByS7<>~zqBp>sj;tl=D%ojammY>Fa+sleK!abV#=6c2RG_=TLKL2&a-@xJ0_qSv` zn-za0^YmS2;_H7?JZ-n2<2L3~JJj__%vUp?(WQ8Lu8xkEnXl!czRi2n~RFiL?vi^G$9`OL-JK)l)U^tC4x$3-k7h))?s;MGoR0fp>)zg z@AD%)#{9YQ-RM&0i+H|#Bk#uey-kK5UU%5Q z{P%`DuRn}IS5L=}%p2&!kdahyKSf_|&`Ma4n z)+^Sq{(G1=)-O_7{t@O&c^%_#%-1t-tYhRc{{i#Hx`y!$)t{KJ3aIiXztxGb2VkOx z>|Aq!;^lWW!TXsnx>@n^`_DE38ZBeif8jS#mf)gfq$R*^7j-kztaf*2Sd;Miua?O(V-!} zq^JBN#mjFef*;L%&1Z^#h2^I+U(~F4`AtK}pTc}@i{jARPypWfZ*RTlZ!fM|yH+D_(wI5AugF z-(dMe%p4?obpkpP#Szdf1VUtC{zor}#73K5LjSy-@LcS-3SI%yx~<+s}4md7U5BHCMxLw@55xP|%LRf?D2s{-G`e8#AgQ$I?_Z7iQ>k$;f+N(*1hdYXBDe*^1D!Gt^M zum2JaFAn)`XW&E3mw&E!`Ce)8BV>7r6)hJ1Dd0&@>bFW>zS|k{ud#eg23I(6bZnD< zW5ps1{}JoSAEfk5;r^ocKFA_}IocHpxBM+7Pw$JP!?-`HwaCZF;VvH6vhZV>Z?o`I znNP|z$A3O}nJ>qxaOwS|bmW7l@pBcAds7uHu4CTA_gRLvb<8(eY@-@t_v+%DoUu%iyPVi)Z{|*%&V;mBV+ZMW?s^Rsq0LT9!=G!d% z=|hyB+>ewVx=*0vYw%>ZT1$L>V?LN=POnGa=Ac|z;yHr(T^2rt`Jjc*WWI8)S^om& zH(L1fn6J0+|6qQPg};^ggn4HDRm`UzZRVc=Pj+s*SJ_8?BLbKyr0aD<8R=}{&^NXmolGYiRW_WgW2Zz-@|;N#mIhOfSOFWMNPxd#~ca6L9?aUkNy`xAij>VX0 zlkF0>^M?GW1jQTcyBG$@<8$z2=QvAzer7&k;oWjUHdfSI_~FdAS@?;}$K;sPo5_5@ z!k^6i0tW6kmZiuoB9{x|0HEc`&Z zP#r6ZE&PGZS6TQ;%x|~wbC?&FdCOD5)3~CbSdA;VI}*h#A7ha(V?JQv*D}Ar!q+gr z#KLc3zS6=sfv0xZ*ry7xl!x%=&qgCo3(r>+>oq6l}f_EgvAJ#20m*+9y zV_~=QPNkQp)dal?&uBw9NC_u~;m>6~RX!EZ2^`EV%+~~!mhG&pius}mEbmtiFEXFO z`wE*?_K1%q&z?iz!hHL9{s#FmNLN1Ft$_W^M1x7`t)U4b9EUK!2t3Ibe`^dqS1U?c0}mX6r}(7CsPdf2 zKE5&ZH!32A^|vuEE>`^MEZ@a?8l#mS*-gXkSiiD^f2$JY?Wy84@Dy(2?T1!RnIBGOIiLS$@4`BB#lw=DPqI( zZH<^J`4B-qeUjyyUsi-6{{r)=s}z4W%Wq>o;|fJQs>~-ok~|BAKnHlTTSmGPq<53j zQ7~4;v$;@RGc7J>zK!`zulYg|94&E|LUSoMb+s&{uy{nGOxxYu~;^WqIfEMq<*iCVGYz3Vkb(DdLQ=@;>;oP6L^E(bB+xL#dfz#a~hyy3eK 
zFJ}32j;9fyWz5HLJ8&?2SrtalGhz6MaVnmv7CR&`U;dPee+uiL%6vY(=mZCDyW}yC z`I<}A^jZ^YpvdBX4lwU<$@02UXjgs%L2TvGF`O?#rB$X#Rz6v8h_%zgI zloRVZ@;=s6&h|9g3eQ1Ge@3EP>g8!KF@gDd-ZyT%e`+@KrH3g4{3d+~ai-+?A_T4t z!{5z%YR*-{a=4Du4d4f39A&KgyI9vImM{NR5pB$GmGXQM0z*+y$46!Ym5EMGcb>HnDd0(4{)ZYmRg)_ z+ufC^*VLnDI%rYCHwzg zx9)RZiJI(Mzu(O5`@i2g_dLGyo$ve3ch3ER(jSeC)1l|edk;TLChE89tA>G7z1kH4 zepB<$D}Cno1ZOo8f8L<@+T(<9R`Z!>TyFxGRijsb#a4ax8pYdR6M)rn{P~N9-z)8m z>hSN^6(9b-+{;t=Ar;?vwcs|te+D>2eeCxQrjNnS8P`httiD^GeL_i|sQ6k$E?uVh z(ccw9s4@I|4shm^CrZD6M)7H--*=_7n>@u?4~#2O;BvRpZ@gSU(+A#b`IMvu-ZL!E ze@F3SZTDnI;?;%k2;xUH*RqqyxS z`l9CZBH+{xM_wZ}wfSz{=#}udmHyL;JCINGD{BLNhH=4y|9>w9Kks=|{`l_-#=+;W zk#@7@c%kC9@8wEu=v#nO`S+bE1-W0d{28S`s`JqwEB;HCzxrV=Rs3GXH*|h6dG%4o zK1oy2F~LmBl+K~eDzAj*Iy_2H#D!` zPs#t=nt%JJgz#^)L4S}!KbMp8Prp^@t^6Kv=FiuL-}@fL_uVMC&EpS_QaES~Y5DfwTzP1?cs zc^UtFMDt%$dHcstkr)3_NXdc9ojh>It+zaA7~ zlk<7T0~zR_GfMBAD+RQE@f2S_B7`2^2C2@Dfov{ z@K2@SUj%+R#@!x`H?w*6A5!SAe7>}gbG68GqknP*wa3jO$I{^6VERxAPq1&>!9= z#MT#H3jM26@c#y!`q#-x|KgYe{@vHdd(Vu=1Aj-~>%3O#OW%p3-+4cpT+T~?Q~s-O z6hiYWKA?D8=c#8(HqO^l@|k?0sM6bhUOp!p z&;M5OqdM;oX_|jheC?;D{N@+B=0)<}w$j_!8B=^n1@qH%KH8@EYD7=C8924i@ZU*$ z-mUH5Q~LD?{ZA{tFQO;^vf@W0{QP0fe?8*w+EDt9bLGACtyB7)^J1x&?VB#C{JB)| z_AV*lHk~jsz^VMx6tILJ)J6X7Qu^T!$o(CPyGq}V(61={n$EvAzTRl~ha>}=kM7od zHnd(4`~16LI++hIV;p+v+l0=>`!&F+{A=5!f9*TLzoq!$D+Iqvk~t3nx3Q)WUoPC7 zjPGC^`ia^vmiAV~GioR9)y!H(zx518G#u8s6F8M~=-E=9%as0^Tcp1n^<$Wv9|KPG z&bOprZq$CKm;9Jy6zA0$iJs6-qrxy%a#5b#+4{=d7aU>1^oEga(zn* z{Rfr)@Sh5uiWTPwa4P@mlOD$po}=%*_T89*Cl}{x#W#LJfbZ*j^NcG|;BuYO zHwAo>KD$ZrwJXkLgyN*>w`ll%a&P1Gpy3|~<98}PtQ}=?2FL#-j<0tp{o#*FKD)KP z_Zt390e)B>JD*Ve$iE2idBwk^_%wZZ7{BLhKHmgR?K$)eY0s;a{_!_Rd$#8U&q^}q zS&C==SqK$&#uZj3@VtivNP5L_%AEI_CW!RKi>8~ei&A}(&WZd6t{C$_&Wwj^Bl#U$ok7PaO#(hYhS zt`d5iA0NL%%5zNh3e)SZRowXlDY)%Bc%kBJ>w+7fT&MWqn&38$W`J`!)n9FT>>HH6 zt@hW4FOhWbP<%-3V%m>Hzu!?jbHCjGx?G$G6rWapI8X6EWn6Cpmkp)g_p1V0znrmC z+QH7r;cv`<@w>-x?O&67w<$gy$^R8b z|L5}FFU!Sw4RCJfhomGoXuJKg(r+|`aFf!%Pw}<)3t;W=G2rCSvHey`?)*=tx4Xgh z>Rrj>dJp5!d-XgZ{-y?!wG98H3mA*OK*_&`J6sClzn2AL$~^ht35TCDVTskBU|F!`q`r)n8 zUtMkI^Y#e+&=ZB<(4WVF(|b35QF4BszPFu{ z&wtT;rave7d_~{!2H;8j^Ny5!zN7iHza%ewP#fZLw@7_Au9Eh*arZRES4V|@Ro{E9 z;b#gZ?VF|FPT*9pb)6q`0z0o#`n8`I;8w+dUh#cR0VWlHm*$gEKLdRmntq#Lr%>EW zG@jv7;Ou`>h0%EOS&FycEa_L|;*2X^`aJ>E40T@2xDo{}J*A(1l@x@(O^WB=g) zy!6v`AAu!lcv2oa-_P3fk{>5L{%ZiIcAK7({7r5gc&X&G{tan=YqvWXkLUA7r7yih zo>_h0kwSmJ(yzWk=-;GS{!t43-zdHFb|JKO`@0nSb5UUChZjh_UMtmb+KO-JIHhm5 z)9*gu)Q`^B<(~GR)9?2gm%oGm|78mPkmh6OU0$vH`P7$5{>Sv3M3Xbu0H<=U>HG*W z&c7Xsua8Oj>ATDHdzmZrrIO&bo^{*4+_IOTuj=fd<)oR{(p-$%LP8$xn%E>nC+ z`STLRpToG`1THr#{n0NAXm-bv;_H7X_!l)zU-7jisn9*p|LFJXl>FbGg5MAP3P~FJ z{|ihHIee+e6aG#cD88-u@Hd6r?61c`;iP__j?BMLV?2~K-#lCChyGHYsaSNzfb%$d zy|nWt?Jut?`Ir7o==I{P0w?-?&lh_0*WIe)clFysf5Dk@-LJTv-^t&W#G^Y5e}&N5 zc)trc$%m+(|2Cy})PHU={9eW-kKq4b1y1G6+!Ze8#zHc`JrxRGlAO6Z1z%+xe#takA^oXT;&p!ks=mk#;3;@?qx__qWw z{y%$3>U-q7lEX#X{;R-=PmaDuaG3x6d%NPpdQKghi+>-`eA;i8ay~)x*#`TW@>x}Z z_dcb+N%84-3SjbcLGgW#l>e`k{tm|VCU7D9obo>wSvR>y@r?-nVa2C4(j%|s{A^18 zUrE9LLGu|_eZ$)G{M)p?|69ucEolzt8H(Hauly}U)MHHX!(Wq1nSHcH@ilE=(|_g^ zpKb~LZf%RY;v0V{fcZC87+0de<$IRTy9Cs3aL(Q@1rBdPxfX|46}~g>jWsp64@; z@qUH$mzo()F@@e!dOI(ezZHvWt{DE;Qs;e=o%1TikNv9PXmyle(e$Dx-sW^SZnSR&G#rcTh$37m;=MOD^TyHU(w?+c1gpCR~D6kpZ+OFDmDu35fL>5q*Iy^Y7W^(6mx#DDUqz^R>&XdD{8FUr5~ zFs_*fE@$_JzI2WhM8%48IpYCB{WGWZN3ufyA;q^^K6l9rKc)@6CncZvD*a)d->u*2 z+s9P?wFSxl_cZ@cC_Z#RC~s8-`mANG@8yE)@|*Jv#houn2FB-k#WTkQzfALg5#xFj zxV$L^|4<74NzH%gnNt3uT%12sd`-uZ$&GI+zW$)R)Y{?9gHo@fpB22S@_7?*;;WMG z8!~->zA4Q8enjXml8f`JjO$I{@?FhmNDXoGS3Cm(gz_mx;y@-0|4pgTdvspDQE?|P 
z_&uBCB`*a|<=l6|A6^&0+UKK+w>5tIK^@qCVEMmV%4vGh)wgT=e_rzWjOL#OPW2i-OX#;~hqakr zi2|3OOTpJP|Md?_1|QY-xfl3_f%Nw0XMxjuk7(QoePWA#UwXOryB1J~vGaAtg(mbr z!#^wl*qDDa!0El~Gs2(7C;KrF2wziwg~`1Kft#$*EckNns+6a# z6HZYJbP3}@O8NT*z?16rGNuRL-YIlXlZ$ge@niQ3@EXNmt@u%0Ul>;WO~8}#|FY8W zi_F*GQhYih$Ie1Ih@V%#Cl&gz_V3dbZ%62VgmEoI@UpnrU%0kjFE`59c597x-5aU2 z+YXMOb-hCsuibNd<@vgoi6L3Xy?)2;Et_|^Znr=07G287<&4}wM(*C(-i}Gvt+i^+ zS}&H%Xbc&PA>%P5n~y=cY#hqPq4@i933*M#6HZ2}TWK{KttK~CrPJ&7`U?vq6{qTT zyv16#=XKm(!>!a?O|R>?Znfnu)?4%Cx?AnFI$gKiKja_-w06&{j!b68(V9s_?gCoP zEq6NQ71wL_IxEgXr`+(|YQND~L6X3oOYIvXU0iI`Q6%m^cVRT=E@a1@MYr24_dMtN z>v!&O=WZz#+}YyZg4}M|v2}akmY?q0yi=d zxURTs^A4&AzrN`mbbDU2+v>Q)(gK#N)$m<5cw4Ci?uelW2^)P>$j_!)TMpcHQ8zDm z@AU$AmG(;DuG4AP}Bn71hm43I^YK(H~q$k0$59o7&w+&FqMe<&* zb-^X=atGYh6=<AV)o%CPPV1n1An#_} zj<*1YZ&tk6vux_KvE!s0bJNpJhSFVs{Y}>e_?NqV*{fg>^#<+b17M%37c0ItiKgNT z9l%=>NV750tP9d?j1-lNkWzIMb46Waq$#;lp+YsJ3?H{r%EJ>x3oCHHE`2j{(@&gRCXl*l-{N%s7iM~Ca8tvVhu=c_d2a)nr>^M zhip=M*`6M!gftT*XU%g&dM}sjwfS0dN_+*Nkur`et>&_uP3a^CM0acWtGG9pQL6<5 z2+ACdUkvWT=#_4!P$(?Vxw8}QT*jTty7K0{%WPMJHcu~4dhf}+Hai)5ZLigK_lI99 z?|r1Jd*b(MWut77g$=ADnET^j6w6 zXxhQ%*Y19|j9YxtO?_*q8Y`HyR!b{u_zvI%0AR zuKbBg=R)UxgohdpzT*~dTha*3O~nS~Xxw5RCDz!H9p72l>CO~h;?8cJ**v%PhD;4` zB3_u-4==&(WE}6k{^kUd>~fIggan*N&R$=*dGn5F`t*l8#?jbNaJGy=N@TN;jJL<@ z9B+nvhU*;8H?bMA%d!0O<7pMo)CLugvNkp|$Dnw@#z;c$$c+gfO5MyH?k<(vo>nwC z0&W9$-p0H+ck?ccRxty!S$CGa1#DIpcg?z!Zn3!x=?bae8|(EUE{iwfRa5dRQYAb{ zV>7**5CF|`eP(-B=aPD>T-B#D*(^JZ=HWN-I<*=P?J2fTFxb7)QfYz3#KSF90~1~y z9tX8tF=c3(P)mp^?(uAHzTB-$2gB3ASJ9#b1x*k64k$ z6RManw`3D0m+bf$UOxwg59qxtcruw)}T2_kPhuyI7cOl=pk? zf&GWz^e6RHyhPVxt*lkSQ@)b!-Gl~vcNK792yBau1(H zjTRBZFq`Q&YX|zC+wOIW<`j{4(CfSoNb*&0q1>sUB*-<$2@~FqSOEz$Bw1IEudd((QKQU8b z%)oiOSZjL4Id>0yVTC<#{oy~jF!#)%u6yQ?cF)`x{WnhkP0)XEXYQGsqW{od^gpM` zlHXIgMBI|S7TK9-o#6a5E_QYyG;Bt4x*&`q!B?qFA_Ic+qNUh_wce6+o3anq(z)q# zQ`WIa+9k6bt}l!b1R>C9n=heZ{Jv))1MhR!L!ZMb8!CrnObag!7((Q=;TKE*sZ%Q@ zG`rZ#fwo7pO52myt^mj84A?fs{+`Xdvt#6LEkxTx7aaquO3c>cZ;)b=Na5n4{coTp z2xj%t_T>g-($+nDHg74q*^yC}Qz$|%Tdj2~9gJ-kf585q?~}MF)(Y7Sk6M?!5nFS` z6S+7S2BUBRwO=3?X8}SKE(0l8uLkEm$H0h_gKitTG7w%e!jJ}6f^(=t;Fcg^2lvAS z;wi9h1al&>cl@{tV-$W(rQLUz$_O}Fw$&;Hmb(VfdV=ebw>&JTgnTD?U;wSpNCM#6~8I6lR06bF|)n#AFaZzlFOo5RCKGRLaJb)(I{~@3D>aX zrVf|3)W$ni78=K-4Y^)u#D@k(TXvjXwIqk2P?2f`RR+UoCYKASXCj5WSQu}ICcOEU zY8t0nC}N9p8xpTdFAJ-bG-%zuD44HZk&@N*YNb+TCa82liR$!P{fLgr^|A3gk45g| zi9UBZ7YnKh>Y6M(!ckN_D_44lgq0_2tu9Zlq>hDT50wszYOABDs?eO{LK|8jw1tH> zqL*Z_(&7tcKd zu{ItMElgfSjj&7sc}Frw$A(GAU;~}3$G=3&6bNV(<~D&4w@mf{U;#L+>lAK64?Fv{x?H;Y)A z+r99(!bqrl5nGVo8j9Ld4GA%)8p}pig*iq|q3gEG==k|TTgc#VVexB;f+tU_n_{xT z?;jfRq2?YlC%i%2y1ZsYF)ikqaq<=vnwdfHQ0Z~qZogKFf{as1^JLD5|ISa z1ZI|+W=H#oFQdrks*9#NT@{8D2USQS3JMe&=qUjca}eo5L0Wzt8s!>PDrPr-Y)TM2 zdjJO7zXLrsG9$EQ#nZhFaq3~Fktq%nQJsQ$I%N@p2_A^?um>VV_T@5F1cj6G*aJta zVClj$An&0rFlHB|TV6&`3hZrELL48$vIx#>x4N}M#2VoB#YnNE@vf6ltNm$@d4;nH zj3r?u)6Nf;yJYM&bg~)HWo4L=-{H|KiDB3!c%I$fVA* z*FwrV?l2N;Hmi||Svv#=$C%8XP|tPtgp6B+SAb`hsI?=VOcITSCB6StCluDpz>bSe zig?MV%qkpiuMPMJRCBjA)+1>wMP_$uUc~W)7(3wm3Ux-@a@v|^Il(cSW;yYrWXlQJ zo!D|Zu8n{kLVD2Vkd08o!=0F;VUksP6p}SFTbr4Sbmo);;R0q!Vtqs-Qn(zZZa7h! z6|gq^OKqC2DNGP*d%51L!OK{lN03}n1{PV!lfTG1#NEMUU9MGR*G#ts6SG-`ueZ@} zVrV4g5?iImktV!{!G&7~+F3Y_$h=@cGewijtf}#g>b{wvnsk<76Uk#=)`9(L=LURb zr#sWBv5qK?uy#rO2i;K=Sl{IuW;-|>O*+7R$+XG#dOFk5lVqMd36^6slf}-YDz@-? 
zm@=YsMF*RFM9$a)_rNGmKhUJh^Ie_lny%NF4@@t%8AgQ0K(oUOntk&V(XId(eq)F^ z2a_Po7S6!7*FBCqG1VhfKt}KYBAio;ZkBNi;lnH;uHBcl-t>A=18uVVu?PGt6-w6akgC>vYUHWYPL z+7jc&gIEdKM+iTHei7jX+ECXrSB?4%u-CA%Kg=@QeryCAv_=pCedrJba;MuOqo}vy zF3a-#Kx5D#C)OCk3_X%n@2J{@7>wBb((QM2y^b85q46y0F&in|RV`8%#Em7hOe;Rg zi0~)AgtLcc6fd9+?_Y-5SXsj242N}BS}>f2qjZO}%qxMJ>^Kcn!sS!Vr=uLTugI0(7 z4HIqOE1G86rTv36i4Mj&JSjAZ(9$1;SHsW3PH&3BE)+)Ft+sDxRA_GkIrg#eL9YKw zs~cf5d3hELN~8@~w?*;UW*|kflc*cE{+Rs0F5$%jrbwZMmYSW?B_9lGpDJ;5r;4QG zsitF8PYTJzPox$$vTh?qIIeiJo&GSmA-0Sm&PNo_wm=-Oq^Yr_=qIvjr`mm5r2_94 zc9SX}vC90_1D3p|+BC3L^AIF+e9ZeZF1gWYEhVrh?$1rws_W9?Lr%SX92#&U-=0oh z`#s$?Lvu1X9Gkki$YwV-y%w))AeI*?uyv$O4k)=Jqq$-G0L{%pK80mn1+q-ATs4LT zu;pg6<_j0NQ=_t_H;U~2b!&`+@Vc~_N*D>Zis*1k%Tv9Z>#lgrYpAol&1Ue5$-Mt#a+HAGMdZ50Slqlv} zf0fp+sc!^w%`n&kR!Zs$J=8?VE6gu2bzyV*tHZRYz==rL51F}$^W-L=y_;N;^p)~| z!&UwQM>TVKvg7LK!Db0vT7pwu`vl@T4gjYsdgG zY?@XQ^Q&oN!L&BH&#k8uC;KtAYni36r5uKlb+N6~{p+hUVP+fXHpCxQPa9Oij@T)p z1CT?l;nLMIM`EpcYz3>!`N-hW3~YL5ks+=8AGUm86;$kT$R)&9@oK_2RueGKk(&yN zJMMPP!lci;yD(q}Q!3Q;K_>GwHBV4LJj7O=JX4tBo4w*Zn{BchArg=yszP2x!KItY zE{i%4;ogLEwF^zG>OxL)2M>nMggIU@G1VgGzRg-)ubw#A{(}t$Js4XI4^3F20xv?5 ztm60&a^9H=Vk;B%@(Lo?R0o6yiN>@9rRh?t?h+@v6COZX*N`_1bO3g3^H^?Sbro_8 z*08=zJS*5kxs!B9+Umou7UH^KG#xw`7SV(!u^>8%>?vKhyc{;{{BS205Nytfm%I`1 z;bbRN%GwreZiG`(1E7weP~-F#SkhP{>bQ02s;rIi8Xo=|S3QjUq=S9nbcdBVht}-| zoKG>2+*%YV(VDYBe27b87R zd<;-j$W|9)$k!RQYpJh!#fo|{VRlpnIhP|ssC8I;TS4Fv3lMa_1}z5im-29ZIUGzl zNSL!(7hZW7UhTnTLc5-W(OrD_iW*hEHcR%4tIm_C&_YzGvjAQqt2z>-dN3SOAaz1@ zDR_}0C0+Lr*+=1Y+|k@mZfl-)lu9yR>n7#V>K|I+jTsS=aL}kYrNyrpdb6o}sCCH` zQhbT>MPS$|F?+7ugJU`ab1|25KS)a7CZa!<}z8WAt6-^}FD>XX+Br3>%gr#!?< z)M}%U7+t=~3h4tkEPK;JFe#nvL)GRD1qzeVfJ3F)q-$`nLxA0B)nYK(sLXnVz@Q~@ z3XZfSn@l9lnyrLhq@^1g&77OIwnR5F;+q5TTn5Ymvrrb>eO@mQ3E?FO-lalepyW#% zH^@v2REX2~{^WmK4_304x5Xlv!;8!Q+#%r~(o{A?^6I*Uxg!rMD;0zVEB6PdgMlfS zG#ik!A{bK5tuOvVQW^Q=p#iV591GfkOddYtgvA)i;f1c}MSNmR4xhjANKDP40n9|f zu_CERI<|rW9b5JX@}BrfWR)LhZOqc`dZUsl7cG>>;KUCS&cXPC@+>?8I&zyFI>N<( z1KlsY>(1@mid6vK6rg+aTbL2xOa%=^{U*9x0)e?ojZkL}%%z>B2o$l!aT%5)v@w!I z&sc?IKKw2)A9o@ivWjCCXdaDv%gkr!k0J6y3`}a{Ac?6Uo_K^jTzT@P(4r&<6hS)* zX(dUKNNEf?n1Y}p*pVxPumd_iW?`X@w}l1E0tM)XA!)go5)_ZABfFOO6mT!g`6B_A z;E_2f7N0;w9>wt6IBmiKBr$&QYiXk+_H6{mqk(j$-zZuzM|)CA9(DxRr0dTNCT z3W)~H=83J9_YwV5m`PAFvNl$DkR0 z0lwwP`o>@iAlEx~OL5WsmOSZ0Xt+d_ww6aTA+)p-7Iw%NHOA>O;^(JG#7eo_3p1R& zJbm+kRy$CuJ_T{oTgPZlT7(IbUqJXy5!)2p#=K}?y@C#Ae=QNJR!Ha!1V8sn&~TGFDyGDL=hGLI`B5n`{u|jPMln$Aw;%s~ z+$aX*5Q<{RvAd(d%ktH7)KDV%g@8p`RXbFkuPtY3jjLOuJ#knpW{ZV)@`u*gm>+p3 z2?E{FNto$QRV?_s3mS-EPmPHodz!6Vuy8Ku3W4{8I@EvbU|AF~Vkr(t^f4Ad$inZg zG%Mg^+TjwA))GHxsxt+Mks2;)2n|LAS(?Q`EZ@O77hwFz!56{(Q;|J9BvK+FrIL9- z5%Sy9XT#*!19-J|!sLNx$@T$6D}%#_xSIx+7WtzP_y#!_o&rTsG{xc#YKmy$w19OL z3U-i*ohZRuJak@z;Iji_BEHWP34=-<>tT-$*Eu4_5aTnmT^0vftLH%x-lFCqVxJp3 zu)r#D^g*6Aw}-z@VY0Xe&V=kwc0RE!6z(v772NmDE(D@SyB@Ku8YT&kj+4|Dpg%`L zIQ%g|qfmHb;O$~wl1-^neOdY$=r-z3Q7wuBL3;2ZAu3aXozx>2(m79T`V3~sz>By^ zWS6GfZDY-zc+A#){fNRz`wJ>>P`h_ko-_j1vE>i)#hAe0@?ot= zSYP9fa~YhOQ-!_8d;Kf@4xJ3U60;3R)@3JX69Kxsiv0u#hl{++62cG zv?Cfb6f~$pntfQnI(6(9NOIlLX|u?z*D|Hg-?WYaWP;YJ1&%P9sQCygx& z3#sf1oiAV;+RS!%_%J+Emxuk8!N4Mch++s?*5oNv%M4>$jcP5(HsPq670@0-8Ux!{ z@cMDCgT+M`o60#t4*T$F`(Ma7lLhCMnpdxeX9-(XmRKk>Tau^O_dwF|AOjENKyNS|^kdx)e`?%0@%JToWTzgzgJ&jqFuR@0$_%nOFDIdcs5e;fHU}+;evkYz1 zrLgVTN`oH~&q@rA){qb}jD>mlDoh1e##9A|mmA1jAB3$32O`c>ps@AZb)=~KK1boc zkcNj_9Fn`$?PF)vc+A zY_oHSxrQf8U1KI+XLgLYUGtHY{cd?tcDjf6UrS!RtJuz;AR%i3Ai+gE{+fWvW>hZG zt)CfKq^E0PLLW4xQB~^5I)Ic}RudBUSh|SLn6J2%F0EXPAw0<-og;p6kA9CvA88Nh$gZXFD2M5>V6x?x4um}TbCc-cyw)}4D{ zUj$Alg)_a}qpShlail5ZoCo 
zVsC++tvJ7c^Q9ZXb%VD^B!5JmYIr_5Jlxp1y*aV8An8WkVyo-!hGT|UDnj>hM&z6XsmM^8BuhU9Cq7D)yYGC4eta;$06PM{$@DrC8V9wCygJvKwBzCA9 zoiYP|8lSyHGtJ2y4+Ob;e7_Y8XFYfMgdHmLTkKGUa z*UfH1)nIA=O8k(6XjBp%am2|(pU~jX*CcJ$3F*UJQSg=-bCF!(;sx3Z2v?i=fKDKt%1`T+e4k)`x;jKFUj#8UV5hERxwJOqS zDrMKIAHWp(pwB@5_+NXnI0@KZ%q`?u0G5Eg=-nH27>6 z=YgeI-~Fzyq6H$>7f8+xzM(;fl0l>+Fpf`3KOIiNn$?_Jx~^7&H42potI{P%YwQ5w zC`{f*)W8Dt?IK{+wVbD88DgGfh-o$(<3^)s6(o=JY&;${HrwR!ZM6lKJ@F|dO(v9N z8*pum4!m596h*pcuu{t*qj=?M_>YD6NL7rxkEK>PMa2#lGBJ=4Zb6<)imXc1ayUZf zr1Ttk4Cn+yKNuiMPeefN>B(8$351xA?n+}GhcGcWn*Qn*=O}pDzNd&lC;FP=9DR*a z2rcN#OnA71B?OHE3+QY;X)j{s4o=#@x_dCv0NWw4bR#(t2~Gi|!)YA%rY0=5syoAR z&k*_e66WZLT2SdL{=wq{!Oyhi(c+7-=MKDpQ*?Mk6ges;7HKHhX(s8LjotJsHR96Qp4obg2GI~_z##uq?1WWVBvwiM)kCjBohj(h@GYK zAr6f%WP1xj1`89m!45T&?>trHtFN@#R=HDS?ZcJ=tqqHeHnDmtwS?0STTj!BViD`Q z!MuXcg?6BO1-6)zrWBlG&7KMEI%hE{BEZ&3r9Nbb=~)B*WUQe`QlV(P)9FleVI?T& zZw|)ZkReJO8_=HEt_;UbX{k<9r>1T_uD&eU*`U%rj66@6ry#sl#H>-vzUTB#=FihAvWYCC_F!<6sLs;kk^RRAz}t`_Nf~_CeVMA_tu?6QR@y(}6PguYK}u zY77yM4Gj-OX>vH3XxnWzALe-IRs!fDMGtGDCIyGOElCN$d+6gnh@oILm-hE8l$Hx< z@=|c-gT}`09QJNvj#|=^qEduSN;f`&u5Bs#1$JUzL zZhsRKueu;wA7|*qS4m)R;xpFbZN|y+td0<`gn29w(uIszGfULVRO7M%gD=`?2a@@N zt>0c1M_A$*ztGq}m1#Xqouw)NB;0m5SsI7t_-?zvqEFv;f((QjeV{iqjg#F_J7v2; zg{4LpQA^qZ8vd);Fr#hf5yOmTD%n`WL9qvM2t>pQf$|Nu)O<_cD{{w~mGN2Ri{0!b(^SM@}-0vJ;H5fbg)jU*B{y zr-=v-4cf%3`73n3VtMl6u!Ye3{ct}LKTUVPeNb9m>7vj?W_kJ2qzXbiqXMT-N$=T7 zzPxZ0ZxUw&mMkK>a7B<2UzhAJ9ztzrq}l3uBRGcsT2eMf@Nv=+I{DHWsjf7USgtrX zil5*_QMOWpH!hxGBVU;k=wI!6&l%y>krDhkvWRmkM(7J@&IsEHBltcuo$)f_Ex|`i zACXlb*m-1rKZI<~z`rv!yLagycD*$68lnyUo{5`t@z2tKT>r4^aF7^(pB=f+;{JON zJ=C)FU(-MAnpb*DcU~m@3vmAg{Im4KX9&?;?3% z*PRzCp2T`2Eos+FVX~7etVBy_ki{Xf%I>=l9M}jUDNbbe)?zS7b?eD z#2=KmM7`;{Fd#{;6d>H>zdxKU-_b6 zGtR=^Z}}aLq<_m8jRwcD>l68I@L#n4KaWTBe(OJHTGHF~aos^$`fE6R>)eg|VEXfZ zL(kaJNP4?ovtlXbYVBs|eUbEQFA@s7)}L&Ya*e+KlYnW6>acZ& zJV`%vlUz^Ke}9a$n*!;t(e&47ddUR@`m=kx9s^G8XYbEw`i!Rc8D7&z`~Mrji9`JK z4aqrkt6Yb*V`=Qt&)#eK{3Nj8`%5jMD7kWd7pEHhXX)+wy-0e;2FyO8xFOQumd>sZ zMbh8U({xq2UNd+#|7Gb6J_m&-%%py@t~Ckd*$aAElff M=gC%3zKf^-zm*fAAOHXW From 22de6c5c4cf466694ce755b8ff1c6b67aa27deac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Thu, 10 Aug 2023 18:09:49 +0300 Subject: [PATCH 18/87] upd .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index fd41f26cc..93ce82cce 100644 --- a/.gitignore +++ b/.gitignore @@ -47,6 +47,7 @@ models-mnt /Pipfile /embd-input-test /gguf +/gguf-llama-simple /libllama.so build-info.h arm_neon.h From 42cc04d11d24b7b41e19ecaa38b46350faca141b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Thu, 10 Aug 2023 18:49:08 +0300 Subject: [PATCH 19/87] gguf : calculate n_mult --- gguf-llama.cpp | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index d385ef79a..b88a2d8bf 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -514,16 +514,30 @@ struct ggml_context * ctx_data = NULL; return gguf_get_arr_n(gguf_ctx, i); } + int find_n_mult(const int n_ff, const int n_embd) { + int n_mults[3] = {8192, 1, -1}; + for (int i = 0; i < 3; ++i) { + int calc_ff = (((8 * n_embd) / 3 + n_mults[i] - 1) / n_mults[i]) * n_mults[i]; + if (calc_ff == n_ff) { + return n_mults[i]; + } + } + + throw std::runtime_error(format("failed to find n_mult for n_ff = %d and n_emb = %d\n", n_ff, n_embd)); + } + void read_hparams() { // TODO make keysconstants in header // TODO: read all hparams from file hparams.n_vocab = read_n_vocab(); + 
hparams.n_ctx = read_u32("llama.context_length"); hparams.n_embd = read_u32("llama.embedding_length"); - //hparams.n_mult = file.read_u32(); + uint32_t n_ff = read_u32("llama.feed_forward_length"); + hparams.n_mult = find_n_mult(n_ff, hparams.n_embd); hparams.n_head = read_u32("llama.attention.head_count"); hparams.n_layer = read_u32("llama.layer_count"); - //hparams.n_rot = file.read_u32(); + hparams.n_rot = hparams.n_embd / hparams.n_head; //hparams.ftype = (enum llama_ftype) file.read_u32(); // LLaMAv2 @@ -568,7 +582,7 @@ struct ggml_context * ctx_data = NULL; for (uint32_t j = 0; j < n_dims; ++j) { tensor.ne[j] = cur->ne[j]; } - + if (n_dims < 1 || n_dims > 2) { throw std::runtime_error(format("llama.cpp: tensor '%s' should not be %u-dimensional", name, n_dims)); } From cfb8e35b73bd8daca6fa5179d30c48a5db43bc31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Thu, 10 Aug 2023 19:56:56 +0300 Subject: [PATCH 20/87] gguf : inference with 7B model working (WIP) --- gguf-llama.cpp | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index b88a2d8bf..0c4095714 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -493,6 +493,8 @@ struct ggml_context * ctx_data = NULL; gguf_ctx = gguf_init_from_file(fname, params); + read_hparams(); + read_vocab(); read_tensor_metadata(tensors_map); } @@ -523,7 +525,7 @@ struct ggml_context * ctx_data = NULL; } } - throw std::runtime_error(format("failed to find n_mult for n_ff = %d and n_emb = %d\n", n_ff, n_embd)); + throw std::runtime_error(format("failed to find n_mult for n_ff = %d and n_embd = %d\n", n_ff, n_embd)); } void read_hparams() { @@ -534,14 +536,14 @@ struct ggml_context * ctx_data = NULL; hparams.n_ctx = read_u32("llama.context_length"); hparams.n_embd = read_u32("llama.embedding_length"); uint32_t n_ff = read_u32("llama.feed_forward_length"); - hparams.n_mult = find_n_mult(n_ff, hparams.n_embd); + //hparams.n_mult = find_n_mult(n_ff, hparams.n_embd); hparams.n_head = read_u32("llama.attention.head_count"); hparams.n_layer = read_u32("llama.layer_count"); hparams.n_rot = hparams.n_embd / hparams.n_head; //hparams.ftype = (enum llama_ftype) file.read_u32(); // LLaMAv2 - hparams.n_head_kv = read_u32("llama.attention.head_count_kv"); + // hparams.n_head_kv = read_u32("llama.attention.head_count_kv"); } void read_vocab() { From f316b94c7c9a4f5a8cc5bc84f2fe7048a38b6ae9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Thu, 10 Aug 2023 20:20:22 +0300 Subject: [PATCH 21/87] gguf : rm deprecated function --- gguf-llama.h | 7 ------- 1 file changed, 7 deletions(-) diff --git a/gguf-llama.h b/gguf-llama.h index 20dcc9f63..6062f375e 100644 --- a/gguf-llama.h +++ b/gguf-llama.h @@ -222,13 +222,6 @@ extern "C" { struct llama_model * model, struct llama_context_params params); - // Various functions for loading a ggml llama model. - // Allocate (almost) all memory needed for the model. 
- // Return NULL on failure - LLAMA_API DEPRECATED(struct llama_context * llama_init_from_file( - const char * path_model, - struct llama_context_params params), - "please use llama_load_model_from_file combined with llama_new_context_with_model instead"); // Frees all allocated memory LLAMA_API void llama_free(struct llama_context * ctx); From 9ca4abed893685692f90413e4d43153af12342d9 Mon Sep 17 00:00:00 2001 From: DannyDaemonic Date: Thu, 10 Aug 2023 13:11:36 -0700 Subject: [PATCH 22/87] Handle `ENABLE_VIRTUAL_TERMINAL_PROCESSING` more gracefully on earlier versions of Windows. --- examples/console.cpp | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/examples/console.cpp b/examples/console.cpp index 8966b107f..8efa2a674 100644 --- a/examples/console.cpp +++ b/examples/console.cpp @@ -10,6 +10,9 @@ #include #include #include +#ifndef ENABLE_VIRTUAL_TERMINAL_PROCESSING +#define ENABLE_VIRTUAL_TERMINAL_PROCESSING 0x0004 +#endif #else #include #include @@ -68,9 +71,10 @@ namespace console { } } if (hConsole) { - // Enable ANSI colors on Windows 10+ - if (advanced_display && !(dwMode & ENABLE_VIRTUAL_TERMINAL_PROCESSING)) { - SetConsoleMode(hConsole, dwMode | ENABLE_VIRTUAL_TERMINAL_PROCESSING); + // Check conditions combined to reduce nesting + if (advanced_display && !(dwMode & ENABLE_VIRTUAL_TERMINAL_PROCESSING) && + !SetConsoleMode(hConsole, dwMode | ENABLE_VIRTUAL_TERMINAL_PROCESSING)) { + advanced_display = false; } // Set console output codepage to UTF8 SetConsoleOutputCP(CP_UTF8); From e7d346c37c3ccd10baf1bc6895887a6ab17ee378 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 09:52:01 +0300 Subject: [PATCH 23/87] gguf : start implementing gguf_file_saver (WIP) --- gguf-llama.cpp | 12 ++++++++++++ gguf-util.h | 45 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index 0c4095714..9c0b5651a 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -626,20 +626,32 @@ struct gguf_file_saver { : file(fname, "wb"), any_file_loader(any_file_loader) { fprintf(stderr, "llama.cpp: saving model to %s\n", fname); write_magic(); + write_version(); write_hparams(new_ftype); write_vocab(); } + void write_magic() { + const int32_t magic = GGUF_MAGIC; + file.write_i32(magic); } + + void write_version() { + const int32_t version = GGUF_VERSION; + file.write_i32(version); + } + void write_hparams(enum llama_ftype new_ftype) { const llama_hparams & hparams = any_file_loader->hparams; GGML_UNUSED(hparams); GGML_UNUSED(new_ftype); } + void write_vocab() { uint32_t n_vocab = any_file_loader->hparams.n_vocab; GGML_UNUSED(n_vocab); } + void write_tensor(llama_load_tensor & tensor, enum ggml_type new_type, const void * new_data, size_t new_size) { switch (new_type) { case GGML_TYPE_F32: diff --git a/gguf-util.h b/gguf-util.h index 74d6e61f7..94ea9006a 100644 --- a/gguf-util.h +++ b/gguf-util.h @@ -15,6 +15,7 @@ #include #include +#include #include #include @@ -61,6 +62,14 @@ static std::string format(const char * fmt, ...) { return std::string(buf.data(), size); } + +template +static std::string to_string(const T & val) { + std::stringstream ss; + ss << val; + return ss.str(); +} + // TODO: can we merge this one and gguf_context? 
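// Note (added for clarity, not in the original patches): a sketch of the
// on-disk layout that gguf_file_saver converges on over the next few commits,
// written with the helpers below; field names here are descriptive only:
//
//   int32_t magic;      // GGUF_MAGIC
//   int32_t version;    // GGUF_VERSION
//   int32_t n_tensors;  // tensor count, added to write_header() later on
//   int32_t n_kv;       // key-value pair count, likewise
//
// followed by the key-value pairs themselves: each one a length-prefixed key
// string (write_str), a gguf_type tag, and then the raw value bytes
// (write_val / write_arr).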
struct gguf_file { // use FILE * so we don't have to re-open the file to mmap @@ -95,6 +104,42 @@ struct gguf_file { #endif GGML_ASSERT(ret == 0); // same } + + + void write_str(const std::string & val) { + const int32_t n = val.size(); + fwrite((const char *) &n, sizeof(n), 1, fp); + fwrite(val.c_str(), n, 1, fp); + } + + void write_i32(int32_t val) { + fwrite((const char *) &val, sizeof(val), 1, fp); + } + + void write_u64(size_t val) { + fwrite((const char *) &val, sizeof(val), 1, fp); + } + + template + void write_val(const std::string & key, enum gguf_type type, const T & val) { + write_str(key); + fwrite((const char *) &type, sizeof(type), 1, fp); + fwrite((const char *) &val, sizeof(val), 1, fp); + } + + template + void write_arr(const std::string & key, enum gguf_type type, const std::vector & val) { + write_str(key); + { + const enum gguf_type tarr = GGUF_TYPE_ARRAY; + fwrite((const char *) &tarr, sizeof(tarr), 1, fp); + } + + const int32_t n = val.size(); + fwrite((const char *) &type, sizeof(type), 1, fp); + fwrite((const char *) &n, sizeof(n), 1, fp); + fwrite(val.data(), sizeof(T), n, fp); + } }; #if defined(_WIN32) From a356b0e228b640a31cd9d1718d097a1aabcd7675 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 10:50:02 +0300 Subject: [PATCH 24/87] gguf : start implementing gguf_file_saver (WIP) --- gguf-llama.cpp | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index 9c0b5651a..e88dc6a08 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -625,20 +625,25 @@ struct gguf_file_saver { gguf_file_saver(const char * fname, gguf_file_loader * any_file_loader, enum llama_ftype new_ftype) : file(fname, "wb"), any_file_loader(any_file_loader) { fprintf(stderr, "llama.cpp: saving model to %s\n", fname); - write_magic(); - write_version(); + write_header(); write_hparams(new_ftype); write_vocab(); } - void write_magic() { + // TODO: probably it's better to move these to gguf_file + + void write_header() { const int32_t magic = GGUF_MAGIC; file.write_i32(magic); - } - - void write_version() { + const int32_t version = GGUF_VERSION; file.write_i32(version); + + const int32_t n_tensors = gguf_get_n_tensors(any_file_loader->gguf_ctx); + file.write_i32(n_tensors); + + const int32_t n_kv = gguf_get_n_kv(any_file_loader->gguf_ctx); + file.write_i32(n_kv); } void write_hparams(enum llama_ftype new_ftype) { @@ -651,7 +656,7 @@ struct gguf_file_saver { uint32_t n_vocab = any_file_loader->hparams.n_vocab; GGML_UNUSED(n_vocab); } - + void write_tensor(llama_load_tensor & tensor, enum ggml_type new_type, const void * new_data, size_t new_size) { switch (new_type) { case GGML_TYPE_F32: From b2440f1943f88243938e438acef18ad072b9004e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 11:29:50 +0300 Subject: [PATCH 25/87] gguf : start implementing gguf_file_saver (WIP) --- gguf-llama.cpp | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index e88dc6a08..08b700435 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -647,9 +647,14 @@ struct gguf_file_saver { } void write_hparams(enum llama_ftype new_ftype) { - const llama_hparams & hparams = any_file_loader->hparams; - GGML_UNUSED(hparams); - GGML_UNUSED(new_ftype); + const int32_t n_kv = gguf_get_n_kv(any_file_loader->gguf_ctx); + for (int i = 0; i < n_kv; ++i) { + const char * key = gguf_get_key(any_file_loader->gguf_ctx, i); + 
if (strcmp(key, "general.quantization_version") == 0) { + file.write_val("general.quantization_version", GGUF_TYPE_UINT32, new_ftype); + } + } + } void write_vocab() { @@ -658,6 +663,10 @@ struct gguf_file_saver { } void write_tensor(llama_load_tensor & tensor, enum ggml_type new_type, const void * new_data, size_t new_size) { + GGML_UNUSED(tensor); + GGML_UNUSED(new_data); + GGML_UNUSED(new_size); + switch (new_type) { case GGML_TYPE_F32: case GGML_TYPE_F16: From eb8ca6996f5e3559505b3269d98e5d3ed860200e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 12:24:08 +0300 Subject: [PATCH 26/87] gguf : add gguf_get_kv_type --- ggml.c | 4 ++++ ggml.h | 9 +++++---- gguf-llama.cpp | 3 +++ 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/ggml.c b/ggml.c index df8dce7e8..ac45f12b0 100644 --- a/ggml.c +++ b/ggml.c @@ -19031,6 +19031,10 @@ const char * gguf_get_key(struct gguf_context * ctx, int i) { return ctx->header.kv[i].key.data; } +const enum gguf_type gguf_get_kv_type(struct gguf_context * ctx, int i) { + return ctx->header.kv[i].type; +} + const char * gguf_get_arr_str(struct gguf_context * ctx, int key_id, int i) { struct gguf_kv * kv = &ctx->header.kv[key_id]; struct gguf_str * str = &((struct gguf_str *) kv->value.arr.data)[i]; diff --git a/ggml.h b/ggml.h index f1673eed5..4490e076c 100644 --- a/ggml.h +++ b/ggml.h @@ -1744,10 +1744,11 @@ extern "C" { GGML_API size_t gguf_get_data_offset(struct gguf_context * ctx); GGML_API void * gguf_get_data (struct gguf_context * ctx); - GGML_API int gguf_get_n_kv(struct gguf_context * ctx); - GGML_API int gguf_find_key(struct gguf_context * ctx, const char * key); - GGML_API const char * gguf_get_key (struct gguf_context * ctx, int i); - GGML_API void gguf_get_val (struct gguf_context * ctx, int i, void * val); + GGML_API int gguf_get_n_kv(struct gguf_context * ctx); + GGML_API int gguf_find_key(struct gguf_context * ctx, const char * key); + GGML_API const char * gguf_get_key (struct gguf_context * ctx, int i); + GGML_API const enum gguf_type gguf_get_kv_type (struct gguf_context * ctx, int i); + GGML_API void gguf_get_val (struct gguf_context * ctx, int i, void * val); GGML_API const char * gguf_get_arr_str(struct gguf_context * ctx, int key_id, int i); GGML_API float gguf_get_arr_f32(struct gguf_context * ctx, int key_id, int i); diff --git a/gguf-llama.cpp b/gguf-llama.cpp index 08b700435..8b928c364 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -652,6 +652,9 @@ struct gguf_file_saver { const char * key = gguf_get_key(any_file_loader->gguf_ctx, i); if (strcmp(key, "general.quantization_version") == 0) { file.write_val("general.quantization_version", GGUF_TYPE_UINT32, new_ftype); + } else { + const gguf_type vtype = gguf_get_kv_type(any_file_loader->gguf_ctx, i); + GGML_UNUSED(vtype); } } From e3a49609533646cbd1990f2d99668aef7a0cc420 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 13:03:23 +0300 Subject: [PATCH 27/87] gguf : add gguf_get_kv_type --- ggml.c | 2 +- ggml.h | 10 +++++----- gguf-llama.cpp | 20 +++++++++++++++++++- 3 files changed, 25 insertions(+), 7 deletions(-) diff --git a/ggml.c b/ggml.c index ac45f12b0..e00f09fa4 100644 --- a/ggml.c +++ b/ggml.c @@ -19031,7 +19031,7 @@ const char * gguf_get_key(struct gguf_context * ctx, int i) { return ctx->header.kv[i].key.data; } -const enum gguf_type gguf_get_kv_type(struct gguf_context * ctx, int i) { +enum gguf_type gguf_get_kv_type(struct gguf_context * ctx, int i) { return 
ctx->header.kv[i].type; } diff --git a/ggml.h b/ggml.h index 4490e076c..9a266e175 100644 --- a/ggml.h +++ b/ggml.h @@ -1744,11 +1744,11 @@ extern "C" { GGML_API size_t gguf_get_data_offset(struct gguf_context * ctx); GGML_API void * gguf_get_data (struct gguf_context * ctx); - GGML_API int gguf_get_n_kv(struct gguf_context * ctx); - GGML_API int gguf_find_key(struct gguf_context * ctx, const char * key); - GGML_API const char * gguf_get_key (struct gguf_context * ctx, int i); - GGML_API const enum gguf_type gguf_get_kv_type (struct gguf_context * ctx, int i); - GGML_API void gguf_get_val (struct gguf_context * ctx, int i, void * val); + GGML_API int gguf_get_n_kv(struct gguf_context * ctx); + GGML_API int gguf_find_key(struct gguf_context * ctx, const char * key); + GGML_API const char * gguf_get_key (struct gguf_context * ctx, int i); + GGML_API enum gguf_type gguf_get_kv_type (struct gguf_context * ctx, int i); + GGML_API void gguf_get_val (struct gguf_context * ctx, int i, void * val); GGML_API const char * gguf_get_arr_str(struct gguf_context * ctx, int key_id, int i); GGML_API float gguf_get_arr_f32(struct gguf_context * ctx, int key_id, int i); diff --git a/gguf-llama.cpp b/gguf-llama.cpp index 8b928c364..9ab770277 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -536,6 +536,7 @@ struct ggml_context * ctx_data = NULL; hparams.n_ctx = read_u32("llama.context_length"); hparams.n_embd = read_u32("llama.embedding_length"); uint32_t n_ff = read_u32("llama.feed_forward_length"); + GGML_UNUSED(n_ff); //hparams.n_mult = find_n_mult(n_ff, hparams.n_embd); hparams.n_head = read_u32("llama.attention.head_count"); hparams.n_layer = read_u32("llama.layer_count"); @@ -654,7 +655,21 @@ struct gguf_file_saver { file.write_val("general.quantization_version", GGUF_TYPE_UINT32, new_ftype); } else { const gguf_type vtype = gguf_get_kv_type(any_file_loader->gguf_ctx, i); - GGML_UNUSED(vtype); + switch(vtype) { + case GGUF_TYPE_BOOL: + case GGUF_TYPE_FLOAT32: + case GGUF_TYPE_INT16: + case GGUF_TYPE_INT32: + case GGUF_TYPE_INT8: + case GGUF_TYPE_STRING: + case GGUF_TYPE_UINT16: + case GGUF_TYPE_UINT32: + case GGUF_TYPE_UINT8: + case GGUF_TYPE_ARRAY: + break; + default: + throw std::runtime_error(format("cannot recognize value type for key %s\n", key)); + } } } @@ -3873,6 +3888,9 @@ bool llama_load_session_file(struct llama_context * ctx, const char * path_sessi bool llama_save_session_file(struct llama_context * ctx, const char * path_session, const llama_token * tokens, size_t n_token_count) { gguf_file file(path_session, "wb"); + GGML_UNUSED(ctx); + GGML_UNUSED(tokens); + GGML_UNUSED(n_token_count); // TODO: implement with GGUF format From 28abfc90fa9154be5ed9fded0870587aa6850820 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 13:27:58 +0300 Subject: [PATCH 28/87] gguf : write metadata in gguf_file_saver (WIP) --- gguf-llama.cpp | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index 9ab770277..11dcb1cc5 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -655,11 +655,29 @@ struct gguf_file_saver { file.write_val("general.quantization_version", GGUF_TYPE_UINT32, new_ftype); } else { const gguf_type vtype = gguf_get_kv_type(any_file_loader->gguf_ctx, i); + + bool bool_val; + float f32_val; + int16_t i16_val; + int32_t i32_val; + switch(vtype) { case GGUF_TYPE_BOOL: + bool_val = gguf_get_val_bool(any_file_loader->gguf_ctx, i); + file.write_val(key, GGUF_TYPE_BOOL, bool_val); + break; case 
GGUF_TYPE_FLOAT32: + f32_val = gguf_get_val_f32(any_file_loader->gguf_ctx, i); + file.write_val(key, GGUF_TYPE_FLOAT32, f32_val); + break; case GGUF_TYPE_INT16: + i16_val = gguf_get_val_i16(any_file_loader->gguf_ctx, i); + file.write_val(key, GGUF_TYPE_INT16, i16_val); + break; case GGUF_TYPE_INT32: + i32_val = gguf_get_val_i32(any_file_loader->gguf_ctx, i); + file.write_val(key, GGUF_TYPE_INT32, i32_val); + break; case GGUF_TYPE_INT8: case GGUF_TYPE_STRING: case GGUF_TYPE_UINT16: From 781b9ec3f504e1d306bcb0027dac65aa8e4dfc70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 18:01:26 +0300 Subject: [PATCH 29/87] gguf : write metadata in gguf_file_saver (WIP) --- gguf-llama.cpp | 21 +++++++++++++++++++++ gguf-util.h | 27 +++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index 11dcb1cc5..e70cae44c 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -660,6 +660,12 @@ struct gguf_file_saver { float f32_val; int16_t i16_val; int32_t i32_val; + int8_t i8_val; + std::string str_val; + uint16_t u16_val; + uint32_t u32_val; + uint8_t u8_val; + switch(vtype) { case GGUF_TYPE_BOOL: @@ -679,10 +685,25 @@ struct gguf_file_saver { file.write_val(key, GGUF_TYPE_INT32, i32_val); break; case GGUF_TYPE_INT8: + i8_val = gguf_get_val_i8(any_file_loader->gguf_ctx, i); + file.write_val(key, GGUF_TYPE_INT8, i8_val); + break; case GGUF_TYPE_STRING: + str_val = gguf_get_val_str(any_file_loader->gguf_ctx, i); + file.write_val(key, GGUF_TYPE_STRING, str_val); + break; case GGUF_TYPE_UINT16: + u16_val = gguf_get_val_u16(any_file_loader->gguf_ctx, i); + file.write_val(key, GGUF_TYPE_UINT16, u16_val); + break; case GGUF_TYPE_UINT32: + u32_val = gguf_get_val_u32(any_file_loader->gguf_ctx, i); + file.write_val(key, GGUF_TYPE_UINT32, u32_val); + break; case GGUF_TYPE_UINT8: + u8_val = gguf_get_val_u8(any_file_loader->gguf_ctx, i); + file.write_val(key, GGUF_TYPE_UINT8, u8_val); + break; case GGUF_TYPE_ARRAY: break; default: diff --git a/gguf-util.h b/gguf-util.h index 94ea9006a..9134019a4 100644 --- a/gguf-util.h +++ b/gguf-util.h @@ -140,6 +140,33 @@ struct gguf_file { fwrite((const char *) &n, sizeof(n), 1, fp); fwrite(val.data(), sizeof(T), n, fp); } + template<> + void write_val(const std::string & key, enum gguf_type type, const std::string & val) { + write_str(key); + fwrite((const char *) &type, sizeof(type), 1, fp); + + const int32_t n = val.size(); + fwrite((const char *) &n, sizeof(n), 1, fp); + fwrite(val.c_str(), n, 1, fp); + } + + template<> + void write_arr(const std::string & key, enum gguf_type type, const std::vector & val) { + write_str(key); + { + const enum gguf_type tarr = GGUF_TYPE_ARRAY; + fwrite((const char *) &tarr, sizeof(tarr), 1, fp); + } + + const int32_t n = val.size(); + fwrite((const char *) &type, sizeof(type), 1, fp); + fwrite((const char *) &n, sizeof(n), 1, fp); + for (int i = 0; i < n; ++i) { + const int32_t nstr = val[i].size(); + fwrite((const char *) &nstr, sizeof(nstr), 1, fp); + fwrite(val[i].c_str(), nstr, 1, fp); + } + } }; #if defined(_WIN32) From d09fd107138d311151546f42a6881548ffd766c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 20:07:43 +0300 Subject: [PATCH 30/87] gguf : write metadata in gguf_file_saver --- ggml.c | 4 ++++ ggml.h | 1 + gguf-llama.cpp | 34 +++++++++++++++++++++++++++++++++- 3 files changed, 38 insertions(+), 1 deletion(-) diff --git a/ggml.c b/ggml.c index e00f09fa4..c8fa60328 100644 --- a/ggml.c 
+++ b/ggml.c @@ -19035,6 +19035,10 @@ enum gguf_type gguf_get_kv_type(struct gguf_context * ctx, int i) { return ctx->header.kv[i].type; } +enum gguf_type gguf_get_arr_type(struct gguf_context * ctx, int i) { + return ctx->header.kv[i].value.arr.type; +} + const char * gguf_get_arr_str(struct gguf_context * ctx, int key_id, int i) { struct gguf_kv * kv = &ctx->header.kv[key_id]; struct gguf_str * str = &((struct gguf_str *) kv->value.arr.data)[i]; diff --git a/ggml.h b/ggml.h index 9a266e175..fb3db10e2 100644 --- a/ggml.h +++ b/ggml.h @@ -1748,6 +1748,7 @@ extern "C" { GGML_API int gguf_find_key(struct gguf_context * ctx, const char * key); GGML_API const char * gguf_get_key (struct gguf_context * ctx, int i); GGML_API enum gguf_type gguf_get_kv_type (struct gguf_context * ctx, int i); + GGML_API enum gguf_type gguf_get_arr_type (struct gguf_context * ctx, int i); GGML_API void gguf_get_val (struct gguf_context * ctx, int i, void * val); GGML_API const char * gguf_get_arr_str(struct gguf_context * ctx, int key_id, int i); diff --git a/gguf-llama.cpp b/gguf-llama.cpp index e70cae44c..27e0b5d43 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -647,6 +647,28 @@ struct gguf_file_saver { file.write_i32(n_kv); } + void write_hparam_arr_str(const std::string & key, enum gguf_type type, int i, int n_arr) { + std::vector data(n_arr); + + for (int j = 0; j < n_arr; ++j) { + std::string val = gguf_get_arr_str(any_file_loader->gguf_ctx, i, j); + data[j] = val; + } + + file.write_arr(key, type, data); + } + + void write_hparam_arr_f32(const std::string & key, enum gguf_type type, int i, int n_arr) { + std::vector data(n_arr); + + for (int j = 0; j < n_arr; ++j) { + float val = gguf_get_arr_f32(any_file_loader->gguf_ctx, i, j); + data[j] = val; + } + + file.write_arr(key, type, data); + } + void write_hparams(enum llama_ftype new_ftype) { const int32_t n_kv = gguf_get_n_kv(any_file_loader->gguf_ctx); for (int i = 0; i < n_kv; ++i) { @@ -665,7 +687,8 @@ struct gguf_file_saver { uint16_t u16_val; uint32_t u32_val; uint8_t u8_val; - + gguf_type arr_type; + int n_arr; switch(vtype) { case GGUF_TYPE_BOOL: @@ -705,6 +728,15 @@ struct gguf_file_saver { file.write_val(key, GGUF_TYPE_UINT8, u8_val); break; case GGUF_TYPE_ARRAY: + arr_type = gguf_get_arr_type(any_file_loader->gguf_ctx, i); + n_arr = gguf_get_arr_n(any_file_loader->gguf_ctx, i); + if (arr_type == GGUF_TYPE_FLOAT32) { + write_hparam_arr_f32(key, arr_type, i, n_arr); + } else if (arr_type == GGUF_TYPE_STRING) { + write_hparam_arr_str(key, GGUF_TYPE_STRING, i, n_arr); + } else { + throw std::runtime_error("not implemented"); + } break; default: throw std::runtime_error(format("cannot recognize value type for key %s\n", key)); From 61919c1a8f12cb14c41e73019180c5950c355c83 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 20:36:11 +0300 Subject: [PATCH 31/87] gguf : rm references to old file formats --- gguf-llama.cpp | 36 ++++++++++-------------------------- 1 file changed, 10 insertions(+), 26 deletions(-) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index 27e0b5d43..7bd8cef6a 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -467,13 +467,11 @@ struct llama_load_tensors_map { }; enum gguf_file_version { - gguf_file_VERSION_GGML, - gguf_file_VERSION_GGMF_V1, // added version field and scores in vocab - gguf_file_VERSION_GGJT_V1, // added padding - gguf_file_VERSION_GGJT_V2, // changed quantization format - gguf_file_VERSION_GGJT_V3, // changed Q4 and Q8 quantization format + GGUF_FILE_VERSION_V1 + 
}; + struct gguf_file_loader { gguf_file file; gguf_context * gguf_ctx; @@ -1069,12 +1067,8 @@ int64_t llama_time_us() { static const char *gguf_file_version_name(gguf_file_version version) { switch (version) { - case gguf_file_VERSION_GGML: return "'ggml' (old version with low tokenizer quality and no mmap support)"; - case gguf_file_VERSION_GGMF_V1: return "ggmf v1 (old version with no mmap support)"; - case gguf_file_VERSION_GGJT_V1: return "ggjt v1 (pre #1405)"; - case gguf_file_VERSION_GGJT_V2: return "ggjt v2 (pre #1508)"; - case gguf_file_VERSION_GGJT_V3: return "ggjt v3 (latest)"; - } + case GGUF_FILE_VERSION_V1: return "GGUF V1 (latest)"; + } return "unknown"; } @@ -1205,22 +1199,12 @@ static void llama_model_load_internal( fprintf(stderr, "%s: model size = %s\n", __func__, llama_model_type_name(model.type)); } - if (file_version < gguf_file_VERSION_GGJT_V2) { - if (hparams.ftype != LLAMA_FTYPE_ALL_F32 && - hparams.ftype != LLAMA_FTYPE_MOSTLY_F16 && - hparams.ftype != LLAMA_FTYPE_MOSTLY_Q8_0) { - throw std::runtime_error(format("this format is no longer supported (see https://github.com/ggerganov/llama.cpp/pull/1405)")); - } + if (hparams.ftype == LLAMA_FTYPE_MOSTLY_Q4_0 || + hparams.ftype == LLAMA_FTYPE_MOSTLY_Q4_1 || + hparams.ftype == LLAMA_FTYPE_MOSTLY_Q8_0) { + throw std::runtime_error(format("this format is no longer supported (see https://github.com/ggerganov/llama.cpp/pull/1508)")); } - - if (file_version < gguf_file_VERSION_GGJT_V3) { - if (hparams.ftype == LLAMA_FTYPE_MOSTLY_Q4_0 || - hparams.ftype == LLAMA_FTYPE_MOSTLY_Q4_1 || - hparams.ftype == LLAMA_FTYPE_MOSTLY_Q8_0) { - throw std::runtime_error(format("this format is no longer supported (see https://github.com/ggerganov/llama.cpp/pull/1508)")); - } - } - + if (vocab_only) { return; } From 7009cf581cbe958fcdeb94e58386646d0ebfa3dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 20:43:02 +0300 Subject: [PATCH 32/87] gguf : shorter name for member variable --- gguf-llama.cpp | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index 7bd8cef6a..77ba7d880 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -620,9 +620,9 @@ struct ggml_context * ctx_data = NULL; struct gguf_file_saver { gguf_file file; - gguf_file_loader * any_file_loader; - gguf_file_saver(const char * fname, gguf_file_loader * any_file_loader, enum llama_ftype new_ftype) - : file(fname, "wb"), any_file_loader(any_file_loader) { + gguf_file_loader * fl; + gguf_file_saver(const char * fname, gguf_file_loader * fl, enum llama_ftype new_ftype) + : file(fname, "wb"), fl(fl) { fprintf(stderr, "llama.cpp: saving model to %s\n", fname); write_header(); write_hparams(new_ftype); @@ -638,10 +638,10 @@ struct gguf_file_saver { const int32_t version = GGUF_VERSION; file.write_i32(version); - const int32_t n_tensors = gguf_get_n_tensors(any_file_loader->gguf_ctx); + const int32_t n_tensors = gguf_get_n_tensors(fl->gguf_ctx); file.write_i32(n_tensors); - const int32_t n_kv = gguf_get_n_kv(any_file_loader->gguf_ctx); + const int32_t n_kv = gguf_get_n_kv(fl->gguf_ctx); file.write_i32(n_kv); } @@ -649,7 +649,7 @@ struct gguf_file_saver { std::vector data(n_arr); for (int j = 0; j < n_arr; ++j) { - std::string val = gguf_get_arr_str(any_file_loader->gguf_ctx, i, j); + std::string val = gguf_get_arr_str(fl->gguf_ctx, i, j); data[j] = val; } @@ -660,7 +660,7 @@ struct gguf_file_saver { std::vector data(n_arr); for (int j = 0; j < 
n_arr; ++j) { - float val = gguf_get_arr_f32(any_file_loader->gguf_ctx, i, j); + float val = gguf_get_arr_f32(fl->gguf_ctx, i, j); data[j] = val; } @@ -668,13 +668,13 @@ struct gguf_file_saver { } void write_hparams(enum llama_ftype new_ftype) { - const int32_t n_kv = gguf_get_n_kv(any_file_loader->gguf_ctx); + const int32_t n_kv = gguf_get_n_kv(fl->gguf_ctx); for (int i = 0; i < n_kv; ++i) { - const char * key = gguf_get_key(any_file_loader->gguf_ctx, i); + const char * key = gguf_get_key(fl->gguf_ctx, i); if (strcmp(key, "general.quantization_version") == 0) { file.write_val("general.quantization_version", GGUF_TYPE_UINT32, new_ftype); } else { - const gguf_type vtype = gguf_get_kv_type(any_file_loader->gguf_ctx, i); + const gguf_type vtype = gguf_get_kv_type(fl->gguf_ctx, i); bool bool_val; float f32_val; @@ -690,44 +690,44 @@ struct gguf_file_saver { switch(vtype) { case GGUF_TYPE_BOOL: - bool_val = gguf_get_val_bool(any_file_loader->gguf_ctx, i); + bool_val = gguf_get_val_bool(fl->gguf_ctx, i); file.write_val(key, GGUF_TYPE_BOOL, bool_val); break; case GGUF_TYPE_FLOAT32: - f32_val = gguf_get_val_f32(any_file_loader->gguf_ctx, i); + f32_val = gguf_get_val_f32(fl->gguf_ctx, i); file.write_val(key, GGUF_TYPE_FLOAT32, f32_val); break; case GGUF_TYPE_INT16: - i16_val = gguf_get_val_i16(any_file_loader->gguf_ctx, i); + i16_val = gguf_get_val_i16(fl->gguf_ctx, i); file.write_val(key, GGUF_TYPE_INT16, i16_val); break; case GGUF_TYPE_INT32: - i32_val = gguf_get_val_i32(any_file_loader->gguf_ctx, i); + i32_val = gguf_get_val_i32(fl->gguf_ctx, i); file.write_val(key, GGUF_TYPE_INT32, i32_val); break; case GGUF_TYPE_INT8: - i8_val = gguf_get_val_i8(any_file_loader->gguf_ctx, i); + i8_val = gguf_get_val_i8(fl->gguf_ctx, i); file.write_val(key, GGUF_TYPE_INT8, i8_val); break; case GGUF_TYPE_STRING: - str_val = gguf_get_val_str(any_file_loader->gguf_ctx, i); + str_val = gguf_get_val_str(fl->gguf_ctx, i); file.write_val(key, GGUF_TYPE_STRING, str_val); break; case GGUF_TYPE_UINT16: - u16_val = gguf_get_val_u16(any_file_loader->gguf_ctx, i); + u16_val = gguf_get_val_u16(fl->gguf_ctx, i); file.write_val(key, GGUF_TYPE_UINT16, u16_val); break; case GGUF_TYPE_UINT32: - u32_val = gguf_get_val_u32(any_file_loader->gguf_ctx, i); + u32_val = gguf_get_val_u32(fl->gguf_ctx, i); file.write_val(key, GGUF_TYPE_UINT32, u32_val); break; case GGUF_TYPE_UINT8: - u8_val = gguf_get_val_u8(any_file_loader->gguf_ctx, i); + u8_val = gguf_get_val_u8(fl->gguf_ctx, i); file.write_val(key, GGUF_TYPE_UINT8, u8_val); break; case GGUF_TYPE_ARRAY: - arr_type = gguf_get_arr_type(any_file_loader->gguf_ctx, i); - n_arr = gguf_get_arr_n(any_file_loader->gguf_ctx, i); + arr_type = gguf_get_arr_type(fl->gguf_ctx, i); + n_arr = gguf_get_arr_n(fl->gguf_ctx, i); if (arr_type == GGUF_TYPE_FLOAT32) { write_hparam_arr_f32(key, arr_type, i, n_arr); } else if (arr_type == GGUF_TYPE_STRING) { @@ -745,7 +745,7 @@ struct gguf_file_saver { } void write_vocab() { - uint32_t n_vocab = any_file_loader->hparams.n_vocab; + uint32_t n_vocab = fl->hparams.n_vocab; GGML_UNUSED(n_vocab); } From f44bbd3d88e2cdb68e59c965e28961afe4a0ad26 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 21:00:51 +0300 Subject: [PATCH 33/87] gguf : rm redundant method --- gguf-llama.cpp | 5 ----- 1 file changed, 5 deletions(-) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index 77ba7d880..a8bd242b0 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -626,7 +626,6 @@ struct gguf_file_saver { fprintf(stderr, "llama.cpp: saving 
model to %s\n", fname); write_header(); write_hparams(new_ftype); - write_vocab(); } // TODO: probably it's better to move these to gguf_file @@ -744,10 +743,6 @@ struct gguf_file_saver { } - void write_vocab() { - uint32_t n_vocab = fl->hparams.n_vocab; - GGML_UNUSED(n_vocab); - } void write_tensor(llama_load_tensor & tensor, enum ggml_type new_type, const void * new_data, size_t new_size) { GGML_UNUSED(tensor); From e732423280ff72ece7bc8dfe8ec1fa7e3153714c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Fri, 11 Aug 2023 23:50:38 +0300 Subject: [PATCH 34/87] gguf : get rid of n_mult, read n_ff from file --- gguf-llama.cpp | 55 ++++++++++++++++++-------------------------------- 1 file changed, 20 insertions(+), 35 deletions(-) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index a8bd242b0..40d5ffd14 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -177,15 +177,12 @@ struct llama_hparams { uint32_t n_vocab = 32000; uint32_t n_ctx = 512; // this is provided as user input? uint32_t n_embd = 4096; - uint32_t n_mult = 256; uint32_t n_head = 32; uint32_t n_head_kv = 32; uint32_t n_layer = 32; uint32_t n_rot = 64; + uint32_t n_ff = 11008; - // LLaMAv2 - // TODO: load from model data hparams - float f_ffn_mult = 1.0f; float f_rms_norm_eps = LLAMA_DEFAULT_RMS_EPS; float rope_freq_base = 10000.0f; @@ -467,7 +464,7 @@ struct llama_load_tensors_map { }; enum gguf_file_version { - GGUF_FILE_VERSION_V1 + GGUF_FILE_VERSION_V1 = 1, }; @@ -490,6 +487,7 @@ struct ggml_context * ctx_data = NULL; }; gguf_ctx = gguf_init_from_file(fname, params); + file_version = (enum gguf_file_version) gguf_get_version(gguf_ctx); read_hparams(); read_vocab(); @@ -505,6 +503,15 @@ struct ggml_context * ctx_data = NULL; return gguf_get_val_u32(gguf_ctx, i); } + float read_f32(const char * key) { + int i = gguf_find_key(gguf_ctx, key); + if (i == -1) { + throw std::runtime_error(format("cannot find param with key %s\n", key)); + } + + return gguf_get_val_f32(gguf_ctx, i); + } + int read_n_vocab() { int i = gguf_find_key(gguf_ctx, "tokenizer.ggml.tokens"); if (i == -1) { @@ -514,18 +521,6 @@ struct ggml_context * ctx_data = NULL; return gguf_get_arr_n(gguf_ctx, i); } - int find_n_mult(const int n_ff, const int n_embd) { - int n_mults[3] = {8192, 1, -1}; - for (int i = 0; i < 3; ++i) { - int calc_ff = (((8 * n_embd) / 3 + n_mults[i] - 1) / n_mults[i]) * n_mults[i]; - if (calc_ff == n_ff) { - return n_mults[i]; - } - } - - throw std::runtime_error(format("failed to find n_mult for n_ff = %d and n_embd = %d\n", n_ff, n_embd)); - } - void read_hparams() { // TODO make keysconstants in header @@ -533,14 +528,12 @@ struct ggml_context * ctx_data = NULL; hparams.n_vocab = read_n_vocab(); hparams.n_ctx = read_u32("llama.context_length"); hparams.n_embd = read_u32("llama.embedding_length"); - uint32_t n_ff = read_u32("llama.feed_forward_length"); - GGML_UNUSED(n_ff); - //hparams.n_mult = find_n_mult(n_ff, hparams.n_embd); + hparams.n_ff = read_u32("llama.feed_forward_length"); hparams.n_head = read_u32("llama.attention.head_count"); hparams.n_layer = read_u32("llama.layer_count"); - hparams.n_rot = hparams.n_embd / hparams.n_head; - //hparams.ftype = (enum llama_ftype) file.read_u32(); - + hparams.n_rot = read_u32("llama.rope.dimension_count"); + hparams.f_rms_norm_eps = read_f32("llama.attention.layer_norm_rms_epsilon"); + // LLaMAv2 // hparams.n_head_kv = read_u32("llama.attention.head_count_kv"); } @@ -1125,6 +1118,7 @@ static void llama_model_load_internal( bool vocab_only, llama_progress_callback 
progress_callback, void * progress_callback_user_data) { + GGML_UNUSED(rms_norm_eps); // TODO: update function signature to remove this model.t_start_us = ggml_time_us(); @@ -1137,9 +1131,6 @@ static void llama_model_load_internal( auto & hparams = model.hparams; - // TODO: read from file - hparams.f_rms_norm_eps = rms_norm_eps; - { switch (hparams.n_layer) { case 26: model.type = e_model::MODEL_3B; break; @@ -1162,25 +1153,19 @@ static void llama_model_load_internal( if (model.type == e_model::MODEL_65B && n_gqa == 8) { fprintf(stderr, "%s: warning: assuming 70B model based on GQA == %d\n", __func__, n_gqa); model.type = e_model::MODEL_70B; - hparams.f_ffn_mult = 1.3f; // from the params.json of the 70B model - } + } hparams.rope_freq_base = rope_freq_base; hparams.rope_freq_scale = rope_freq_scale; } - // ref: https://github.com/facebookresearch/llama/blob/6c7fe276574e78057f917549435a2554000a876d/llama/model.py#L194-L199 - const uint32_t n_ff_raw = 2*(4*hparams.n_embd)/3; - const uint32_t n_ff_mult = hparams.f_ffn_mult*n_ff_raw; - const uint32_t n_ff = ((n_ff_mult + hparams.n_mult - 1)/hparams.n_mult)*hparams.n_mult; - //const uint32_t n_ff = 28672; - + const uint32_t n_ff = hparams.n_ff; + { fprintf(stderr, "%s: format = %s\n", __func__, gguf_file_version_name(file_version)); fprintf(stderr, "%s: n_vocab = %u\n", __func__, hparams.n_vocab); fprintf(stderr, "%s: n_ctx = %u\n", __func__, hparams.n_ctx); fprintf(stderr, "%s: n_embd = %u\n", __func__, hparams.n_embd); - fprintf(stderr, "%s: n_mult = %u\n", __func__, hparams.n_mult); fprintf(stderr, "%s: n_head = %u\n", __func__, hparams.n_head); fprintf(stderr, "%s: n_head_kv = %u\n", __func__, hparams.n_head_kv); fprintf(stderr, "%s: n_layer = %u\n", __func__, hparams.n_layer); From 2a5ac7af44c258a725ae10180dc5993db972e527 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Fri, 11 Aug 2023 23:08:48 +0200 Subject: [PATCH 35/87] Update gguf_tensor_map.py --- gguf_tensor_map.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/gguf_tensor_map.py b/gguf_tensor_map.py index 4fba633b2..644c5914d 100644 --- a/gguf_tensor_map.py +++ b/gguf_tensor_map.py @@ -26,15 +26,15 @@ def get_tensor_map( n_blocks : int): tensor_map["output"] = mapped_to # llama-pth # Attention and fee-forward layer blocks for i in range(0,n_blocks): - # Attention norm 1 - mapped_to = "transformer.blocks."+str(i)+".attn_norm_1" + # Attention norm + mapped_to = "transformer.blocks."+str(i)+".attn_norm" tensor_map["gpt_neox.layers."+str(i)+".input_layernorm"] = mapped_to # gptneox - tensor_map["transformer.h."+str(i)+".ln_1"] = mapped_to # gpt2 - tensor_map["transformer.blocks."+str(i)+".norm_1"] = mapped_to # mpt - tensor_map["transformer.h."+str(i)+".input_layernorm"] = mapped_to # falcon7b - tensor_map["transformer.h."+str(i)+".ln_attn"] = mapped_to # falcon40b - tensor_map["model.layers."+str(i)+".input_layernorm"] = mapped_to # llama-hf - tensor_map["layers."+str(i)+".attention_norm"] = mapped_to # llama-pth + tensor_map["transformer.h."+str(i)+".ln_1"] = mapped_to # gpt2 + tensor_map["transformer.blocks."+str(i)+".norm_1"] = mapped_to # mpt + tensor_map["transformer.h."+str(i)+".input_layernorm"] = mapped_to # falcon7b + tensor_map["transformer.h."+str(i)+".ln_attn"] = mapped_to # falcon40b + tensor_map["model.layers."+str(i)+".input_layernorm"] = mapped_to # llama-hf + tensor_map["layers."+str(i)+".attention_norm"] = mapped_to # llama-pth # Attention norm 2 mapped_to = 
"transformer.blocks."+str(i)+".attn_norm_2" tensor_map["transformer.h."+str(i)+".ln_mlp"] = mapped_to # falcon40b From e76c59d5245825ac1e1bf06bfc7f78ed1f6bcf16 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Fri, 11 Aug 2023 23:09:49 +0200 Subject: [PATCH 36/87] Update gptneox-main.cpp --- gptneox-main.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/gptneox-main.cpp b/gptneox-main.cpp index 1667c4d54..f2be93e4b 100644 --- a/gptneox-main.cpp +++ b/gptneox-main.cpp @@ -565,8 +565,8 @@ bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2 std::string blocknamestart = "transformer.blocks." + std::to_string(i) + "."; - layer.ln_1_g = get_tensor_ex(ctx, blocknamestart + "attn_norm_1.weight" ); - layer.ln_1_b = get_tensor_ex(ctx, blocknamestart + "attn_norm_1.bias" ); + layer.ln_1_g = get_tensor_ex(ctx, blocknamestart + "attn_norm.weight" ); + layer.ln_1_b = get_tensor_ex(ctx, blocknamestart + "attn_norm.bias" ); layer.c_attn_attn_w = get_tensor_ex(ctx, blocknamestart + "attn_qkv.weight" ); layer.c_attn_attn_b = get_tensor_ex(ctx ,blocknamestart + "attn_qkv.bias" ); @@ -584,8 +584,8 @@ bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2 layer.c_mlp_proj_b = get_tensor_ex(ctx, blocknamestart + "ffn_down.bias" ); // map by name - model.tensors[blocknamestart + "attn_norm_1.weight"] = layer.ln_1_g; - model.tensors[blocknamestart + "attn_norm_1.bias"] = layer.ln_1_b; + model.tensors[blocknamestart + "attn_norm.weight"] = layer.ln_1_g; + model.tensors[blocknamestart + "attn_norm.bias"] = layer.ln_1_b; model.tensors[blocknamestart + "attn_qkv.weight"] = layer.c_attn_attn_w; model.tensors[blocknamestart + "attn_qkv.bias"] = layer.c_attn_attn_b; From 53dc399472d5bd35ee739b865e843b1996bd3814 Mon Sep 17 00:00:00 2001 From: Equim Date: Sat, 12 Aug 2023 06:35:14 +0800 Subject: [PATCH 37/87] server: fixed wrong variable name in timing json (#2579) * server: fixed wrong variable name in timing json * remove redunct entry --- examples/server/server.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 637f6d6c2..2340f93ac 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1008,7 +1008,7 @@ static json format_timings(llama_server_context &llama) assert(timings.n_eval == llama.num_tokens_predicted); return json{ - {"prompt_n", timings.n_eval}, + {"prompt_n", timings.n_p_eval}, {"prompt_ms", timings.t_p_eval_ms}, {"prompt_per_token_ms", timings.t_p_eval_ms / timings.n_p_eval}, {"prompt_per_second", 1e3 / timings.t_p_eval_ms * timings.n_p_eval}, @@ -1037,7 +1037,6 @@ static json format_final_response(llama_server_context &llama, const std::string {"stopped_limit", llama.stopped_limit}, {"stopping_word", llama.stopping_word}, {"tokens_cached", llama.n_past}, - {"tokens_predicted", llama.num_tokens_predicted}, {"timings", format_timings(llama)}, }; From b19edd54d51cef5e3616c18b1d0d8626895b2cba Mon Sep 17 00:00:00 2001 From: byte-6174 <88070277+byte-6174@users.noreply.github.com> Date: Fri, 11 Aug 2023 19:17:25 -0400 Subject: [PATCH 38/87] Adding support for llama2.c models (#2559) --- .gitignore | 2 + Makefile | 7 +- examples/CMakeLists.txt | 1 + .../convert-llama2c-to-ggml/CMakeLists.txt | 5 + examples/convert-llama2c-to-ggml/README.md | 26 + .../convert-llama2c-to-ggml.cpp | 825 ++++++++++++++++++ 6 files changed, 864 insertions(+), 2 deletions(-) create mode 100644 
examples/convert-llama2c-to-ggml/CMakeLists.txt create mode 100644 examples/convert-llama2c-to-ggml/README.md create mode 100644 examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp diff --git a/.gitignore b/.gitignore index c1ab6bb6d..e345e64ed 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ *.o *.a *.so +*.bin .DS_Store .build/ .cache/ @@ -39,6 +40,7 @@ models-mnt /perplexity /embedding /train-text-from-scratch +/convert-llama2c-to-ggml /simple /benchmark-matmult /vdot diff --git a/Makefile b/Makefile index f01bf0c83..ce593edfc 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,5 @@ # Define the default target now so that it is always the first target -BUILD_TARGETS = main quantize quantize-stats perplexity embedding vdot train-text-from-scratch simple server embd-input-test +BUILD_TARGETS = main quantize quantize-stats perplexity embedding vdot train-text-from-scratch convert-llama2c-to-ggml simple server embd-input-test # Binaries only useful for tests TEST_TARGETS = tests/test-double-float tests/test-grad0 tests/test-opt tests/test-quantize-fns tests/test-quantize-perf tests/test-sampling tests/test-tokenizer-0 @@ -345,7 +345,7 @@ libllama.so: llama.o ggml.o $(OBJS) $(CXX) $(CXXFLAGS) -shared -fPIC -o $@ $^ $(LDFLAGS) clean: - rm -vf *.o *.so *.dll main quantize quantize-stats perplexity embedding benchmark-matmult save-load-state server simple vdot train-text-from-scratch embd-input-test build-info.h $(TEST_TARGETS) + rm -vf *.o *.so *.dll main quantize quantize-stats perplexity embedding benchmark-matmult save-load-state server simple vdot train-text-from-scratch convert-llama2c-to-ggml embd-input-test build-info.h $(TEST_TARGETS) # # Examples @@ -388,6 +388,9 @@ embd-input-test: $(LIB_PRE)embdinput$(DSO_EXT) examples/embd-input/embd-input-te train-text-from-scratch: examples/train-text-from-scratch/train-text-from-scratch.cpp build-info.h ggml.o llama.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) +convert-llama2c-to-ggml: examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp build-info.h ggml.o llama.o $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + build-info.h: $(wildcard .git/index) scripts/build-info.sh @sh scripts/build-info.sh > $@.tmp @if ! 
cmp -s $@.tmp $@; then \
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index a7b26776a..b5d9bb29e 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -42,6 +42,7 @@ else()
     add_subdirectory(benchmark)
     add_subdirectory(baby-llama)
     add_subdirectory(train-text-from-scratch)
+    add_subdirectory(convert-llama2c-to-ggml)
     add_subdirectory(simple)
     add_subdirectory(embd-input)
     if (LLAMA_METAL)
diff --git a/examples/convert-llama2c-to-ggml/CMakeLists.txt b/examples/convert-llama2c-to-ggml/CMakeLists.txt
new file mode 100644
index 000000000..e262d44f9
--- /dev/null
+++ b/examples/convert-llama2c-to-ggml/CMakeLists.txt
@@ -0,0 +1,5 @@
+set(TARGET convert-llama2c-to-ggml)
+add_executable(${TARGET} convert-llama2c-to-ggml.cpp)
+install(TARGETS ${TARGET} RUNTIME)
+target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT})
+target_compile_features(${TARGET} PRIVATE cxx_std_11)
diff --git a/examples/convert-llama2c-to-ggml/README.md b/examples/convert-llama2c-to-ggml/README.md
new file mode 100644
index 000000000..868f57d6d
--- /dev/null
+++ b/examples/convert-llama2c-to-ggml/README.md
@@ -0,0 +1,26 @@
+## Convert llama2.c model to ggml
+
+This example reads weights from project [llama2.c](https://github.com/karpathy/llama2.c) and saves them in a ggml-compatible format. The vocab that is available in `models/ggml-vocab.bin` is used by default.
+
+To convert the model, first download the models from the [llama2.c](https://github.com/karpathy/llama2.c) repository:
+
+`$ make -j`
+
+After successful compilation, the following usage options are available:
+```
+usage: ./convert-llama2c-to-ggml [options]
+
+options:
+  -h, --help                       show this help message and exit
+  --copy-vocab-from-model FNAME    model path from which to copy vocab (default 'models/ggml-vocab.bin')
+  --llama2c-model FNAME            [REQUIRED] model path from which to load Karpathy's llama2.c model
+  --llama2c-output-model FNAME     model path to save the converted llama2.c model (default 'ak_llama_model.bin')
+```
+
+An example command is as follows:
+
+`$ ./convert-llama2c-to-ggml --copy-vocab-from-model --llama2c-model --llama2c-output-model `
+
+Now you can use the model with a command like:
+
+`$ ./main -m -p "One day, Lily met a Shoggoth" -n 500 -c 256 -eps 1e-5`
diff --git a/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp
new file mode 100644
index 000000000..1a238c4dd
--- /dev/null
+++ b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp
@@ -0,0 +1,825 @@
+#include "ggml.h"
+#include "llama.h"
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#if defined(_MSC_VER)
+#pragma warning(disable: 4244 4267) // possible loss of data
+#endif
+
+//////////////////////////////////////// llama2.c model structs and functions to load models, alloc memory etc.
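// Note (added for clarity, not in the original patch): a llama2.c checkpoint
// is a flat binary file with no framing: the seven int32 fields of the Config
// struct below come first, followed by the float32 weight arrays in exactly
// the order checkpoint_init_weights() reads them. Assuming that layout, the
// whole header can be read in one call:
//
//     Config config;
//     if (fread(&config, sizeof(Config), 1, f) != 1) {
//         return 1; // truncated or invalid checkpoint
//     }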
+typedef struct { + int dim; // transformer dimension + int hidden_dim; // for ffn layers + int n_layers; // number of layers + int n_heads; // number of query heads + int n_kv_heads; // number of key/value heads (can be < query heads because of multiquery) + int vocab_size; // vocabulary size, usually 256 (byte-level) + int seq_len; // max sequence length +} Config; + +typedef struct { + // token embedding table + float* token_embedding_table; // (vocab_size, dim) + // weights for rmsnorms + float* rms_att_weight; // (layer, dim) rmsnorm weights + float* rms_ffn_weight; // (layer, dim) + // weights for matmuls + float* wq; // (layer, dim, dim) + float* wk; // (layer, dim, dim) + float* wv; // (layer, dim, dim) + float* wo; // (layer, dim, dim) + // weights for ffn + float* w1; // (layer, hidden_dim, dim) + float* w2; // (layer, dim, hidden_dim) + float* w3; // (layer, hidden_dim, dim) + // final rmsnorm + float* rms_final_weight; // (dim,) + // freq_cis for RoPE relatively positional embeddings + // float* freq_cis_real; // (seq_len, dim/2) + // float* freq_cis_imag; // (seq_len, dim/2) + // (optional) classifier weights for the logits, on the last layer + //float* wcls; +} TransformerWeights; + +void malloc_weights(TransformerWeights* w, Config* p) { + // we calloc instead of malloc to keep valgrind happy + w->token_embedding_table = new float[p->vocab_size * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] = [%d] float space for w->token_embedding_table\n",__func__,p->vocab_size , p->dim, p->vocab_size * p->dim); + + w->rms_att_weight = new float[p->n_layers * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] = [%d] float space for w->rms_att_weight\n",__func__,p->n_layers, p->dim, p->n_layers * p->dim); + + w->rms_ffn_weight = new float[p->n_layers * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] = [%d] float space for w->rms_ffn_weight\n",__func__,p->n_layers , p->dim, p->n_layers * p->dim); + + w->wq = new float[p->n_layers * p->dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->wq\n",__func__,p->n_layers, p->dim, p->dim, p->n_layers * p->dim * p->dim); + + w->wk = new float[p->n_layers * p->dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->wk\n",__func__,p->n_layers, p->dim, p->dim, p->n_layers * p->dim * p->dim); + + w->wv = new float[p->n_layers * p->dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->wv\n",__func__, p->n_layers, p->dim, p->dim, p->n_layers * p->dim * p->dim); + + w->wo = new float[p->n_layers * p->dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->wo\n",__func__,p->n_layers, p->dim, p->dim, p->n_layers * p->dim * p->dim); + + w->w1 = new float[p->n_layers * p->hidden_dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->w1\n",__func__,p->n_layers, p->hidden_dim, p->dim, p->n_layers * p->hidden_dim * p->dim); + + w->w2 = new float[p->n_layers * p->hidden_dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->w2\n",__func__,p->n_layers, p->dim, p->hidden_dim, p->n_layers * p->hidden_dim * p->dim); + + w->w3 = new float[p->n_layers * p->hidden_dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->w3\n",__func__,p->n_layers, p->hidden_dim, p->dim, p->n_layers * p->hidden_dim * p->dim); + + w->rms_final_weight = new float[p->dim](); + printf("[%s:AK] Allocating [%d] float space for 
w->rms_final_weight\n",__func__,p->dim); +} + +int checkpoint_init_weights(TransformerWeights *w, Config* p, FILE* f) { + if (fread(w->token_embedding_table, sizeof(float), p->vocab_size * p->dim, f) != static_cast(p->vocab_size * p->dim)) return 1; + if (fread(w->rms_att_weight, sizeof(float), p->n_layers * p->dim, f) != static_cast(p->n_layers * p->dim)) return 1; + if (fread(w->wq, sizeof(float), p->n_layers * p->dim * p->dim, f) != static_cast(p->n_layers * p->dim * p->dim)) return 1; + if (fread(w->wk, sizeof(float), p->n_layers * p->dim * p->dim, f) != static_cast(p->n_layers * p->dim * p->dim)) return 1; + if (fread(w->wv, sizeof(float), p->n_layers * p->dim * p->dim, f) != static_cast(p->n_layers * p->dim * p->dim)) return 1; + if (fread(w->wo, sizeof(float), p->n_layers * p->dim * p->dim, f) != static_cast(p->n_layers * p->dim * p->dim)) return 1; + if (fread(w->rms_ffn_weight, sizeof(float), p->n_layers * p->dim, f) != static_cast(p->n_layers * p->dim)) return 1; + if (fread(w->w1, sizeof(float), p->n_layers * p->dim * p->hidden_dim, f) != static_cast(p->n_layers * p->dim * p->hidden_dim)) return 1; + if (fread(w->w2, sizeof(float), p->n_layers * p->hidden_dim * p->dim, f) != static_cast(p->n_layers * p->hidden_dim * p->dim)) return 1; + if (fread(w->w3, sizeof(float), p->n_layers * p->dim * p->hidden_dim, f) != static_cast(p->n_layers * p->dim * p->hidden_dim)) return 1; + if (fread(w->rms_final_weight, sizeof(float), p->dim, f) != static_cast(p->dim)) return 1; + return 0; +} + +void free_weights(TransformerWeights* w) { + delete w->token_embedding_table; + delete w->rms_att_weight; + delete w->rms_ffn_weight; + delete w->wq; + delete w->wk; + delete w->wv; + delete w->wo; + delete w->w1; + delete w->w2; + delete w->w3; + delete w->rms_final_weight; +} + +void print_sample_weights(TransformerWeights *w){ + printf("----- Quick print of first of the weight vales of all the variables\n"); + printf("%f\n", w->token_embedding_table[0]); + printf("%f\n", w->rms_att_weight[0]); + printf("%f\n", w->rms_ffn_weight[0]); + + printf("%f\n", w->wq[0]); + printf("%f\n", w->wk[0]); + printf("%f\n", w->wv[0]); + printf("%f\n", w->wo[0]); + printf("%f\n", w->w1[0]); + printf("%f\n", w->w2[0]); + printf("%f\n", w->w3[0]); + printf("%f\n", w->rms_att_weight[0]); +} +//////////////////////////////////////////////////////////////////////////////////////////////////////////// + +//////////////////////////////////////// ggml structs and functions required to load models, configs and save the model. + +struct llama_vocab { + using id = int32_t; + using token = std::string; + + struct token_score { + token tok; + float score; + }; + + std::unordered_map token_to_id; + std::vector id_to_token; +}; + +struct my_llama_hparams { + uint32_t n_vocab = 32000; + uint32_t n_ctx = 512; // this is provided as user input? 
+ uint32_t n_embd = 4096; + uint32_t n_mult = 4; + uint32_t n_head = 32; + uint32_t n_layer = 32; + uint32_t n_rot = 64; + bool operator!=(const my_llama_hparams& other) const { + return memcmp(this, &other, sizeof(my_llama_hparams)); + } +}; + +struct my_llama_layer { + // normalization + struct ggml_tensor * attention_norm; + + // attention + struct ggml_tensor * wq; + struct ggml_tensor * wk; + struct ggml_tensor * wv; + struct ggml_tensor * wo; + + // normalization + struct ggml_tensor * ffn_norm; + + // ff + struct ggml_tensor * w1; + struct ggml_tensor * w2; + struct ggml_tensor * w3; +}; + +struct my_llama_model { + struct ggml_context * ctx = NULL; + + my_llama_hparams hparams; + + struct ggml_tensor * tok_embeddings; + + struct ggml_tensor * norm; + struct ggml_tensor * output; + + std::vector layers; + + uint32_t train_its = 0; + uint32_t train_samples = 0; + uint32_t train_tokens = 0; +}; + +struct train_params { + const char * fn_vocab_model; + const char * fn_llama2c_model; + const char * fn_llama2c_output_model; + const char * fn_train_data; + const char * fn_checkpoint_in; + const char * fn_checkpoint_out; + const char * fn_model_out; + + uint32_t seed; + + int n_ctx; + int n_embd; + int n_mult; + int n_head; + int n_layer; + int n_rotmax; + + int n_threads; + int n_batch; + int n_examples; + int n_predict; + + int print_info_interval; + int print_details_interval; + + bool samples_start_after_nl; + bool use_adam; + bool use_flash; + bool use_scratch; + + // only adam + int warmup; + int cos_decay_steps; + float cos_decay_restart; + float cos_decay_alpha; + + int lbfgs_n_iter; + int adam_n_iter; + float adam_alpha; + float adam_decay; + + int mem_model_gb; + int mem_compute_gb; + int mem_compute0_gb; + int mem_compute1_gb; +}; + +uint32_t get_n_ff(const struct my_llama_hparams* hparams) { + const uint32_t n_ff = ((2*(4*hparams->n_embd)/3 + hparams->n_mult - 1)/hparams->n_mult)*hparams->n_mult; + return n_ff; +} + +void print_params(struct my_llama_hparams * params) { + printf("%s: n_vocab: %d\n", __func__, params->n_vocab); + printf("%s: n_ctx: %d\n", __func__, params->n_ctx); + printf("%s: n_embd: %d\n", __func__, params->n_embd); + printf("%s: n_mult: %d\n", __func__, params->n_mult); + printf("%s: n_head: %d\n", __func__, params->n_head); + printf("%s: n_ff: %d\n", __func__, get_n_ff(params)); + printf("%s: n_layer: %d\n", __func__, params->n_layer); + printf("%s: n_rot: %d\n", __func__, params->n_rot); +} + +void init_model(struct my_llama_model * model) { + const auto & hparams = model->hparams; + + const uint32_t n_embd = hparams.n_embd; + const uint32_t n_layer = hparams.n_layer; + const uint32_t n_vocab = hparams.n_vocab; + + const uint32_t n_ff = get_n_ff(&hparams); + struct ggml_context * ctx = model->ctx; + + model->train_its = 0; + model->train_samples = 0; + model->train_tokens = 0; + + model->tok_embeddings = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_vocab); + printf("[%s:GG] Allocating [%d] x [%d] = [%d] float space for model->tok_embeddings\n",__func__,n_embd , n_vocab, n_embd * n_vocab); + + model->norm = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, n_embd); + printf("[%s:GG] Allocating [%d] float space for model->norm\n",__func__,n_embd); + + model->output = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_vocab); + printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for model->output\n",__func__,n_embd, n_vocab, n_embd * n_vocab); + + // printing the per-layer allocations here so we dont print in the for loop. 
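    // Note (added for clarity, not in the original patch): with the default
    // hparams above (n_embd = 4096, n_mult = 4), get_n_ff() evaluates to
    // ((2*(4*4096)/3 + 4 - 1)/4)*4 = ((10922 + 3)/4)*4 = 10924, so each of
    // the w1/w3 tensors created below holds n_embd x n_ff floats.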
+ printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wq for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wk for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wv for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wo for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + + printf("[%s:GG] Allocating [%d] float space for layer.ffn_norm for [%d] layers\n",__func__,n_embd, n_layer); + + printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w1 for [%d] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer); + printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w2 for [%d] layers\n",__func__, n_embd, n_ff, n_ff * n_embd, n_layer); + printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w3 for [%d] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer); + + ggml_set_name(model->tok_embeddings, "tok_embeddings.weight"); + ggml_set_name(model->norm, "norm.weight"); + ggml_set_name(model->output, "output.weight"); + + model->layers.resize(n_layer); + for (uint32_t i = 0; i < n_layer; ++i) { + auto & layer = model->layers[i]; + + std::string layers_i = "layers." + std::to_string(i); + + layer.attention_norm = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, n_embd); + + layer.wq = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_embd); + layer.wk = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_embd); + layer.wv = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_embd); + layer.wo = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_embd); + + layer.ffn_norm = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, n_embd); + + layer.w1 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); + layer.w2 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_ff, n_embd); + layer.w3 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); + + ggml_set_name(layer.attention_norm, (layers_i + ".attention_norm.weight").c_str()); + + ggml_set_name(layer.wq, (layers_i + ".attention.wq.weight").c_str()); + ggml_set_name(layer.wk, (layers_i + ".attention.wk.weight").c_str()); + ggml_set_name(layer.wv, (layers_i + ".attention.wv.weight").c_str()); + ggml_set_name(layer.wo, (layers_i + ".attention.wo.weight").c_str()); + + ggml_set_name(layer.ffn_norm, (layers_i + ".ffn_norm.weight").c_str()); + + ggml_format_name(layer.w1, "%s.feed_forward.w1.weight", layers_i.c_str()); + ggml_format_name(layer.w2, "%s.feed_forward.w2.weight", layers_i.c_str()); + ggml_format_name(layer.w3, "%s.feed_forward.w3.weight", layers_i.c_str()); + } +} + +float get_f32_2d(struct ggml_tensor * tensor, int64_t i0, int64_t i1) { + float * ptr = (float *) ((char *) tensor->data + i0*tensor->nb[0] + i1*tensor->nb[1]); + return *ptr; +} + +int32_t get_i32_2d(struct ggml_tensor * tensor, int64_t i0, int64_t i1) { + int32_t * ptr = (int32_t *) ((char *) tensor->data + i0*tensor->nb[0] + i1*tensor->nb[1]); + return *ptr; +} + +void print_row(struct ggml_tensor * probs, int i) { + for (int k = 0; k < probs->ne[0]; ++k) { + float p = get_f32_2d(probs, k, i); + printf(" %f", p); + } + printf("\n"); +} + +void print_matrix(struct ggml_tensor * probs) { + assert(probs->n_dims == 2); + for (int i = 0; i < probs->ne[1]; ++i) { + for (int k = 0; k < probs->ne[0]; ++k) { + float p = get_f32_2d(probs, k, i); + printf(" %.2f", p); + } + 
printf("\n"); + } +} + +#ifdef __GNUC__ +#ifdef __MINGW32__ +__attribute__((format(gnu_printf, 1, 2))) +#else +__attribute__((format(printf, 1, 2))) +#endif +#endif +static std::string format(const char * fmt, ...) { + va_list ap, ap2; + va_start(ap, fmt); + va_copy(ap2, ap); + int size = vsnprintf(NULL, 0, fmt, ap); + GGML_ASSERT(size >= 0 && size < INT_MAX); + std::vector buf(size + 1); + int size2 = vsnprintf(buf.data(), size + 1, fmt, ap2); + GGML_ASSERT(size2 == size); + va_end(ap2); + va_end(ap); + return std::string(buf.data(), size); +} + +struct llama_file { + // use FILE * so we don't have to re-open the file to mmap + FILE * fp; + size_t size; + + llama_file(const char * fname, const char * mode) { + fp = std::fopen(fname, mode); + if (fp == NULL) { + size = 0; + } else { + seek(0, SEEK_END); + size = tell(); + seek(0, SEEK_SET); + } + } + + size_t tell() const { +#ifdef _WIN32 + __int64 ret = _ftelli64(fp); +#else + long ret = std::ftell(fp); +#endif + GGML_ASSERT(ret != -1); // this really shouldn't fail + return (size_t) ret; + } + + void seek(size_t offset, int whence) { +#ifdef _WIN32 + int ret = _fseeki64(fp, (__int64) offset, whence); +#else + int ret = std::fseek(fp, (long) offset, whence); +#endif + GGML_ASSERT(ret == 0); // same + } + + void read_raw(void * ptr, size_t size) { + if (size == 0) { + return; + } + errno = 0; + std::size_t ret = std::fread(ptr, size, 1, fp); + if (ferror(fp)) { + throw std::runtime_error(format("read error: %s", strerror(errno))); + } + if (ret != 1) { + throw std::runtime_error(std::string("unexpectedly reached end of file")); + } + } + + std::uint32_t read_u32() { + std::uint32_t ret; + read_raw(&ret, sizeof(ret)); + return ret; + } + std::float_t read_f32() { + std::float_t ret; + read_raw(&ret, sizeof(ret)); + return ret; + } + + std::string read_string(std::uint32_t len) { + std::vector chars(len); + read_raw(chars.data(), len); + return std::string(chars.data(), len); + } + + void write_raw(const void * ptr, size_t size) { + if (size == 0) { + return; + } + errno = 0; + size_t ret = std::fwrite(ptr, size, 1, fp); + if (ret != 1) { + throw std::runtime_error(format("write error: %s", strerror(errno))); + } + } + + void write_u32(std::uint32_t val) { + write_raw(&val, sizeof(val)); + } + + ~llama_file() { + if (fp) { + std::fclose(fp); + } + } +}; + +void write_tensor(struct llama_file * file, struct ggml_tensor * tensor) { + if (tensor == NULL) { + file->write_u32(0); + file->write_u32(0); + file->write_u32(GGML_TYPE_F32); + file->seek((0-file->tell()) & 31, SEEK_CUR); + return; + } + const char * name = ggml_get_name(tensor); + uint32_t name_len = strlen(name); + uint32_t nd = tensor->n_dims; + uint32_t ne[4] = { (uint32_t)tensor->ne[0], + (uint32_t)tensor->ne[1], + (uint32_t)tensor->ne[2], + (uint32_t)tensor->ne[3] }; + file->write_u32(nd); + file->write_u32(name_len); + file->write_u32(tensor->type); + file->write_raw(ne, sizeof(ne[0]) * nd); + file->write_raw(name, name_len); + file->seek((0-file->tell()) & 31, SEEK_CUR); + file->write_raw(tensor->data, ggml_nbytes(tensor)); +} + +bool is_ggml_file(const char *filename) { + llama_file file(filename, "rb"); + if (file.size < 4) { + return false; + } + uint32_t magic = file.read_u32(); + return magic == LLAMA_FILE_MAGIC; +} + +void load_vocab(const char *filename, Config *config, struct llama_vocab *vocab) { + // heuristic to infer whether vocab is from ggml or from llama2.c vocabulary + if (is_ggml_file(filename)) { + + struct llama_context_params llama_params = 
+        llama_params.vocab_only = true;
+
+        struct llama_model * lmodel = llama_load_model_from_file(filename, llama_params);
+        struct llama_context * lctx = llama_new_context_with_model(lmodel, llama_params);
+
+        std::vector<const char *> strings;
+        std::vector<float> scores;
+        int n_vocab = llama_n_vocab(lctx);
+        strings.resize(n_vocab, NULL);
+        scores.resize(n_vocab, 0);
+        n_vocab = llama_get_vocab(lctx, strings.data(), scores.data(), n_vocab);
+        GGML_ASSERT(n_vocab == llama_n_vocab(lctx));
+        vocab->id_to_token.resize(n_vocab);
+        for (int i=0; i<n_vocab; i++) {
+            std::string tok = std::string(strings[i]);
+            float score = scores[i];
+            vocab->id_to_token[i].tok = tok;
+            vocab->id_to_token[i].score = score;
+            vocab->token_to_id.emplace(tok, i);
+        }
+        llama_free(lctx);
+        llama_free_model(lmodel);
+    } else { // assume llama2.c vocabulary
+        printf("Assuming llama2.c vocabulary since %s is not a ggml file\n", filename);
+        llama_file file(filename, "rb");
+        uint32_t n_vocab = config->vocab_size;
+        /* uint32_t max_token_length = */ file.read_u32(); // unused
+        vocab->id_to_token.resize(n_vocab);
+        for (uint32_t i=0; i<n_vocab; i++) {
+            float_t score = file.read_f32();
+            uint32_t len = file.read_u32();
+            std::string tok = file.read_string(len);
+            vocab->id_to_token[i].tok = tok;
+            vocab->id_to_token[i].score = score;
+            vocab->token_to_id.emplace(tok, i);
+        }
+    }
+}
+
+void stuff_karpathy_weights_into_gg(struct ggml_tensor * gg_weights, float * karpathy_weights){
+    int ct;
+    switch (gg_weights->n_dims){
+        case 1:
+            ct = 0;
+            for (int i0 = 0; i0 < gg_weights->ne[0]; i0++){
+                float * ptr = (float *) ((char *) gg_weights->data + i0*gg_weights->nb[0]);
+                *ptr = karpathy_weights[ct];
+                ct++;
+            }
+            break;
+        case 2:
+            ct = 0;
+            for (int i1 = 0; i1 < gg_weights->ne[1]; i1++) {
+                for (int i0 = 0; i0 < gg_weights->ne[0]; i0++) {
+                    float * ptr = (float *) ((char *) gg_weights->data + i0*gg_weights->nb[0] + i1*gg_weights->nb[1]);
+                    *ptr = karpathy_weights[ct];
+                    ct++;
+                }
+            }
+            break;
+        case 3:
+            ct = 0;
+            for (int i2 = 0; i2 < gg_weights->ne[2]; i2++) {
+                for (int i1 = 0; i1 < gg_weights->ne[1]; i1++) {
+                    for (int i0 = 0; i0 < gg_weights->ne[0]; i0++) {
+                        float * ptr = (float *) ((char *) gg_weights->data + i0*gg_weights->nb[0] + i1*gg_weights->nb[1] + i2*gg_weights->nb[2]);
+                        *ptr = karpathy_weights[ct];
+                        ct++;
+                    }
+                }
+            }
+            break;
+    }
+}
+
+void save_as_llama_model(struct llama_vocab * vocab, struct my_llama_model * model, TransformerWeights* w, const char * filename) {
+    struct llama_file file(filename, "wb");
+    if (file.fp == NULL) {
+        return;
+    }
+    // write_magic
+    file.write_u32(LLAMA_FILE_MAGIC);   // magic
+    file.write_u32(LLAMA_FILE_VERSION); // version
+    // write_hparams
+    file.write_u32(model->hparams.n_vocab);
+    file.write_u32(model->hparams.n_embd);
+    file.write_u32(model->hparams.n_mult);
+    file.write_u32(model->hparams.n_head);
+    file.write_u32(model->hparams.n_layer);
+    file.write_u32(model->hparams.n_rot);
+    file.write_u32(LLAMA_FTYPE_ALL_F32);
+
+    // write_vocab - for now we are just writing the existing BPE voc. assuming karpathy's vocabulary is the same. idk.
+    uint32_t n_vocab = model->hparams.n_vocab;
+    for (uint32_t i = 0; i < n_vocab; i++) {
+        const auto & token_score = vocab->id_to_token.at(i);
+        file.write_u32((uint32_t) token_score.tok.size());
+        file.write_raw(token_score.tok.data(), token_score.tok.size());
+        file.write_raw(&token_score.score, sizeof(token_score.score));
+    }
+
+    // stuff AK weights into GG weights one by one.
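+    // stuff_karpathy_weights_into_gg() (defined above) walks the destination
+    // tensor in row-major order through its nb[] byte strides, so each flat
+    // llama2.c float array maps one-to-one onto the matching ggml tensor.
+    // Note that model->output is filled from w->token_embedding_table below:
+    // llama2.c ties the output projection to the token embeddings.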
+ // w->token_embedding_table -> model->tok_embeddings + // float* -> struct ggml_tensor + stuff_karpathy_weights_into_gg(model->tok_embeddings, w->token_embedding_table); + stuff_karpathy_weights_into_gg(model->output, w->token_embedding_table); + + stuff_karpathy_weights_into_gg(model->norm, w->rms_final_weight); + //print_row(model->norm, 0); + + // for rms-att-weight + int row_length = model->hparams.n_embd; + const auto & hparams = model->hparams; + //int n_ff = model->hparams.n_embd; + int n_ff = get_n_ff(&hparams); + + for (uint32_t i = 0; i < model->hparams.n_layer; ++i){ + auto & layer = model->layers[i]; + // 1d + stuff_karpathy_weights_into_gg(layer.attention_norm, &w->rms_att_weight[i*row_length]); + stuff_karpathy_weights_into_gg(layer.ffn_norm , &w->rms_ffn_weight[i*row_length]); + + // from 3d matrix layer x dim x dim to 2d matrix dim x dim + stuff_karpathy_weights_into_gg(layer.wq , &w->wq[i*row_length*row_length]); + stuff_karpathy_weights_into_gg(layer.wk , &w->wk[i*row_length*row_length]); + stuff_karpathy_weights_into_gg(layer.wv , &w->wv[i*row_length*row_length]); + stuff_karpathy_weights_into_gg(layer.wo , &w->wo[i*row_length*row_length]); + + stuff_karpathy_weights_into_gg(layer.w1 , &w->w1[i*row_length*n_ff]); + stuff_karpathy_weights_into_gg(layer.w2 , &w->w2[i*n_ff*row_length]); + stuff_karpathy_weights_into_gg(layer.w3 , &w->w3[i*row_length*n_ff]); + } + // write tensors + write_tensor(&file, model->tok_embeddings); + write_tensor(&file, model->norm); + write_tensor(&file, model->output); // ? + for (uint32_t i = 0; i < model->hparams.n_layer; ++i) { + auto & layer = model->layers[i]; + + write_tensor(&file, layer.attention_norm); + write_tensor(&file, layer.wq); + write_tensor(&file, layer.wk); + write_tensor(&file, layer.wv); + write_tensor(&file, layer.wo); + write_tensor(&file, layer.ffn_norm); + write_tensor(&file, layer.w1); + write_tensor(&file, layer.w2); + write_tensor(&file, layer.w3); + } +} + +struct train_params get_default_train_params() { + struct train_params params; + params.fn_vocab_model = "models/ggml-vocab.bin"; + params.fn_llama2c_output_model = "ak_llama_model.bin"; + params.fn_train_data = "shakespeare.txt"; + params.fn_checkpoint_in = "checkpoint.bin"; + params.fn_checkpoint_out = "checkpoint.bin"; + params.fn_model_out = "ggml-checkpoint-f32.bin"; + + params.seed = -1; + + params.n_ctx = 128; + params.n_embd = 256; + params.n_mult = 256; + params.n_head = 8; + params.n_layer = 16; + params.n_rotmax = 64; + + params.n_threads = 6; + params.n_batch = 8; + params.n_examples = 8; + params.n_predict = 1024; + + params.print_info_interval = 1; + params.print_details_interval = 2; + + params.samples_start_after_nl = false; + params.use_adam = true; + params.use_flash = true; + params.use_scratch = true; + + // only adam + params.warmup = 100; + params.cos_decay_steps = 1000; + params.cos_decay_restart = 1.1f; + params.cos_decay_alpha = 0.0f; + + params.lbfgs_n_iter = 16; + params.adam_n_iter = 16; + params.adam_alpha = 1e-3f; + params.adam_decay = 1e-3f; + + params.mem_model_gb = 2; + params.mem_compute_gb = 24; + params.mem_compute0_gb = 8; + params.mem_compute1_gb = 2; + + return params; +} + +void print_usage(int /*argc*/, char ** argv, const struct train_params * params) { + fprintf(stderr, "usage: %s [options]\n", argv[0]); + fprintf(stderr, "\n"); + fprintf(stderr, "options:\n"); + fprintf(stderr, " -h, --help show this help message and exit\n"); + fprintf(stderr, " --copy-vocab-from-model FNAME llama2.c vocabulary or ggml model path from 
which to copy vocab (default '%s')\n", params->fn_vocab_model);
+    fprintf(stderr, "  --llama2c-model FNAME         [REQUIRED] model path from which to load Karpathy's llama2.c model\n");
+    fprintf(stderr, "  --llama2c-output-model FNAME  model path to save the converted llama2.c model (default '%s')\n", params->fn_llama2c_output_model);
+    fprintf(stderr, "\n");
+}
+
+bool params_parse(int argc, char ** argv, struct train_params * params) {
+    bool invalid_param = false;
+    bool reqd_param_found = false;
+    std::string arg;
+    struct train_params default_params = get_default_train_params();
+    const std::string arg_prefix = "--";
+
+    for (int i = 1; i < argc; i++) {
+        arg = argv[i];
+        if (arg.compare(0, arg_prefix.size(), arg_prefix) == 0) {
+            std::replace(arg.begin(), arg.end(), '_', '-');
+        }
+
+        if (arg == "--copy-vocab-from-model") {
+            if (++i >= argc) {
+                invalid_param = true;
+                break;
+            }
+            params->fn_vocab_model = argv[i];
+        } else if (arg == "--llama2c-model") {
+            if (++i >= argc) {
+                invalid_param = true;
+                break;
+            }
+            reqd_param_found = true;
+            params->fn_llama2c_model = argv[i];
+        } else if (arg == "--llama2c-output-model") {
+            if (++i >= argc) {
+                invalid_param = true;
+                break;
+            }
+            params->fn_llama2c_output_model = argv[i];
+        } else if (arg == "-h" || arg == "--help") {
+            print_usage(argc, argv, &default_params);
+            exit(0);
+        } else {
+            fprintf(stderr, "error: unknown argument: %s\n", arg.c_str());
+            print_usage(argc, argv, &default_params);
+            exit(1);
+        }
+    }
+    if (invalid_param) {
+        fprintf(stderr, "error: invalid parameter for argument: %s\n", arg.c_str());
+        print_usage(argc, argv, &default_params);
+        exit(1);
+    }
+    if (!reqd_param_found){
+        fprintf(stderr, "error: please specify a llama2.c .bin file to be converted with argument --llama2c-model\n");
+        print_usage(argc, argv, &default_params);
+        exit(1);
+    }
+
+    return true;
+}
+
+int main(int argc, char ** argv) {
+    struct train_params params = get_default_train_params();
+    if (!params_parse(argc, argv, &params)) {
+        return 1;
+    }
+    Config config;
+    TransformerWeights weights;
+    {
+        FILE *file = fopen(params.fn_llama2c_model, "rb");
+        if (!file) { printf("Unable to open the checkpoint file %s!\n", params.fn_llama2c_model); return 1; }
+        // read in the config header
+        if(fread(&config, sizeof(Config), 1, file) != 1) { return 1; }
+        // read in the Transformer weights
+        malloc_weights(&weights, &config);
+        if(checkpoint_init_weights(&weights, &config, file)) { return 1; }
+        fclose(file);
+    }
+
+    struct llama_vocab vocab;
+    load_vocab(params.fn_vocab_model, &config, &vocab);
+
+    struct my_llama_model model;
+    model.hparams.n_vocab = config.vocab_size; //llama_n_vocab(lctx);
+    model.hparams.n_ctx   = params.n_ctx;
+    model.hparams.n_embd  = config.dim; //params.n_embd;
+    model.hparams.n_mult  = 32;//params.n_mult;
+    model.hparams.n_head  = config.n_heads; //params.n_head;
+    model.hparams.n_layer = config.n_layers; //params.n_layer;
+    model.hparams.n_rot   = std::min((uint32_t)params.n_rotmax, model.hparams.n_embd / model.hparams.n_head);
+    print_params(&model.hparams);
+    struct ggml_init_params lcparams;
+    lcparams.mem_size   = 1024ll*1024ll*1024ll*((size_t) params.mem_model_gb);
+    lcparams.mem_buffer = NULL;
+    lcparams.no_alloc   = false;
+
+    model.ctx = ggml_init(lcparams);
+
+    init_model(&model);
+    save_as_llama_model(&vocab, &model, &weights, params.fn_llama2c_output_model);
+
+    printf("Saved llama2.c model file %s in ggml format at %s\n", params.fn_llama2c_model, params.fn_llama2c_output_model);
+
+    ggml_free(model.ctx);
+    free_weights(&weights);
+
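+    // For reference, a typical invocation of this converter looks like the
+    // following (binary name and input paths are illustrative only):
+    //
+    //   ./convert-llama2c-to-ggml \
+    //       --copy-vocab-from-model models/ggml-vocab.bin \
+    //       --llama2c-model model.bin \
+    //       --llama2c-output-model model-ggml.bin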
+    return 0;
+}

From 2f52008b203fc97e72d850e02b08c0cd49f56969 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?=
Date: Sat, 12 Aug 2023 07:24:46 +0300
Subject: [PATCH 39/87] gguf : rm references to old file magics

---
 gguf-llama.cpp |  4 ----
 gguf-llama.h   | 12 ------------
 2 files changed, 16 deletions(-)

diff --git a/gguf-llama.cpp b/gguf-llama.cpp
index 40d5ffd14..f80f823a0 100644
--- a/gguf-llama.cpp
+++ b/gguf-llama.cpp
@@ -3367,10 +3367,6 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const
     {
         uint32_t magic;
         fin.read((char *) &magic, sizeof(magic));
-        if (magic != LLAMA_FILE_MAGIC_GGLA) {
-            fprintf(stderr, "%s: bad file magic\n", __func__);
-            return 1;
-        }
         uint32_t format_version;
         fin.read((char *) &format_version, sizeof(format_version));
diff --git a/gguf-llama.h b/gguf-llama.h
index 6062f375e..d3c0d6b87 100644
--- a/gguf-llama.h
+++ b/gguf-llama.h
@@ -34,18 +34,6 @@
 # define DEPRECATED(func, hint) func
 #endif

-#define LLAMA_FILE_MAGIC_GGJT 0x67676a74u // 'ggjt'
-#define LLAMA_FILE_MAGIC_GGLA 0x67676c61u // 'ggla'
-#define LLAMA_FILE_MAGIC_GGMF 0x67676d66u // 'ggmf'
-#define LLAMA_FILE_MAGIC_GGML 0x67676d6cu // 'ggml'
-#define LLAMA_FILE_MAGIC_GGSN 0x6767736eu // 'ggsn'
-
-#define LLAMA_FILE_VERSION 3
-#define LLAMA_FILE_MAGIC LLAMA_FILE_MAGIC_GGJT
-#define LLAMA_FILE_MAGIC_UNVERSIONED LLAMA_FILE_MAGIC_GGML
-#define LLAMA_SESSION_MAGIC LLAMA_FILE_MAGIC_GGSN
-#define LLAMA_SESSION_VERSION 1
-
 #define LLAMA_DEFAULT_SEED 0xFFFFFFFF

 #if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) || defined(GGML_USE_METAL)

From 4fa017a1f9ed89c872ca007e0701f5d7ced4f8fa Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?=
Date: Sat, 12 Aug 2023 10:40:56 +0300
Subject: [PATCH 40/87] gguf : start implementing quantization (WIP)

---
 gguf-llama.cpp |  4 ++++
 gguf-util.h    | 23 ++++++++++++++++-------
 2 files changed, 20 insertions(+), 7 deletions(-)

diff --git a/gguf-llama.cpp b/gguf-llama.cpp
index f80f823a0..0581755b1 100644
--- a/gguf-llama.cpp
+++ b/gguf-llama.cpp
@@ -614,6 +614,7 @@ struct ggml_context * ctx_data = NULL;
 struct gguf_file_saver {
     gguf_file file;
     gguf_file_loader * fl;
+    size_t info_offset;

     gguf_file_saver(const char * fname, gguf_file_loader * fl, enum llama_ftype new_ftype) : file(fname, "wb"), fl(fl) {
         fprintf(stderr, "llama.cpp: saving model to %s\n", fname);
@@ -734,6 +735,9 @@ struct gguf_file_saver {
             }
         }

+        info_offset = file.tell();
+        size_t count = gguf_get_data_offset(fl->gguf_ctx) - info_offset;
+        file.write_zeros(count);
     }

diff --git a/gguf-util.h b/gguf-util.h
index 9134019a4..0964e6d02 100644
--- a/gguf-util.h
+++ b/gguf-util.h
@@ -106,18 +106,21 @@ struct gguf_file {
     }

-    void write_str(const std::string & val) {
+    size_t write_str(const std::string & val) {
+        size_t total_written = 0;
         const int32_t n = val.size();
-        fwrite((const char *) &n, sizeof(n), 1, fp);
-        fwrite(val.c_str(), n, 1, fp);
+        total_written += fwrite((const char *) &n, sizeof(n), 1, fp);
+        total_written += fwrite(val.c_str(), n, 1, fp);
+
+        return total_written;
     }

-    void write_i32(int32_t val) {
-        fwrite((const char *) &val, sizeof(val), 1, fp);
+    size_t write_i32(int32_t val) {
+        return fwrite((const char *) &val, sizeof(val), 1, fp);
     }

-    void write_u64(size_t val) {
-        fwrite((const char *) &val, sizeof(val), 1, fp);
+    size_t write_u64(size_t val) {
+        return fwrite((const char *) &val, sizeof(val), 1, fp);
     }

     template<typename T>
@@ -167,6 +170,12 @@ struct gguf_file {
             fwrite(val[i].c_str(), nstr, 1, fp);
         }
     }
+
+    void write_zeros(size_t
count) { + for (size_t i = 0; i < count; ++i) { + fputc(0, fp); + } + } }; #if defined(_WIN32) From 0e1a3c7e7dea7877840ed7a203d589bd841cadff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Sat, 12 Aug 2023 11:32:34 +0300 Subject: [PATCH 41/87] gguf : start implementing quantization (WIP) --- gguf-llama.cpp | 33 +++++++++++++++++++++++++++------ 1 file changed, 27 insertions(+), 6 deletions(-) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index 0581755b1..cf2c56955 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -615,6 +615,8 @@ struct gguf_file_saver { gguf_file file; gguf_file_loader * fl; size_t info_offset; + size_t tensor_offset = 0; + gguf_file_saver(const char * fname, gguf_file_loader * fl, enum llama_ftype new_ftype) : file(fname, "wb"), fl(fl) { fprintf(stderr, "llama.cpp: saving model to %s\n", fname); @@ -622,8 +624,6 @@ struct gguf_file_saver { write_hparams(new_ftype); } - // TODO: probably it's better to move these to gguf_file - void write_header() { const int32_t magic = GGUF_MAGIC; file.write_i32(magic); @@ -740,12 +740,26 @@ struct gguf_file_saver { file.write_zeros(count); } + size_t write_tensor_info(llama_load_tensor & tensor) { + size_t total_written = 0; + file.seek(0, info_offset); + total_written += file.write_str(tensor.name); + + int32_t n_dims = tensor.ne.size(); + file.write_i32(n_dims); + for (int32_t i = 0; i < n_dims; ++i) { + total_written += file.write_i32(i); + } + + total_written += file.write_u64(tensor_offset); + info_offset += total_written; + + file.seek(0, SEEK_END); + + return total_written; + } void write_tensor(llama_load_tensor & tensor, enum ggml_type new_type, const void * new_data, size_t new_size) { - GGML_UNUSED(tensor); - GGML_UNUSED(new_data); - GGML_UNUSED(new_size); - switch (new_type) { case GGML_TYPE_F32: case GGML_TYPE_F16: @@ -763,6 +777,13 @@ struct gguf_file_saver { default: GGML_ASSERT(false); } + write_tensor_info(tensor); + // file.write_raw(new_data); + GGML_UNUSED(new_data); + size_t padded_size = GGML_PAD(new_size, GGUF_DEFAULT_ALIGNMENT); // TODO: handle custom alignment + size_t pad = padded_size - new_size; + file.write_zeros(pad); + tensor_offset += padded_size; } }; From c4f02b4f74c5fdb6493090c238891163709631e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Sat, 12 Aug 2023 12:01:17 +0300 Subject: [PATCH 42/87] gguf : start implementing quantization (WIP) --- gguf-llama.cpp | 3 +-- gguf-util.h | 4 ++++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index cf2c56955..defe26fe0 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -778,8 +778,7 @@ struct gguf_file_saver { } write_tensor_info(tensor); - // file.write_raw(new_data); - GGML_UNUSED(new_data); + file.write_raw(new_data, new_size); size_t padded_size = GGML_PAD(new_size, GGUF_DEFAULT_ALIGNMENT); // TODO: handle custom alignment size_t pad = padded_size - new_size; file.write_zeros(pad); diff --git a/gguf-util.h b/gguf-util.h index 0964e6d02..17f9dc968 100644 --- a/gguf-util.h +++ b/gguf-util.h @@ -123,6 +123,10 @@ struct gguf_file { return fwrite((const char *) &val, sizeof(val), 1, fp); } + void write_raw(const void * data, size_t size) { + fwrite(data, size, 1, fp); + } + template void write_val(const std::string & key, enum gguf_type type, const T & val) { write_str(key); From b2571af255e6a1ed42dffeb74ef51390f2cf5144 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Sat, 12 Aug 2023 14:28:17 +0300 Subject: [PATCH 
43/87] gguf : start implementing quantization (WIP)

---
 Makefile               |  2 +-
 examples/gguf/gguf.cpp | 10 +++++++---
 gguf-llama.cpp         |  8 ++++++--
 3 files changed, 14 insertions(+), 6 deletions(-)

diff --git a/Makefile b/Makefile
index f5922c95d..304b3035d 100644
--- a/Makefile
+++ b/Makefile
@@ -393,7 +393,7 @@ $(LIB_PRE)embdinput$(DSO_EXT): examples/embd-input/embd-input.h examples/embd-in
 embd-input-test: $(LIB_PRE)embdinput$(DSO_EXT) examples/embd-input/embd-input-test.cpp build-info.h ggml.o llama.o common.o $(OBJS)
 	$(CXX) $(CXXFLAGS) $(filter-out %$(DSO_EXT),$(filter-out %.h,$(filter-out %.hpp,$^))) -o $@ $(LDFLAGS) -L. -lembdinput

-gguf: examples/gguf/gguf.cpp build-info.h ggml.o $(OBJS)
+gguf: examples/gguf/gguf.cpp build-info.h ggml.o gguf-llama.o $(OBJS)
 	$(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS)

 gguf-llama-simple: examples/gguf/gguf-llama-simple.cpp build-info.h ggml.o gguf-llama.o common.o $(OBJS)
diff --git a/examples/gguf/gguf.cpp b/examples/gguf/gguf.cpp
index a1b8edc71..6f454a204 100644
--- a/examples/gguf/gguf.cpp
+++ b/examples/gguf/gguf.cpp
@@ -1,5 +1,6 @@
 #include "ggml.h"
 #include "gguf-util.h"
+#include "gguf-llama.h"

 #include <cstdio>
 #include <cinttypes>
@@ -7,14 +8,14 @@
 #include <sstream>
 #include <fstream>
 #include <vector>
-
+/*
 template<typename T>
 static std::string to_string(const T & val) {
     std::stringstream ss;
     ss << val;
     return ss.str();
 }
-
+*/
 void gguf_ex_write_str(std::ofstream & fout, const std::string & val) {
     const int32_t n = val.size();
     fout.write((const char *) &n, sizeof(n));
@@ -414,7 +415,7 @@ int main(int argc, char ** argv) {
     const std::string fname(argv[1]);
     const std::string mode (argv[2]);

-    GGML_ASSERT((mode == "r" || mode == "w") && "mode must be r or w");
+    GGML_ASSERT((mode == "r" || mode == "w" || mode == "q") && "mode must be r, w or q");

     if (mode == "w") {
         GGML_ASSERT(gguf_ex_write(fname) && "failed to write gguf file");
@@ -422,6 +423,9 @@ int main(int argc, char ** argv) {
         GGML_ASSERT(gguf_ex_read_0(fname) && "failed to read gguf file");
         GGML_ASSERT(gguf_ex_read_1(fname) && "failed to read gguf file");
         GGML_ASSERT(gguf_ex_read_2(fname) && "failed to read gguf file");
+    } else if (mode == "q") {
+        llama_model_quantize_params params = llama_model_quantize_default_params();
+        llama_model_quantize(fname.c_str(), "quant.gguf", &params);
     }

     return 0;
diff --git a/gguf-llama.cpp b/gguf-llama.cpp
index defe26fe0..f1755fef5 100644
--- a/gguf-llama.cpp
+++ b/gguf-llama.cpp
@@ -738,15 +738,19 @@ struct gguf_file_saver {
         info_offset = file.tell();
         size_t count = gguf_get_data_offset(fl->gguf_ctx) - info_offset;
         file.write_zeros(count);
+        printf("info_offset = %zu\n", info_offset);
+        file.seek(info_offset, SEEK_SET);
+        GGML_ASSERT(info_offset == file.tell());
     }

     size_t write_tensor_info(llama_load_tensor & tensor) {
         size_t total_written = 0;
-        file.seek(0, info_offset);
+        file.seek(info_offset, SEEK_SET);
+        GGML_ASSERT(info_offset == file.tell());
         total_written += file.write_str(tensor.name);

         int32_t n_dims = tensor.ne.size();
-        file.write_i32(n_dims);
+        total_written += file.write_i32(n_dims);
         for (int32_t i = 0; i < n_dims; ++i) {
             total_written += file.write_i32(i);
         }

From fa7c39540cff0ce6f6d245baeb15ac1ebe6cdd69 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?=
Date: Sat, 12 Aug 2023 15:55:58 +0300
Subject: [PATCH 44/87] gguf : start implementing quantization (WIP)

---
 gguf-llama.cpp | 15 +++++++++++----
 gguf-util.h    | 12 ++++++++----
 2 files changed, 19 insertions(+), 8 deletions(-)

diff --git a/gguf-llama.cpp b/gguf-llama.cpp
index f1755fef5..eecefc0f6 100644
---
a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -525,6 +525,11 @@ struct ggml_context * ctx_data = NULL; // TODO make keysconstants in header // TODO: read all hparams from file + int q_ver_idx = gguf_find_key (gguf_ctx, "general.quantization_version"); + if (q_ver_idx != -1) { + hparams.ftype = gguf_get_val_u32(gguf_ctx, q_ver_idx); + } + hparams.n_vocab = read_n_vocab(); hparams.n_ctx = read_u32("llama.context_length"); hparams.n_embd = read_u32("llama.embedding_length"); @@ -738,27 +743,29 @@ struct gguf_file_saver { info_offset = file.tell(); size_t count = gguf_get_data_offset(fl->gguf_ctx) - info_offset; file.write_zeros(count); - printf("info_offset = %zu\n", info_offset); file.seek(info_offset, SEEK_SET); GGML_ASSERT(info_offset == file.tell()); } - size_t write_tensor_info(llama_load_tensor & tensor) { + size_t write_tensor_info(llama_load_tensor & tensor, enum ggml_type type) { size_t total_written = 0; file.seek(info_offset, SEEK_SET); GGML_ASSERT(info_offset == file.tell()); total_written += file.write_str(tensor.name); +printf("total_written = %zu, name = %s\n", total_written, tensor.name.c_str()); int32_t n_dims = tensor.ne.size(); total_written += file.write_i32(n_dims); for (int32_t i = 0; i < n_dims; ++i) { - total_written += file.write_i32(i); + total_written += file.write_i32(tensor.ne[i]); } + total_written += file.write_i32(type); total_written += file.write_u64(tensor_offset); info_offset += total_written; file.seek(0, SEEK_END); + printf("total_written = %zu\n", total_written); return total_written; } @@ -781,7 +788,7 @@ struct gguf_file_saver { default: GGML_ASSERT(false); } - write_tensor_info(tensor); + write_tensor_info(tensor, new_type); file.write_raw(new_data, new_size); size_t padded_size = GGML_PAD(new_size, GGUF_DEFAULT_ALIGNMENT); // TODO: handle custom alignment size_t pad = padded_size - new_size; diff --git a/gguf-util.h b/gguf-util.h index 17f9dc968..ed7d53f69 100644 --- a/gguf-util.h +++ b/gguf-util.h @@ -109,18 +109,22 @@ struct gguf_file { size_t write_str(const std::string & val) { size_t total_written = 0; const int32_t n = val.size(); - total_written += fwrite((const char *) &n, sizeof(n), 1, fp); - total_written += fwrite(val.c_str(), n, 1, fp); + fwrite((const char *) &n, sizeof(n), 1, fp); + total_written += sizeof(n); + fwrite(val.c_str(), n, 1, fp); + total_written += n; return total_written; } size_t write_i32(int32_t val) { - return fwrite((const char *) &val, sizeof(val), 1, fp); + fwrite((const char *) &val, sizeof(val), 1, fp); + return sizeof(val); } size_t write_u64(size_t val) { - return fwrite((const char *) &val, sizeof(val), 1, fp); + fwrite((const char *) &val, sizeof(val), 1, fp); + return sizeof(val); } void write_raw(const void * data, size_t size) { From 1fc3d30b71a707187eb1f995c4776db7aaa6265a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Sat, 12 Aug 2023 16:09:47 +0300 Subject: [PATCH 45/87] gguf : start implementing quantization (WIP) --- examples/gguf/gguf.cpp | 2 +- gguf-llama.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/gguf/gguf.cpp b/examples/gguf/gguf.cpp index 6f454a204..08f2b6322 100644 --- a/examples/gguf/gguf.cpp +++ b/examples/gguf/gguf.cpp @@ -421,7 +421,7 @@ int main(int argc, char ** argv) { GGML_ASSERT(gguf_ex_write(fname) && "failed to write gguf file"); } else if (mode == "r") { GGML_ASSERT(gguf_ex_read_0(fname) && "failed to read gguf file"); - GGML_ASSERT(gguf_ex_read_1(fname) && "failed to read gguf file"); + 
//GGML_ASSERT(gguf_ex_read_1(fname) && "failed to read gguf file");
         GGML_ASSERT(gguf_ex_read_2(fname) && "failed to read gguf file");
     } else if (mode == "q") {
         llama_model_quantize_params params = llama_model_quantize_default_params();
diff --git a/gguf-llama.cpp b/gguf-llama.cpp
index eecefc0f6..ea721a0c7 100644
--- a/gguf-llama.cpp
+++ b/gguf-llama.cpp
@@ -527,7 +527,7 @@ struct ggml_context * ctx_data = NULL;
     // TODO: read all hparams from file
     int q_ver_idx = gguf_find_key (gguf_ctx, "general.quantization_version");
     if (q_ver_idx != -1) {
-        hparams.ftype = gguf_get_val_u32(gguf_ctx, q_ver_idx);
+        hparams.ftype = (enum llama_ftype) gguf_get_val_u32(gguf_ctx, q_ver_idx);
     }

     hparams.n_vocab = read_n_vocab();

From 202eab04d3f5a06304b9fd43b4b6b079d3f76dfa Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?=
Date: Sat, 12 Aug 2023 16:39:05 +0300
Subject: [PATCH 46/87] gguf : quantization is working

---
 examples/gguf/gguf.cpp | 2 +-
 gguf-llama.cpp         | 7 ++-----
 gguf-util.h            | 4 ++++
 3 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/examples/gguf/gguf.cpp b/examples/gguf/gguf.cpp
index 08f2b6322..6f454a204 100644
--- a/examples/gguf/gguf.cpp
+++ b/examples/gguf/gguf.cpp
@@ -421,7 +421,7 @@ int main(int argc, char ** argv) {
         GGML_ASSERT(gguf_ex_write(fname) && "failed to write gguf file");
     } else if (mode == "r") {
         GGML_ASSERT(gguf_ex_read_0(fname) && "failed to read gguf file");
-        //GGML_ASSERT(gguf_ex_read_1(fname) && "failed to read gguf file");
+        GGML_ASSERT(gguf_ex_read_1(fname) && "failed to read gguf file");
         GGML_ASSERT(gguf_ex_read_2(fname) && "failed to read gguf file");
     } else if (mode == "q") {
         llama_model_quantize_params params = llama_model_quantize_default_params();
diff --git a/gguf-llama.cpp b/gguf-llama.cpp
index ea721a0c7..700d6009b 100644
--- a/gguf-llama.cpp
+++ b/gguf-llama.cpp
@@ -752,7 +752,6 @@ struct gguf_file_saver {
         file.seek(info_offset, SEEK_SET);
         GGML_ASSERT(info_offset == file.tell());
         total_written += file.write_str(tensor.name);
-printf("total_written = %zu, name = %s\n", total_written, tensor.name.c_str());

         int32_t n_dims = tensor.ne.size();
         total_written += file.write_i32(n_dims);
@@ -765,8 +764,7 @@ printf("total_written = %zu, name = %s\n", total_written, tensor.name.c_str());
         info_offset += total_written;

         file.seek(0, SEEK_END);

-        printf("total_written = %zu\n", total_written);
-
+
         return total_written;
     }

@@ -936,8 +934,7 @@ struct llama_model_loader {
         } else {
             gguf_file & file = file_loader->file;
             file.seek(lt.file_off, SEEK_SET);
-            // TODO
-            //file.read_raw(lt.data, lt.size);
+            file.read_raw(lt.data, lt.size);
         }

         if (0) {
diff --git a/gguf-util.h b/gguf-util.h
index ed7d53f69..6395cf304 100644
--- a/gguf-util.h
+++ b/gguf-util.h
@@ -131,6 +131,10 @@ struct gguf_file {
         fwrite(data, size, 1, fp);
     }

+    void read_raw(void * data, size_t size) {
+        fread(data, size, 1, fp);
+    }
+
     template<typename T>
     void write_val(const std::string & key, enum gguf_type type, const T & val) {
         write_str(key);

From 60d540831b626443caddad12e04792bce91d3b4f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?=
Date: Sat, 12 Aug 2023 21:42:31 +0300
Subject: [PATCH 47/87] gguf : proper closing of file

---
 gguf-util.h | 41 ++++++++++++++++++++++++++++---------
 1 file changed, 32 insertions(+), 9 deletions(-)

diff --git a/gguf-util.h b/gguf-util.h
index 6395cf304..70673404e 100644
--- a/gguf-util.h
+++ b/gguf-util.h
@@ -105,7 +105,6 @@ struct gguf_file {
         GGML_ASSERT(ret == 0); // same
     }
-
     size_t write_str(const std::string & val) {
         size_t total_written = 0;
         const int32_t n = val.size();
@@ -127,14 +126,6 @@ struct gguf_file {
         return sizeof(val);
     }

-    void write_raw(const void * data, size_t size) {
-        fwrite(data, size, 1, fp);
-    }
-
-    void read_raw(void * data, size_t size) {
-        fread(data, size, 1, fp);
-    }
-
     template<typename T>
     void write_val(const std::string & key, enum gguf_type type, const T & val) {
         write_str(key);
@@ -155,6 +146,7 @@ struct gguf_file {
         fwrite((const char *) &n, sizeof(n), 1, fp);
         fwrite(val.data(), sizeof(T), n, fp);
     }
+
     template<>
     void write_val(const std::string & key, enum gguf_type type, const std::string & val) {
         write_str(key);
@@ -188,6 +180,37 @@ struct gguf_file {
             fputc(0, fp);
         }
     }
+
+    void read_raw(void * ptr, size_t len) const {
+        if (len == 0) {
+            return;
+        }
+        errno = 0;
+        std::size_t ret = std::fread(ptr, len, 1, fp);
+        if (ferror(fp)) {
+            throw std::runtime_error(format("read error: %s", strerror(errno)));
+        }
+        if (ret != 1) {
+            throw std::runtime_error(std::string("unexpectedly reached end of file"));
+        }
+    }
+
+    void write_raw(const void * ptr, size_t len) const {
+        if (len == 0) {
+            return;
+        }
+        errno = 0;
+        size_t ret = std::fwrite(ptr, len, 1, fp);
+        if (ret != 1) {
+            throw std::runtime_error(format("write error: %s", strerror(errno)));
+        }
+    }
+
+    ~gguf_file() {
+        if (fp) {
+            std::fclose(fp);
+        }
+    }
 };

 #if defined(_WIN32)

From 5d81a715d4a8e1e6d9dad2d978510d997d20595c Mon Sep 17 00:00:00 2001
From: klosax <131523366+klosax@users.noreply.github.com>
Date: Sat, 12 Aug 2023 21:45:45 +0200
Subject: [PATCH 48/87] gguf.py : no need to convert tensors twice

---
 gguf.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/gguf.py b/gguf.py
index 5eb21ee05..0854418a6 100644
--- a/gguf.py
+++ b/gguf.py
@@ -179,20 +179,20 @@ class GGUFWriter:
     def ggml_pad(x: int, n: int) -> int:
        return ((x + n - 1) // n) * n

-    def add_tensor_info(self, name: str, tensor: np.ndarray):
+    def add_tensor_info(self, name: str, tensor_shape: np.ndarray, tensor_dtype: np.dtype, tensor_nbytes: int):
         encoded_name = name.encode("utf8")
         self.ti_data += struct.pack("<I", len(encoded_name))

From: klosax <131523366+klosax@users.noreply.github.com>
Date: Sat, 12 Aug 2023 21:48:58 +0200
Subject: [PATCH 49/87] convert-gptneox-h5-to-gguf.py : no need to convert
 tensors twice

---
 convert-gptneox-h5-to-gguf.py | 63 ++++++++++++++++++-----------------
 1 file changed, 32 insertions(+), 31 deletions(-)

diff --git a/convert-gptneox-h5-to-gguf.py b/convert-gptneox-h5-to-gguf.py
index 22508bd3d..0c0cd6bb1 100644
--- a/convert-gptneox-h5-to-gguf.py
+++ b/convert-gptneox-h5-to-gguf.py
@@ -43,7 +43,6 @@ if len(sys.argv) < 3:

 # output in the same directory as the model
 dir_model = sys.argv[1]
-fname_out = sys.argv[1] + "/ggml-model.bin"

 last_dir = os.path.basename(os.path.normpath(dir_model))

 # possible tensor data types
@@ -59,7 +58,8 @@ if len(sys.argv) > 2:
     if ftype < 0 or ftype > 1:
         print("Invalid ftype: " + str(ftype))
         sys.exit(1)
-    fname_out = sys.argv[1] + "/ggml-model-" + ftype_str[ftype] + ".gguf"
+
+fname_out = sys.argv[1] + "/ggml-model-" + ftype_str[ftype] + ".gguf"

 print("gguf: loading model "+last_dir)

@@ -82,7 +82,6 @@ llm_arch = "gptneox"
 block_count = hparams["num_hidden_layers"]

 gguf_writer.add_name(last_dir)
-gguf_writer.add_description("gguf test model")
 gguf_writer.add_architecture(llm_arch)
 gguf_writer.add_context_length(llm_arch, hparams["max_position_embeddings"])
 gguf_writer.add_embedding_length(llm_arch, hparams["hidden_size"])
@@ -201,22 +200,30 @@ for name in list_vars.keys():
         sys.exit()

     n_dims = len(data.shape)
+    data_dtype = data.dtype

-    # ftype == 0 -> float32, ftype
== 1 -> float16 - ftype_cur = 0 - if ftype != 0: - if name.endswith(".weight") and n_dims == 2: - data = data.astype(np.float16) - ftype_cur = 1 - else: - data = data.astype(np.float32) - ftype_cur = 0 - else: - if data.dtype != np.float32: - data = data.astype(np.float32) - ftype_cur = 0 +# print( name + " dims " + str(n_dims) + " dtype " + str(data.dtype) ) - gguf_writer.add_tensor_info(name, data) + if data.dtype != np.float16 and data.dtype != np.float32: + # convert any unsupported data types to float32 + data_dtype = np.float32 + elif ftype == 1 and data.dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + # if f16 desired, convert any float32 2-dim weight tensors to float16 + data_dtype = np.float16 + + nelements = 1 + + for i in range(n_dims): + nelements *= data.shape[n_dims - 1 - i] + + data_nbytes = 0 + if data_dtype == np.float16: + data_nbytes = nelements * 2 + elif data_dtype == np.float32: + data_nbytes = nelements * 4 + + + gguf_writer.add_tensor_info(name, data.shape, data_dtype, data_nbytes) print("gguf: write header") gguf_writer.write_header_to_file() @@ -226,7 +233,7 @@ print("gguf: write tensor metadata") gguf_writer.write_ti_data_to_file() # tensor data -print("gguf: write tensor data") +print("gguf: convert and write tensor data") for name in list_vars.keys(): data = list_vars[name].squeeze().numpy() @@ -236,20 +243,14 @@ for name in list_vars.keys(): continue n_dims = len(data.shape) + data_dtype = data.dtype - # ftype == 0 -> float32, ftype == 1 -> float16 - ftype_cur = 0 - if ftype != 0: - if name.endswith(".weight") and n_dims == 2: - data = data.astype(np.float16) - ftype_cur = 1 - else: - data = data.astype(np.float32) - ftype_cur = 0 - else: - if data.dtype != np.float32: - data = data.astype(np.float32) - ftype_cur = 0 + if data_dtype != np.float16 and data_dtype != np.float32: + # convert any unsupported data types to float32 + data = data.astype(np.float32) + elif ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + # if f16 desired, convert any float32 2-dim weight tensors to float16 + data = data.astype(np.float16) gguf_writer.write_tensor_to_file(data) From 4cef57c81a2bc7afff72869e6b9659177a11d334 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sat, 12 Aug 2023 21:50:24 +0200 Subject: [PATCH 50/87] convert-llama-h5-to-gguf.py : no need to convert tensors twice --- convert-llama-h5-to-gguf.py | 65 +++++++++++++++++++------------------ 1 file changed, 33 insertions(+), 32 deletions(-) diff --git a/convert-llama-h5-to-gguf.py b/convert-llama-h5-to-gguf.py index 0b477a133..519b73966 100644 --- a/convert-llama-h5-to-gguf.py +++ b/convert-llama-h5-to-gguf.py @@ -32,7 +32,6 @@ if len(sys.argv) < 3: # output in the same directory as the model dir_model = sys.argv[1] -fname_out = sys.argv[1] + "/ggml-model.bin" last_dir = os.path.basename(os.path.normpath(dir_model)) @@ -49,7 +48,8 @@ if len(sys.argv) > 2: if ftype < 0 or ftype > 1: print("Invalid ftype: " + str(ftype)) sys.exit(1) - fname_out = sys.argv[1] + "/ggml-model-" + ftype_str[ftype] + ".gguf" + +fname_out = sys.argv[1] + "/ggml-model-" + ftype_str[ftype] + ".gguf" print("gguf: loading model "+last_dir) @@ -72,8 +72,7 @@ llm_arch = "llama" head_count = hparams["num_attention_heads"] block_count = hparams["num_hidden_layers"] -gguf_writer.add_name("llama2-7b") -gguf_writer.add_description("gguf test model") +gguf_writer.add_name(last_dir) gguf_writer.add_architecture(llm_arch) 
gguf_writer.add_context_length(llm_arch, hparams["max_position_embeddings"]) gguf_writer.add_embedding_length(llm_arch, hparams["hidden_size"]) @@ -186,22 +185,30 @@ for name in list_vars.keys(): sys.exit() n_dims = len(data.shape) + data_dtype = data.dtype - # ftype == 0 -> float32, ftype == 1 -> float16 - ftype_cur = 0 - if ftype != 0: - if name.endswith(".weight") and n_dims == 2: - data = data.astype(np.float16) - ftype_cur = 1 - else: - data = data.astype(np.float32) - ftype_cur = 0 - else: - if data.dtype != np.float32: - data = data.astype(np.float32) - ftype_cur = 0 +# print( name + " dims " + str(n_dims) + " dtype " + str(data.dtype) ) - gguf_writer.add_tensor_info(name, data) + if data.dtype != np.float16 and data.dtype != np.float32: + # convert any unsupported data types to float32 + data_dtype = np.float32 + elif ftype == 1 and data.dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + # if f16 desired, convert any float32 2-dim weight tensors to float16 + data_dtype = np.float16 + + nelements = 1 + + for i in range(n_dims): + nelements *= data.shape[n_dims - 1 - i] + + data_nbytes = 0 + if data_dtype == np.float16: + data_nbytes = nelements * 2 + elif data_dtype == np.float32: + data_nbytes = nelements * 4 + + + gguf_writer.add_tensor_info(name, data.shape, data_dtype, data_nbytes) print("gguf: write header") @@ -212,7 +219,7 @@ print("gguf: write tensor metadata") gguf_writer.write_ti_data_to_file() # tensor data -print("gguf: write tensor data") +print("gguf: convert and write tensor data") for name in list_vars.keys(): data = list_vars[name].squeeze().numpy() @@ -226,20 +233,14 @@ for name in list_vars.keys(): data = permute(data, head_count) n_dims = len(data.shape) + data_dtype = data.dtype - # ftype == 0 -> float32, ftype == 1 -> float16 - ftype_cur = 0 - if ftype != 0: - if name.endswith(".weight") and n_dims == 2: - data = data.astype(np.float16) - ftype_cur = 1 - else: - data = data.astype(np.float32) - ftype_cur = 0 - else: - if data.dtype != np.float32: - data = data.astype(np.float32) - ftype_cur = 0 + if data_dtype != np.float16 and data_dtype != np.float32: + # convert any unsupported data types to float32 + data = data.astype(np.float32) + elif ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + # if f16 desired, convert any float32 2-dim weight tensors to float16 + data = data.astype(np.float16) gguf_writer.write_tensor_to_file(data) From f8218477b37d14b522e6422bdbb93302d8b8f421 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sat, 12 Aug 2023 22:29:35 +0200 Subject: [PATCH 51/87] convert-gptneox-h5-to-gguf.py : simplify nbytes --- convert-gptneox-h5-to-gguf.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/convert-gptneox-h5-to-gguf.py b/convert-gptneox-h5-to-gguf.py index 0c0cd6bb1..770a5d9ca 100644 --- a/convert-gptneox-h5-to-gguf.py +++ b/convert-gptneox-h5-to-gguf.py @@ -211,17 +211,7 @@ for name in list_vars.keys(): # if f16 desired, convert any float32 2-dim weight tensors to float16 data_dtype = np.float16 - nelements = 1 - - for i in range(n_dims): - nelements *= data.shape[n_dims - 1 - i] - - data_nbytes = 0 - if data_dtype == np.float16: - data_nbytes = nelements * 2 - elif data_dtype == np.float32: - data_nbytes = nelements * 4 - + data_nbytes = data.size * 2 if data_dtype == np.float16 else data.size * 4 gguf_writer.add_tensor_info(name, data.shape, data_dtype, data_nbytes) From e606ffeaeed3bca9d5a1d97774eaf1a3d602088f Mon Sep 
17 00:00:00 2001
From: klosax <131523366+klosax@users.noreply.github.com>
Date: Sat, 12 Aug 2023 22:30:35 +0200
Subject: [PATCH 52/87] convert-llama-h5-to-gguf.py : simplify nbytes

---
 convert-llama-h5-to-gguf.py | 12 +-----------
 1 file changed, 1 insertion(+), 11 deletions(-)

diff --git a/convert-llama-h5-to-gguf.py b/convert-llama-h5-to-gguf.py
index 519b73966..bf6ff6aa7 100644
--- a/convert-llama-h5-to-gguf.py
+++ b/convert-llama-h5-to-gguf.py
@@ -196,17 +196,7 @@ for name in list_vars.keys():
         # if f16 desired, convert any float32 2-dim weight tensors to float16
         data_dtype = np.float16

-    nelements = 1
-
-    for i in range(n_dims):
-        nelements *= data.shape[n_dims - 1 - i]
-
-    data_nbytes = 0
-    if data_dtype == np.float16:
-        data_nbytes = nelements * 2
-    elif data_dtype == np.float32:
-        data_nbytes = nelements * 4
-
+    data_nbytes = data.size * 2 if data_dtype == np.float16 else data.size * 4

     gguf_writer.add_tensor_info(name, data.shape, data_dtype, data_nbytes)

From 5e58ffa1ed8a97297429367947206dbece94e30d Mon Sep 17 00:00:00 2001
From: klosax <131523366+klosax@users.noreply.github.com>
Date: Sat, 12 Aug 2023 23:50:58 +0200
Subject: [PATCH 53/87] gptneox-main.cpp : n_layer --> n_block

---
 gptneox-main.cpp | 135 +++++++++++++++++++----------------------------
 1 file changed, 53 insertions(+), 82 deletions(-)

diff --git a/gptneox-main.cpp b/gptneox-main.cpp
index f2be93e4b..7420daf96 100644
--- a/gptneox-main.cpp
+++ b/gptneox-main.cpp
@@ -24,13 +24,13 @@ struct gpt_neox_hparams {
     uint32_t n_ctx   = 0;
     uint32_t n_embd  = 0;
     uint32_t n_head  = 0;
-    uint32_t n_layer = 0;
+    uint32_t n_block = 0;
     uint32_t n_rot   = 0; // rotary_pct * (n_embd / n_head)
     bool par_res = true;
     float norm_eps = 1e-5;
 };

-struct gpt_neox_layer {
+struct gpt_neox_block {
     // pre normalization
     struct ggml_tensor * ln_1_g;
     struct ggml_tensor * ln_1_b;
@@ -65,7 +65,7 @@ struct gpt_neox_model {

     struct ggml_tensor * lmh_g; // language model head

-    std::vector<gpt_neox_layer> layers;
+    std::vector<gpt_neox_block> blocks;

     // key + value memory
     struct ggml_tensor * memory_k;
@@ -415,7 +415,7 @@ bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2
     if (keyidx != -1) { hparams.n_head = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } }

     if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.layer_count");
-    if (keyidx != -1) { hparams.n_layer = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } }
+    if (keyidx != -1) { hparams.n_block = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } }

     if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.rope.dimension_count");
     if (keyidx != -1) { hparams.n_rot = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } }
@@ -434,7 +434,7 @@ bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2
     printf("%s: n_ctx    = %d\n", __func__, hparams.n_ctx);
     printf("%s: n_embd   = %d\n", __func__, hparams.n_embd);
     printf("%s: n_head   = %d\n", __func__, hparams.n_head);
-    printf("%s: n_layer  = %d\n", __func__, hparams.n_layer);
+    printf("%s: n_block  = %d\n", __func__, hparams.n_block);
     printf("%s: n_rot    = %d\n", __func__, hparams.n_rot);
     printf("%s: par_res  = %d\n", __func__, hparams.par_res);
     printf("%s: norm_eps = %g\n", __func__, hparams.norm_eps);
@@ -545,9 +545,9 @@ bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2
     // prepare memory for the weights
     {
-        const int n_layer = model.hparams.n_layer;
+        const int n_block = model.hparams.n_block;

-        model.layers.resize(n_layer);
+        model.blocks.resize(n_block);

         model.wte = ggml_get_tensor(ctx,
"transformer.token_embd.weight"); model.ln_f_g = ggml_get_tensor(ctx, "transformer.output_norm.weight"); @@ -560,47 +560,47 @@ bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2 model.tensors["transformer.output_norm.bias"] = model.ln_f_b; model.tensors["transformer.output.weight"] = model.lmh_g; - for (int i = 0; i < n_layer; ++i) { - auto & layer = model.layers[i]; + for (int i = 0; i < n_block; ++i) { + auto & block = model.blocks[i]; std::string blocknamestart = "transformer.blocks." + std::to_string(i) + "."; - layer.ln_1_g = get_tensor_ex(ctx, blocknamestart + "attn_norm.weight" ); - layer.ln_1_b = get_tensor_ex(ctx, blocknamestart + "attn_norm.bias" ); + block.ln_1_g = get_tensor_ex(ctx, blocknamestart + "attn_norm.weight" ); + block.ln_1_b = get_tensor_ex(ctx, blocknamestart + "attn_norm.bias" ); - layer.c_attn_attn_w = get_tensor_ex(ctx, blocknamestart + "attn_qkv.weight" ); - layer.c_attn_attn_b = get_tensor_ex(ctx ,blocknamestart + "attn_qkv.bias" ); + block.c_attn_attn_w = get_tensor_ex(ctx, blocknamestart + "attn_qkv.weight" ); + block.c_attn_attn_b = get_tensor_ex(ctx ,blocknamestart + "attn_qkv.bias" ); - layer.c_attn_proj_w = get_tensor_ex(ctx, blocknamestart + "attn_output.weight" ); - layer.c_attn_proj_b = get_tensor_ex(ctx, blocknamestart + "attn_output.bias" ); + block.c_attn_proj_w = get_tensor_ex(ctx, blocknamestart + "attn_output.weight" ); + block.c_attn_proj_b = get_tensor_ex(ctx, blocknamestart + "attn_output.bias" ); - layer.ln_2_g = get_tensor_ex(ctx, blocknamestart + "ffn_norm.weight" ); - layer.ln_2_b = get_tensor_ex(ctx, blocknamestart + "ffn_norm.bias"); + block.ln_2_g = get_tensor_ex(ctx, blocknamestart + "ffn_norm.weight" ); + block.ln_2_b = get_tensor_ex(ctx, blocknamestart + "ffn_norm.bias"); - layer.c_mlp_fc_w = get_tensor_ex(ctx, blocknamestart + "ffn_up.weight" ); - layer.c_mlp_fc_b = get_tensor_ex(ctx, blocknamestart + "ffn_up.bias" ); + block.c_mlp_fc_w = get_tensor_ex(ctx, blocknamestart + "ffn_up.weight" ); + block.c_mlp_fc_b = get_tensor_ex(ctx, blocknamestart + "ffn_up.bias" ); - layer.c_mlp_proj_w = get_tensor_ex(ctx, blocknamestart + "ffn_down.weight" ); - layer.c_mlp_proj_b = get_tensor_ex(ctx, blocknamestart + "ffn_down.bias" ); + block.c_mlp_proj_w = get_tensor_ex(ctx, blocknamestart + "ffn_down.weight" ); + block.c_mlp_proj_b = get_tensor_ex(ctx, blocknamestart + "ffn_down.bias" ); // map by name - model.tensors[blocknamestart + "attn_norm.weight"] = layer.ln_1_g; - model.tensors[blocknamestart + "attn_norm.bias"] = layer.ln_1_b; + model.tensors[blocknamestart + "attn_norm.weight"] = block.ln_1_g; + model.tensors[blocknamestart + "attn_norm.bias"] = block.ln_1_b; - model.tensors[blocknamestart + "attn_qkv.weight"] = layer.c_attn_attn_w; - model.tensors[blocknamestart + "attn_qkv.bias"] = layer.c_attn_attn_b; + model.tensors[blocknamestart + "attn_qkv.weight"] = block.c_attn_attn_w; + model.tensors[blocknamestart + "attn_qkv.bias"] = block.c_attn_attn_b; - model.tensors[blocknamestart + "attn_output.weight"] = layer.c_attn_proj_w; - model.tensors[blocknamestart + "attn_output.bias"] = layer.c_attn_proj_b; + model.tensors[blocknamestart + "attn_output.weight"] = block.c_attn_proj_w; + model.tensors[blocknamestart + "attn_output.bias"] = block.c_attn_proj_b; - model.tensors[blocknamestart + "ffn_norm.weight"] = layer.ln_2_g; - model.tensors[blocknamestart + "ffn_norm.bias"] = layer.ln_2_b; + model.tensors[blocknamestart + "ffn_norm.weight"] = block.ln_2_g; + model.tensors[blocknamestart + "ffn_norm.bias"] = 
block.ln_2_b; - model.tensors[blocknamestart + "ffn_up.weight"] = layer.c_mlp_fc_w; - model.tensors[blocknamestart + "ffn_up.bias"] = layer.c_mlp_fc_b; + model.tensors[blocknamestart + "ffn_up.weight"] = block.c_mlp_fc_w; + model.tensors[blocknamestart + "ffn_up.bias"] = block.c_mlp_fc_b; - model.tensors[blocknamestart + "ffn_down.weight"] = layer.c_mlp_proj_w; - model.tensors[blocknamestart + "ffn_down.bias"] = layer.c_mlp_proj_b; + model.tensors[blocknamestart + "ffn_down.weight"] = block.c_mlp_proj_w; + model.tensors[blocknamestart + "ffn_down.bias"] = block.c_mlp_proj_b; } } @@ -610,10 +610,10 @@ bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2 const auto & hparams = model.hparams; const int n_embd = hparams.n_embd; - const int n_layer = hparams.n_layer; + const int n_block = hparams.n_block; const int n_ctx = hparams.n_ctx; - const int64_t n_mem = n_layer*n_ctx; + const int64_t n_mem = n_block*n_ctx; const int64_t n_elements = n_embd*n_mem; // create the ggml context @@ -647,37 +647,23 @@ bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2 // feed-forward network ggml_tensor * gpt_neox_ff( - const gpt_neox_layer &layer, + const gpt_neox_block &block, ggml_context * ctx0, ggml_tensor * inp) { ggml_tensor * cur = ggml_norm(ctx0, inp); - cur = ggml_add(ctx0, - ggml_mul(ctx0, - ggml_repeat(ctx0, layer.ln_2_g, cur), - cur), - ggml_repeat(ctx0, layer.ln_2_b, cur)); - - cur = ggml_mul_mat(ctx0, - layer.c_mlp_fc_w, - cur); - - cur = ggml_add(ctx0, - ggml_repeat(ctx0, layer.c_mlp_fc_b, cur), - cur); + cur = ggml_add(ctx0, ggml_mul(ctx0, ggml_repeat(ctx0, block.ln_2_g, cur), cur), ggml_repeat(ctx0, block.ln_2_b, cur)); + cur = ggml_mul_mat(ctx0, block.c_mlp_fc_w, cur); + cur = ggml_add(ctx0, ggml_repeat(ctx0, block.c_mlp_fc_b, cur), cur); // GELU activation cur = ggml_gelu(ctx0, cur); // projection // cur = proj_w*cur + proj_b - cur = ggml_mul_mat(ctx0, - layer.c_mlp_proj_w, - cur); + cur = ggml_mul_mat(ctx0, block.c_mlp_proj_w, cur); - cur = ggml_add(ctx0, - ggml_repeat(ctx0, layer.c_mlp_proj_b, cur), - cur); + cur = ggml_add(ctx0, ggml_repeat(ctx0, block.c_mlp_proj_b, cur), cur); return cur; } @@ -701,7 +687,7 @@ bool gpt_neox_eval( const auto & hparams = model.hparams; const int n_embd = hparams.n_embd; - const int n_layer = hparams.n_layer; + const int n_block = hparams.n_block; const int n_ctx = hparams.n_ctx; const int n_head = hparams.n_head; const int n_vocab = hparams.n_vocab; @@ -747,7 +733,7 @@ bool gpt_neox_eval( // wte struct ggml_tensor * inpL = ggml_get_rows(ctx0, model.wte, embd); - for (int il = 0; il < n_layer; ++il) { + for (int il = 0; il < n_block; ++il) { struct ggml_tensor * cur; ggml_set_scratch(ctx0, { 0, scr0_size, scr0, }); @@ -758,22 +744,15 @@ bool gpt_neox_eval( cur = ggml_norm(ctx0, inpL); cur = ggml_add(ctx0, - ggml_mul(ctx0, - ggml_repeat(ctx0, model.layers[il].ln_1_g, cur), - cur), - ggml_repeat(ctx0, model.layers[il].ln_1_b, cur)); + ggml_mul(ctx0, ggml_repeat(ctx0, model.blocks[il].ln_1_g, cur), cur), + ggml_repeat(ctx0, model.blocks[il].ln_1_b, cur)); } // compute QKV { - cur = ggml_mul_mat(ctx0, - model.layers[il].c_attn_attn_w, - cur); - - cur = ggml_add(ctx0, - ggml_repeat(ctx0, model.layers[il].c_attn_attn_b, cur), - cur); + cur = ggml_mul_mat(ctx0, model.blocks[il].c_attn_attn_w, cur); + cur = ggml_add(ctx0, ggml_repeat(ctx0, model.blocks[il].c_attn_attn_b, cur), cur); } struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_3d(ctx0, cur, n_embd/n_head, n_head, N, cur->nb[1]/n_head, cur->nb[1], 
0*sizeof(float)*n_embd/n_head)); @@ -798,10 +777,7 @@ bool gpt_neox_eval( } // Q = Qcur.contiguous().view(n_embd/n_head, n_head, N).permute(0, 2, 1, 3) - struct ggml_tensor * Q = - ggml_permute(ctx0, - Qcur, - 0, 2, 1, 3); + struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); // K = Kmem.view(n_embd/n_head, n_head, n_past + N).permute(0, 2, 1, 3) struct ggml_tensor * K = @@ -842,17 +818,12 @@ bool gpt_neox_eval( struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); // cur = KQV_merged.contiguous().view(n_embd, N) - cur = ggml_cpy(ctx0, - KQV_merged, - ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, N)); + cur = ggml_cpy(ctx0, KQV_merged, ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, N)); // projection { - cur = ggml_mul_mat(ctx0, - model.layers[il].c_attn_proj_w, - cur); - - cur = ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].c_attn_proj_b, cur), cur); + cur = ggml_mul_mat(ctx0, model.blocks[il].c_attn_proj_w, cur); + cur = ggml_add(ctx0, ggml_repeat(ctx0, model.blocks[il].c_attn_proj_b, cur), cur); } } @@ -861,7 +832,7 @@ bool gpt_neox_eval( if (hparams.par_res == 0) { struct ggml_tensor * inpFF = ggml_add(ctx0, cur, inpL); - cur = gpt_neox_ff(model.layers[il], ctx0, inpFF); + cur = gpt_neox_ff(model.blocks[il], ctx0, inpFF); // input for next layer inpL = ggml_add(ctx0, cur, inpFF); @@ -870,7 +841,7 @@ bool gpt_neox_eval( // this is independent of the self-attention result, so it could be done in parallel to the self-attention // note here we pass inpL instead of cur - cur = gpt_neox_ff(model.layers[il], ctx0, inpL); + cur = gpt_neox_ff(model.blocks[il], ctx0, inpL); // layer input + FF cur = ggml_add(ctx0, cur, inpFF); From 8b5f0c506708b06fde1362fc2cbb464222cf44aa Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sun, 13 Aug 2023 00:00:32 +0200 Subject: [PATCH 54/87] constants.py : n_layer --> n_block --- constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/constants.py b/constants.py index ae6e719fb..7fa238a73 100644 --- a/constants.py +++ b/constants.py @@ -18,7 +18,7 @@ KEY_GENERAL_SOURCE_HF_REPO = "general.source.hugginface.repository" # LLM KEY_LLM_CONTEXT_LENGTH = "{llm}.context_length" KEY_LLM_EMBEDDING_LENGTH = "{llm}.embedding_length" -KEY_LLM_LAYER_COUNT = "{llm}.layer_count" +KEY_LLM_BLOCK_COUNT = "{llm}.block_count" KEY_LLM_FEED_FORWARD_LENGTH = "{llm}.feed_forward_length" KEY_LLM_USE_PARALLEL_RESIDUAL = "{llm}.use_parallel_residual" KEY_LLM_TENSOR_DATA_LAYOUT = "{llm}.tensor_data_layout" From d2ce9cfe8d0c75ef4166bac28e93854319219390 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sun, 13 Aug 2023 00:01:20 +0200 Subject: [PATCH 55/87] gguf.py : n_layer --> n_block --- gguf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gguf.py b/gguf.py index 0854418a6..75ba2870a 100644 --- a/gguf.py +++ b/gguf.py @@ -253,9 +253,9 @@ class GGUFWriter: self.add_uint32( constants.KEY_LLM_EMBEDDING_LENGTH.format(llm=llm), length) - def add_layer_count(self, llm: str, length: int): + def add_block_count(self, llm: str, length: int): self.add_uint32( - constants.KEY_LLM_LAYER_COUNT.format(llm=llm), length) + constants.KEY_LLM_BLOCK_COUNT.format(llm=llm), length) def add_feed_forward_length(self, llm: str, length: int): self.add_uint32( From 489616e12652235ecb5c7f3839a2f5e6b617bb82 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sun, 13 Aug 2023 00:02:04 +0200 Subject: [PATCH 56/87] 
convert-gptneox-h5-to-gguf.py : n_layer --> n_block --- convert-gptneox-h5-to-gguf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/convert-gptneox-h5-to-gguf.py b/convert-gptneox-h5-to-gguf.py index 770a5d9ca..ac4b2ff88 100644 --- a/convert-gptneox-h5-to-gguf.py +++ b/convert-gptneox-h5-to-gguf.py @@ -85,7 +85,7 @@ gguf_writer.add_name(last_dir) gguf_writer.add_architecture(llm_arch) gguf_writer.add_context_length(llm_arch, hparams["max_position_embeddings"]) gguf_writer.add_embedding_length(llm_arch, hparams["hidden_size"]) -gguf_writer.add_layer_count(llm_arch, block_count) +gguf_writer.add_block_count(llm_arch, block_count) gguf_writer.add_feed_forward_length(llm_arch, hparams["intermediate_size"]) gguf_writer.add_rope_dimension_count(llm_arch, int( hparams["rotary_pct"]*(hparams["hidden_size"]//hparams["num_attention_heads"])) ) gguf_writer.add_head_count(llm_arch, hparams["num_attention_heads"]) @@ -116,7 +116,7 @@ if Path(dir_model + "/tokenizer.json").is_file(): vocab_size = len( tokenizer_json["model"]["vocab"] ) - # from ggllm.cpp falcon_convert.py + # ref: https://github.com/cmp-nct/ggllm.cpp/blob/master/falcon_convert.py tokenizer = AutoTokenizer.from_pretrained(dir_model) reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} From e91a2224e49aeed6e1382643dcb053ec6c168440 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sun, 13 Aug 2023 00:02:44 +0200 Subject: [PATCH 57/87] convert-llama-h5-to-gguf.py : n_layer --> n_block --- convert-llama-h5-to-gguf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/convert-llama-h5-to-gguf.py b/convert-llama-h5-to-gguf.py index bf6ff6aa7..055b6b78d 100644 --- a/convert-llama-h5-to-gguf.py +++ b/convert-llama-h5-to-gguf.py @@ -76,7 +76,7 @@ gguf_writer.add_name(last_dir) gguf_writer.add_architecture(llm_arch) gguf_writer.add_context_length(llm_arch, hparams["max_position_embeddings"]) gguf_writer.add_embedding_length(llm_arch, hparams["hidden_size"]) -gguf_writer.add_layer_count(llm_arch, block_count) +gguf_writer.add_block_count(llm_arch, block_count) gguf_writer.add_feed_forward_length(llm_arch, hparams["intermediate_size"]) gguf_writer.add_rope_dimension_count(llm_arch, hparams["hidden_size"] // hparams["num_attention_heads"]) gguf_writer.add_head_count(llm_arch, head_count) From c7bd8c147ccd7411d1cd9a2a394d2d9b743d7294 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sun, 13 Aug 2023 00:03:32 +0200 Subject: [PATCH 58/87] gptneox-main.cpp : n_layer --> n_block --- gptneox-main.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gptneox-main.cpp b/gptneox-main.cpp index 7420daf96..4773dfd69 100644 --- a/gptneox-main.cpp +++ b/gptneox-main.cpp @@ -414,7 +414,7 @@ bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2 if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.attention.head_count"); if (keyidx != -1) { hparams.n_head = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } - if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.layer_count"); + if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.block_count"); if (keyidx != -1) { hparams.n_block = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.rope.dimension_count"); From f64d44a9b9581cd58f7ec40f4fa1c3ca5ca18e1e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sun, 13 Aug 2023 00:24:45 +0200 Subject: 
[PATCH 59/87] CUDA: Fixed OpenLLaMA 3b mmq, reduced compile time (#2590) --- CMakeLists.txt | 2 - ggml-cuda.cu | 976 +++++++++++++++++++++++++++++-------------------- 2 files changed, 587 insertions(+), 391 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index d085bc835..dff4942cd 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -69,7 +69,6 @@ option(LLAMA_BLAS "llama: use BLAS" set(LLAMA_BLAS_VENDOR "Generic" CACHE STRING "llama: BLAS library vendor") option(LLAMA_CUBLAS "llama: use CUDA" OFF) #option(LLAMA_CUDA_CUBLAS "llama: use cuBLAS for prompt processing" OFF) -set(LLAMA_CUDA_MMQ_Y "64" CACHE STRING "llama: y tile size for mmq CUDA kernels") option(LLAMA_CUDA_FORCE_DMMV "llama: use dmmv instead of mmvq CUDA kernels" OFF) set(LLAMA_CUDA_DMMV_X "32" CACHE STRING "llama: x stride for dmmv CUDA kernels") set(LLAMA_CUDA_MMV_Y "1" CACHE STRING "llama: y block size for mmv CUDA kernels") @@ -256,7 +255,6 @@ if (LLAMA_CUBLAS) # if (LLAMA_CUDA_CUBLAS) # add_compile_definitions(GGML_CUDA_CUBLAS) # endif() - add_compile_definitions(GGML_CUDA_MMQ_Y=${LLAMA_CUDA_MMQ_Y}) if (LLAMA_CUDA_FORCE_DMMV) add_compile_definitions(GGML_CUDA_FORCE_DMMV) endif() diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 6390b1158..11f67aec8 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -1399,6 +1399,7 @@ template static __device__ __forceinline__ float vec_dot_q4_0_q8_1_imp // second part effectively subtracts 8 from each quant value return d4 * (sumi * ds8f.x - (8*vdr/QI4_0) * ds8f.y); #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1436,6 +1437,7 @@ template static __device__ __forceinline__ float vec_dot_q4_1_q8_1_imp // scale second part of sum by QI8_1/(vdr * QR4_1) to compensate for multiple threads adding it return sumi * d4d8 + m4s8 / (QI8_1 / (vdr * QR4_1)); #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1471,6 +1473,7 @@ template static __device__ __forceinline__ float vec_dot_q5_0_q8_1_imp // second part effectively subtracts 16 from each quant value return d5 * (sumi * ds8f.x - (16*vdr/QI5_0) * ds8f.y); #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1516,6 +1519,7 @@ template static __device__ __forceinline__ float vec_dot_q5_1_q8_1_imp return sumi*d5d8 + m5s8 / (QI5_1 / vdr); #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1537,6 +1541,7 @@ template static __device__ __forceinline__ float vec_dot_q8_0_q8_1_imp return d8_0*d8_1 * sumi; #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1567,6 +1572,7 @@ template static __device__ __forceinline__ float vec_dot_q8_1_q8_1_imp // scale second part of sum by QI8_1/ vdr to compensate for multiple threads adding it return sumi*d8d8 + m8s8 / (QI8_1 / vdr); #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1602,6 +1608,7 @@ static __device__ __forceinline__ float vec_dot_q2_K_q8_1_impl_mmvq( return dm2f.x*sumf_d - dm2f.y*sumf_m; #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1639,6 +1646,7 @@ static __device__ __forceinline__ float vec_dot_q2_K_q8_1_impl_mmq( return d8 * (dm2f.x*sumi_d - dm2f.y*sumi_m); #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A 
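    // The assert(false) added to each of these fallback branches marks them as
    // unreachable: kernels are never launched for devices below MIN_CC_DP4A, so hitting
    // this path would indicate a dispatch bug; the `return 0.0f` after it exists only to
    // satisfy the compiler, as the adjacent comment says.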
} @@ -1679,6 +1687,7 @@ static __device__ __forceinline__ float vec_dot_q3_K_q8_1_impl_mmvq( return d3 * sumf; #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1704,6 +1713,7 @@ static __device__ __forceinline__ float vec_dot_q3_K_q8_1_impl_mmq( return d3*d8 * sumi; #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1737,6 +1747,7 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1_impl_vmmq( return dm4f.x*sumf_d - dm4f.y*sumf_m; #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1772,6 +1783,7 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1_impl_mmq( return dm4f.x*sumf_d - dm4f.y*sumf_m; #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1812,6 +1824,7 @@ static __device__ __forceinline__ float vec_dot_q5_K_q8_1_impl( return dm5f.x*sumf_d - dm5f.y*sumf_m; #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1842,6 +1855,7 @@ static __device__ __forceinline__ float vec_dot_q6_K_q8_1_impl_mmvq( return d*sumf; #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -1873,6 +1887,7 @@ static __device__ __forceinline__ float vec_dot_q6_K_q8_1_impl_mmq( return d6 * sumf_d; #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2722,6 +2737,7 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1( return dall * sumf_d - dmin * sumf_m; #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A @@ -2905,6 +2921,7 @@ static __device__ __forceinline__ float vec_dot_q5_K_q8_1( return d * sumf_d; #else + assert(false); return 0.0f; // only to satisfy the compiler #endif // __CUDA_ARCH__ >= MIN_CC_DP4A @@ -3135,7 +3152,7 @@ static __device__ __forceinline__ float vec_dot_q6_K_q8_1_mul_mat( template -static __global__ void mul_mat_q( +static __device__ __forceinline__ void mul_mat_q( const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst) { @@ -3150,7 +3167,6 @@ static __global__ void mul_mat_q( const int row_dst_0 = blockIdx.x*mmq_y; const int & row_x_0 = row_dst_0; - const int row_dst = row_dst_0 + threadIdx.x; const int col_dst_0 = blockIdx.y*mmq_x; const int & col_y_0 = col_dst_0; @@ -3223,11 +3239,7 @@ static __global__ void mul_mat_q( } } - - if (row_dst >= nrows_dst) { - return; - } - +#pragma unroll for (int j = 0; j < mmq_x; j += nwarps) { const int col_dst = col_dst_0 + j + threadIdx.y; @@ -3235,12 +3247,359 @@ static __global__ void mul_mat_q( return; } +#pragma unroll for (int i = 0; i < mmq_y; i += WARP_SIZE) { - dst[col_dst*nrows_dst + row_dst + i] = sum[i/WARP_SIZE][j/nwarps]; + const int row_dst = row_dst_0 + threadIdx.x + i; + + if (row_dst >= nrows_dst) { + continue; + } + + dst[col_dst*nrows_dst + row_dst] = sum[i/WARP_SIZE][j/nwarps]; } } } +#define MMQ_X_Q4_0_AMPERE 64 +#define MMQ_Y_Q4_0_AMPERE 128 +#define NWARPS_Q4_0_AMPERE 4 +#define MMQ_X_Q4_0_PASCAL 64 +#define MMQ_Y_Q4_0_PASCAL 64 +#define NWARPS_Q4_0_PASCAL 8 + +template static __global__ void mul_mat_q4_0( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int 
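    // Each quantization type now gets its own thin __global__ wrapper (mul_mat_q4_0 here,
    // then q4_1, q5_0, ... below) whose tile shape (mmq_x, mmq_y) and warp count are fixed
    // at compile time from the MMQ_*_AMPERE / MMQ_*_PASCAL constants, instead of being
    // template parameters instantiated at every host call site; fewer instantiations is
    // presumably where the reduced compile time in the commit title comes from.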
ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst) { + +#if __CUDA_ARCH__ >= CC_TURING + const int mmq_x = MMQ_X_Q4_0_AMPERE; + const int mmq_y = MMQ_Y_Q4_0_AMPERE; + const int nwarps = NWARPS_Q4_0_AMPERE; + + mul_mat_q, + load_tiles_q4_0, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + +#elif __CUDA_ARCH__ >= MIN_CC_DP4A + const int mmq_x = MMQ_X_Q4_0_PASCAL; + const int mmq_y = MMQ_Y_Q4_0_PASCAL; + const int nwarps = NWARPS_Q4_0_PASCAL; + + mul_mat_q, + load_tiles_q4_0, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); +#else + (void) vec_dot_q4_0_q8_1_mul_mat; + assert(false); +#endif // __CUDA_ARCH__ >= CC_TURING +} + +#define MMQ_X_Q4_1_AMPERE 64 +#define MMQ_Y_Q4_1_AMPERE 128 +#define NWARPS_Q4_1_AMPERE 4 +#define MMQ_X_Q4_1_PASCAL 64 +#define MMQ_Y_Q4_1_PASCAL 64 +#define NWARPS_Q4_1_PASCAL 8 + +template static __global__ void mul_mat_q4_1( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst) { + +#if __CUDA_ARCH__ >= CC_TURING + const int mmq_x = MMQ_X_Q4_1_AMPERE; + const int mmq_y = MMQ_Y_Q4_1_AMPERE; + const int nwarps = NWARPS_Q4_1_AMPERE; + + mul_mat_q, + load_tiles_q4_1, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + +#elif __CUDA_ARCH__ >= MIN_CC_DP4A + const int mmq_x = MMQ_X_Q4_1_PASCAL; + const int mmq_y = MMQ_Y_Q4_1_PASCAL; + const int nwarps = NWARPS_Q4_1_PASCAL; + + mul_mat_q, + load_tiles_q4_1, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); +#else + (void) vec_dot_q4_1_q8_1_mul_mat; + assert(false); +#endif // __CUDA_ARCH__ >= CC_TURING +} + +#define MMQ_X_Q5_0_AMPERE 128 +#define MMQ_Y_Q5_0_AMPERE 64 +#define NWARPS_Q5_0_AMPERE 4 +#define MMQ_X_Q5_0_PASCAL 64 +#define MMQ_Y_Q5_0_PASCAL 64 +#define NWARPS_Q5_0_PASCAL 8 + +template static __global__ void mul_mat_q5_0( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst) { + +#if __CUDA_ARCH__ >= CC_TURING + const int mmq_x = MMQ_X_Q5_0_AMPERE; + const int mmq_y = MMQ_Y_Q5_0_AMPERE; + const int nwarps = NWARPS_Q5_0_AMPERE; + + mul_mat_q, + load_tiles_q5_0, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + +#elif __CUDA_ARCH__ >= MIN_CC_DP4A + const int mmq_x = MMQ_X_Q5_0_PASCAL; + const int mmq_y = MMQ_Y_Q5_0_PASCAL; + const int nwarps = NWARPS_Q5_0_PASCAL; + + mul_mat_q, + load_tiles_q5_0, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); +#else + (void) vec_dot_q5_0_q8_1_mul_mat; + assert(false); +#endif // __CUDA_ARCH__ >= CC_TURING +} + +#define MMQ_X_Q5_1_AMPERE 128 +#define MMQ_Y_Q5_1_AMPERE 64 +#define NWARPS_Q5_1_AMPERE 4 +#define MMQ_X_Q5_1_PASCAL 64 +#define MMQ_Y_Q5_1_PASCAL 64 +#define NWARPS_Q5_1_PASCAL 8 + +template static __global__ void mul_mat_q5_1( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst) { + +#if __CUDA_ARCH__ >= CC_TURING + const int mmq_x = MMQ_X_Q5_1_AMPERE; + const int mmq_y = 
MMQ_Y_Q5_1_AMPERE; + const int nwarps = NWARPS_Q5_1_AMPERE; + + mul_mat_q, + load_tiles_q5_1, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + +#elif __CUDA_ARCH__ >= MIN_CC_DP4A + const int mmq_x = MMQ_X_Q5_1_PASCAL; + const int mmq_y = MMQ_Y_Q5_1_PASCAL; + const int nwarps = NWARPS_Q5_1_PASCAL; + + mul_mat_q, + load_tiles_q5_1, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); +#else + (void) vec_dot_q5_1_q8_1_mul_mat; + assert(false); +#endif // __CUDA_ARCH__ >= CC_TURING +} + +#define MMQ_X_Q8_0_AMPERE 128 +#define MMQ_Y_Q8_0_AMPERE 64 +#define NWARPS_Q8_0_AMPERE 4 +#define MMQ_X_Q8_0_PASCAL 64 +#define MMQ_Y_Q8_0_PASCAL 64 +#define NWARPS_Q8_0_PASCAL 8 + +template static __global__ void mul_mat_q8_0( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst) { + +#if __CUDA_ARCH__ >= CC_TURING + const int mmq_x = MMQ_X_Q8_0_AMPERE; + const int mmq_y = MMQ_Y_Q8_0_AMPERE; + const int nwarps = NWARPS_Q8_0_AMPERE; + + mul_mat_q, + load_tiles_q8_0, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + +#elif __CUDA_ARCH__ >= MIN_CC_DP4A + const int mmq_x = MMQ_X_Q8_0_PASCAL; + const int mmq_y = MMQ_Y_Q8_0_PASCAL; + const int nwarps = NWARPS_Q8_0_PASCAL; + + mul_mat_q, + load_tiles_q8_0, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); +#else + (void) vec_dot_q8_0_q8_1_mul_mat; + assert(false); +#endif // __CUDA_ARCH__ >= CC_TURING +} + +#define MMQ_X_Q2_K_AMPERE 64 +#define MMQ_Y_Q2_K_AMPERE 128 +#define NWARPS_Q2_K_AMPERE 4 +#define MMQ_X_Q2_K_PASCAL 64 +#define MMQ_Y_Q2_K_PASCAL 64 +#define NWARPS_Q2_K_PASCAL 8 + +template static __global__ void mul_mat_q2_K( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst) { + +#if __CUDA_ARCH__ >= CC_TURING + const int mmq_x = MMQ_X_Q2_K_AMPERE; + const int mmq_y = MMQ_Y_Q2_K_AMPERE; + const int nwarps = NWARPS_Q2_K_AMPERE; + + mul_mat_q, + load_tiles_q2_K, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + +#elif __CUDA_ARCH__ >= MIN_CC_DP4A + const int mmq_x = MMQ_X_Q2_K_PASCAL; + const int mmq_y = MMQ_Y_Q2_K_PASCAL; + const int nwarps = NWARPS_Q2_K_PASCAL; + + mul_mat_q, + load_tiles_q2_K, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); +#else + (void) vec_dot_q2_K_q8_1_mul_mat; + assert(false); +#endif // __CUDA_ARCH__ >= CC_TURING +} + +#define MMQ_X_Q3_K_AMPERE 128 +#define MMQ_Y_Q3_K_AMPERE 128 +#define NWARPS_Q3_K_AMPERE 4 +#define MMQ_X_Q3_K_PASCAL 64 +#define MMQ_Y_Q3_K_PASCAL 64 +#define NWARPS_Q3_K_PASCAL 8 + +template static __global__ void mul_mat_q3_K( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst) { + +#if __CUDA_ARCH__ >= CC_TURING + const int mmq_x = MMQ_X_Q3_K_AMPERE; + const int mmq_y = MMQ_Y_Q3_K_AMPERE; + const int nwarps = NWARPS_Q3_K_AMPERE; + + mul_mat_q, + load_tiles_q3_K, VDR_Q3_K_Q8_1_MMQ, vec_dot_q3_K_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, 
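    // Parameter convention shared by all of these kernels: ncols_x == nrows_y is the
    // shared dot-product dimension, and nrows_dst is the leading dimension used to index
    // dst (dst[col_dst*nrows_dst + row_dst] in the body above), which appears to differ
    // from nrows_x when only a row slice of the output is computed, e.g. multi-GPU splits.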
nrows_dst); + +#elif __CUDA_ARCH__ >= MIN_CC_DP4A + const int mmq_x = MMQ_X_Q3_K_PASCAL; + const int mmq_y = MMQ_Y_Q3_K_PASCAL; + const int nwarps = NWARPS_Q3_K_PASCAL; + + mul_mat_q, + load_tiles_q3_K, VDR_Q3_K_Q8_1_MMQ, vec_dot_q3_K_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); +#else + (void) vec_dot_q3_K_q8_1_mul_mat; + assert(false); +#endif // __CUDA_ARCH__ >= CC_TURING +} + +#define MMQ_X_Q4_K_AMPERE 64 +#define MMQ_Y_Q4_K_AMPERE 128 +#define NWARPS_Q4_K_AMPERE 4 +#define MMQ_X_Q4_K_PASCAL 32 +#define MMQ_Y_Q4_K_PASCAL 64 +#define NWARPS_Q4_K_PASCAL 8 + +template static __global__ void mul_mat_q4_K( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst) { + +#if __CUDA_ARCH__ >= CC_TURING + const int mmq_x = MMQ_X_Q4_K_AMPERE; + const int mmq_y = MMQ_Y_Q4_K_AMPERE; + const int nwarps = NWARPS_Q4_K_AMPERE; + + mul_mat_q, + load_tiles_q4_K, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + +#elif __CUDA_ARCH__ >= MIN_CC_DP4A + const int mmq_x = MMQ_X_Q4_K_PASCAL; + const int mmq_y = MMQ_Y_Q4_K_PASCAL; + const int nwarps = NWARPS_Q4_K_PASCAL; + + mul_mat_q, + load_tiles_q4_K, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); +#else + (void) vec_dot_q4_K_q8_1_mul_mat; + assert(false); +#endif // __CUDA_ARCH__ >= CC_TURING +} + +#define MMQ_X_Q5_K_AMPERE 64 +#define MMQ_Y_Q5_K_AMPERE 128 +#define NWARPS_Q5_K_AMPERE 4 +#define MMQ_X_Q5_K_PASCAL 64 +#define MMQ_Y_Q5_K_PASCAL 64 +#define NWARPS_Q5_K_PASCAL 8 + +template static __global__ void mul_mat_q5_K( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst) { + +#if __CUDA_ARCH__ >= CC_TURING + const int mmq_x = MMQ_X_Q5_K_AMPERE; + const int mmq_y = MMQ_Y_Q5_K_AMPERE; + const int nwarps = NWARPS_Q5_K_AMPERE; + + mul_mat_q, + load_tiles_q5_K, VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + +#elif __CUDA_ARCH__ >= MIN_CC_DP4A + const int mmq_x = MMQ_X_Q5_K_PASCAL; + const int mmq_y = MMQ_Y_Q5_K_PASCAL; + const int nwarps = NWARPS_Q5_K_PASCAL; + + mul_mat_q, + load_tiles_q5_K, VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); +#else + (void) vec_dot_q5_K_q8_1_mul_mat; + assert(false); +#endif // __CUDA_ARCH__ >= CC_TURING +} + +#define MMQ_X_Q6_K_AMPERE 64 +#define MMQ_Y_Q6_K_AMPERE 64 +#define NWARPS_Q6_K_AMPERE 4 +#define MMQ_X_Q6_K_PASCAL 32 +#define MMQ_Y_Q6_K_PASCAL 64 +#define NWARPS_Q6_K_PASCAL 8 + +template static __global__ void mul_mat_q6_K( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst) { + +#if __CUDA_ARCH__ >= CC_TURING + const int mmq_x = MMQ_X_Q6_K_AMPERE; + const int mmq_y = MMQ_Y_Q6_K_AMPERE; + const int nwarps = NWARPS_Q6_K_AMPERE; + + mul_mat_q, + load_tiles_q6_K, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + +#elif __CUDA_ARCH__ >= MIN_CC_DP4A + const int mmq_x = MMQ_X_Q6_K_PASCAL; + const int mmq_y = MMQ_Y_Q6_K_PASCAL; + const int nwarps = NWARPS_Q6_K_PASCAL; + + mul_mat_q, + 
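    // All of these wrappers funnel into the single mul_mat_q body above (now a
    // __device__ __forceinline__ function rather than a __global__ kernel); only the
    // compile-time tile shape and the per-type tile-loading and dot-product callbacks
    // such as the ones named here differ.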
load_tiles_q6_K, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); +#else + (void) vec_dot_q6_K_q8_1_mul_mat; + assert(false); +#endif // __CUDA_ARCH__ >= CC_TURING +} + template static __global__ void mul_mat_vec_q(const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, const int ncols, const int nrows) { const int row = blockIdx.y*blockDim.y + threadIdx.y; @@ -3942,48 +4301,32 @@ static void ggml_mul_mat_q4_0_q8_1_cuda( CUDA_CHECK(cudaGetDevice(&id)); const int compute_capability = g_compute_capabilities[id]; + int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_TURING) { - const int mmq_x = 64; - const int mmq_y = 128; - const int nwarps = 4; - - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); - - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q4_0, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q4_0, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + mmq_x = MMQ_X_Q4_0_AMPERE; + mmq_y = MMQ_Y_Q4_0_AMPERE; + nwarps = NWARPS_Q4_0_AMPERE; + } else if (compute_capability >= MIN_CC_DP4A) { + mmq_x = MMQ_X_Q4_0_PASCAL; + mmq_y = MMQ_Y_Q4_0_PASCAL; + nwarps = NWARPS_Q4_0_PASCAL; } else { - const int mmq_x = 64; - const int mmq_y = 64; - const int nwarps = 4; + GGML_ASSERT(false); + } - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q4_0, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q4_0, VDR_Q4_0_Q8_1_MMQ, vec_dot_q4_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q4_0<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q4_0<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); } } @@ -3995,49 +4338,32 @@ static void ggml_mul_mat_q4_1_q8_1_cuda( CUDA_CHECK(cudaGetDevice(&id)); const int compute_capability = g_compute_capabilities[id]; + int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_TURING) { - const int mmq_x = 64; - const int mmq_y = 128; - const int nwarps = 4; - - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); - - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q4_1, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const 
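    // Host-side, each launcher now only selects the matching MMQ_*/NWARPS_* constants to
    // size the grid, asserts on pre-DP4A devices via GGML_ASSERT(false), and picks the
    // need_check template flag: bounds checks in the kernel are needed exactly when
    // nrows_x is not a multiple of mmq_y.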
bool need_check = true; - mul_mat_q, - load_tiles_q4_1, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + mmq_x = MMQ_X_Q4_1_AMPERE; + mmq_y = MMQ_Y_Q4_1_AMPERE; + nwarps = NWARPS_Q4_1_AMPERE; + } else if (compute_capability >= MIN_CC_DP4A) { + mmq_x = MMQ_X_Q4_1_PASCAL; + mmq_y = MMQ_Y_Q4_1_PASCAL; + nwarps = NWARPS_Q4_1_PASCAL; } else { - const int mmq_x = 64; - const int mmq_y = 64; - const int nwarps = 8; + GGML_ASSERT(false); + } - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); - - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q4_1, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q4_1, VDR_Q4_1_Q8_1_MMQ, vec_dot_q4_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q4_1<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q4_1<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); } } @@ -4049,48 +4375,32 @@ static void ggml_mul_mat_q5_0_q8_1_cuda( CUDA_CHECK(cudaGetDevice(&id)); const int compute_capability = g_compute_capabilities[id]; + int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_TURING) { - const int mmq_x = 128; - const int mmq_y = 64; - const int nwarps = 4; - - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); - - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q5_0, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q5_0, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + mmq_x = MMQ_X_Q5_0_AMPERE; + mmq_y = MMQ_Y_Q5_0_AMPERE; + nwarps = NWARPS_Q5_0_AMPERE; + } else if (compute_capability >= MIN_CC_DP4A) { + mmq_x = MMQ_X_Q5_0_PASCAL; + mmq_y = MMQ_Y_Q5_0_PASCAL; + nwarps = NWARPS_Q5_0_PASCAL; } else { - const int mmq_x = 64; - const int mmq_y = 64; - const int nwarps = 8; + GGML_ASSERT(false); + } - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q5_0, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - 
mul_mat_q, - load_tiles_q5_0, VDR_Q5_0_Q8_1_MMQ, vec_dot_q5_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q5_0<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q5_0<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); } } @@ -4102,48 +4412,32 @@ static void ggml_mul_mat_q5_1_q8_1_cuda( CUDA_CHECK(cudaGetDevice(&id)); const int compute_capability = g_compute_capabilities[id]; + int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_TURING) { - const int mmq_x = 128; - const int mmq_y = 64; - const int nwarps = 8; - - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); - - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q5_1, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q5_1, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + mmq_x = MMQ_X_Q5_1_AMPERE; + mmq_y = MMQ_Y_Q5_1_AMPERE; + nwarps = NWARPS_Q5_1_AMPERE; + } else if (compute_capability >= MIN_CC_DP4A) { + mmq_x = MMQ_X_Q5_1_PASCAL; + mmq_y = MMQ_Y_Q5_1_PASCAL; + nwarps = NWARPS_Q5_1_PASCAL; } else { - const int mmq_x = 64; - const int mmq_y = 64; - const int nwarps = 8; + GGML_ASSERT(false); + } - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q5_1, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q5_1, VDR_Q5_1_Q8_1_MMQ, vec_dot_q5_1_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q5_1<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q5_1<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); } } @@ -4155,48 +4449,32 @@ static void ggml_mul_mat_q8_0_q8_1_cuda( CUDA_CHECK(cudaGetDevice(&id)); const int compute_capability = g_compute_capabilities[id]; + int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_TURING) { - const int mmq_x = 128; - const int mmq_y = 64; - const int nwarps = 4; - - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); - - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q8_0, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - 
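    // Grid sizing rounds both output dimensions up to whole tiles:
    //   block_num_x = (nrows_x + mmq_y - 1) / mmq_y
    //   block_num_y = (ncols_y + mmq_x - 1) / mmq_x
    // With hypothetical sizes nrows_x = 1000 and mmq_y = 128 that gives
    // (1000 + 127) / 128 = 8 row tiles, and 1000 % 128 != 0 forces need_check = true so
    // the last partial tile is bounds-checked.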
mul_mat_q, - load_tiles_q8_0, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + mmq_x = MMQ_X_Q8_0_AMPERE; + mmq_y = MMQ_Y_Q8_0_AMPERE; + nwarps = NWARPS_Q8_0_AMPERE; + } else if (compute_capability >= MIN_CC_DP4A) { + mmq_x = MMQ_X_Q8_0_PASCAL; + mmq_y = MMQ_Y_Q8_0_PASCAL; + nwarps = NWARPS_Q8_0_PASCAL; } else { - const int mmq_x = 64; - const int mmq_y = 64; - const int nwarps = 8; + GGML_ASSERT(false); + } - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q8_0, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q8_0, VDR_Q8_0_Q8_1_MMQ, vec_dot_q8_0_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q8_0<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q8_0<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); } } @@ -4208,48 +4486,32 @@ static void ggml_mul_mat_q2_K_q8_1_cuda( CUDA_CHECK(cudaGetDevice(&id)); const int compute_capability = g_compute_capabilities[id]; + int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_TURING) { - const int mmq_x = 64; - const int mmq_y = 128; - const int nwarps = 4; - - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); - - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q2_K, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q2_K, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + mmq_x = MMQ_X_Q2_K_AMPERE; + mmq_y = MMQ_Y_Q2_K_AMPERE; + nwarps = NWARPS_Q2_K_AMPERE; + } else if (compute_capability >= MIN_CC_DP4A) { + mmq_x = MMQ_X_Q2_K_PASCAL; + mmq_y = MMQ_Y_Q2_K_PASCAL; + nwarps = NWARPS_Q2_K_PASCAL; } else { - const int mmq_x = 64; - const int mmq_y = 64; - const int nwarps = 8; + GGML_ASSERT(false); + } - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q2_K, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - 
load_tiles_q2_K, VDR_Q2_K_Q8_1_MMQ, vec_dot_q2_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q2_K<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q2_K<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); } } @@ -4261,48 +4523,32 @@ static void ggml_mul_mat_q3_K_q8_1_cuda( CUDA_CHECK(cudaGetDevice(&id)); const int compute_capability = g_compute_capabilities[id]; + int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_TURING) { - const int mmq_x = 128; - const int mmq_y = 128; - const int nwarps = 4; - - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); - - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q3_K, VDR_Q3_K_Q8_1_MMQ, vec_dot_q3_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q3_K, VDR_Q3_K_Q8_1_MMQ, vec_dot_q3_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + mmq_x = MMQ_X_Q3_K_AMPERE; + mmq_y = MMQ_Y_Q3_K_AMPERE; + nwarps = NWARPS_Q3_K_AMPERE; + } else if (compute_capability >= MIN_CC_DP4A) { + mmq_x = MMQ_X_Q3_K_PASCAL; + mmq_y = MMQ_Y_Q3_K_PASCAL; + nwarps = NWARPS_Q3_K_PASCAL; } else { - const int mmq_x = 64; - const int mmq_y = 64; - const int nwarps = 8; + GGML_ASSERT(false); + } - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q3_K, VDR_Q3_K_Q8_1_MMQ, vec_dot_q3_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q3_K, VDR_Q3_K_Q8_1_MMQ, vec_dot_q3_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q3_K<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q3_K<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); } } @@ -4314,48 +4560,32 @@ static void ggml_mul_mat_q4_K_q8_1_cuda( CUDA_CHECK(cudaGetDevice(&id)); const int compute_capability = g_compute_capabilities[id]; + int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_TURING) { - const int mmq_x = 64; - const int mmq_y = 128; - const int nwarps = 4; - - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); - - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q4_K, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - 
load_tiles_q4_K, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + mmq_x = MMQ_X_Q4_K_AMPERE; + mmq_y = MMQ_Y_Q4_K_AMPERE; + nwarps = NWARPS_Q4_K_AMPERE; + } else if (compute_capability >= MIN_CC_DP4A) { + mmq_x = MMQ_X_Q4_K_PASCAL; + mmq_y = MMQ_Y_Q4_K_PASCAL; + nwarps = NWARPS_Q4_K_PASCAL; } else { - const int mmq_x = 32; - const int mmq_y = 64; - const int nwarps = 8; + GGML_ASSERT(false); + } - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q4_K, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q4_K, VDR_Q4_K_Q8_1_MMQ, vec_dot_q4_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q4_K<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q4_K<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); } } @@ -4367,48 +4597,32 @@ static void ggml_mul_mat_q5_K_q8_1_cuda( CUDA_CHECK(cudaGetDevice(&id)); const int compute_capability = g_compute_capabilities[id]; + int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_TURING) { - const int mmq_x = 64; - const int mmq_y = 128; - const int nwarps = 4; - - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); - - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q5_K, VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q5_K, VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + mmq_x = MMQ_X_Q5_K_AMPERE; + mmq_y = MMQ_Y_Q5_K_AMPERE; + nwarps = NWARPS_Q5_K_AMPERE; + } else if (compute_capability >= MIN_CC_DP4A) { + mmq_x = MMQ_X_Q5_K_PASCAL; + mmq_y = MMQ_Y_Q5_K_PASCAL; + nwarps = NWARPS_Q5_K_PASCAL; } else { - const int mmq_x = 64; - const int mmq_y = 64; - const int nwarps = 8; + GGML_ASSERT(false); + } - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q5_K, VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q5_K, 
VDR_Q5_K_Q8_1_MMQ, vec_dot_q5_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q5_K<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q5_K<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); } } @@ -4420,48 +4634,32 @@ static void ggml_mul_mat_q6_K_q8_1_cuda( CUDA_CHECK(cudaGetDevice(&id)); const int compute_capability = g_compute_capabilities[id]; + int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_TURING) { - const int mmq_x = 64; - const int mmq_y = 64; - const int nwarps = 4; - - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); - - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q6_K, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q6_K, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + mmq_x = MMQ_X_Q6_K_AMPERE; + mmq_y = MMQ_Y_Q6_K_AMPERE; + nwarps = NWARPS_Q6_K_AMPERE; + } else if (compute_capability >= MIN_CC_DP4A) { + mmq_x = MMQ_X_Q6_K_PASCAL; + mmq_y = MMQ_Y_Q6_K_PASCAL; + nwarps = NWARPS_Q6_K_PASCAL; } else { - const int mmq_x = 32; - const int mmq_y = 64; - const int nwarps = 8; + GGML_ASSERT(false); + } - const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; - const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; - const dim3 block_nums(block_num_x, block_num_y, 1); - const dim3 block_dims(WARP_SIZE, nwarps, 1); + const int block_num_x = (nrows_x + mmq_y - 1) / mmq_y; + const int block_num_y = (ncols_y + mmq_x - 1) / mmq_x; + const dim3 block_nums(block_num_x, block_num_y, 1); + const dim3 block_dims(WARP_SIZE, nwarps, 1); - if (nrows_x % mmq_y == 0) { - const bool need_check = false; - mul_mat_q, - load_tiles_q6_K, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } else { - const bool need_check = true; - mul_mat_q, - load_tiles_q6_K, VDR_Q6_K_Q8_1_MMQ, vec_dot_q6_K_q8_1_mul_mat> - <<>>(vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); - } + if (nrows_x % mmq_y == 0) { + const bool need_check = false; + mul_mat_q6_K<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); + } else { + const bool need_check = true; + mul_mat_q6_K<<>> + (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); } } From 9bf5a7efcb55fb96c08e1fab44e2ebe61964dba6 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sun, 13 Aug 2023 01:27:38 +0200 Subject: [PATCH 60/87] Update gguf_tensor_map.py --- gguf_tensor_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gguf_tensor_map.py b/gguf_tensor_map.py index 644c5914d..d73788bb4 100644 --- a/gguf_tensor_map.py +++ b/gguf_tensor_map.py @@ -68,7 +68,7 @@ def get_tensor_map( n_blocks : int): mapped_to = "transformer.blocks."+str(i)+".ffn_norm" tensor_map["gpt_neox.layers."+str(i)+".post_attention_layernorm"] = mapped_to # gptneox tensor_map["transformer.h."+str(i)+".ln_2"] = mapped_to # gpt2 - tensor_map[" transformer.blocks."+str(i)+".norm_2"] = mapped_to # mpt + 
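    # The mpt key in the removed line above was written with a stray leading space,
    # " transformer.blocks.", so the norm_2 tensors of mpt checkpoints could never match
    # the map; the corrected key follows.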
tensor_map["transformer.blocks."+str(i)+".norm_2"] = mapped_to # mpt tensor_map["model.layers."+str(i)+".post_attention_layernorm"] = mapped_to # llama-hf tensor_map["layers."+str(i)+".ffn_norm"] = mapped_to # llama-pth # Feed-forward up From e3d1f07eb1d18e2545a5f1a2bafc86b1fd1db053 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sun, 13 Aug 2023 12:18:34 +0200 Subject: [PATCH 61/87] convert-gptneox-h5-to-gguf.py : load model in parts to save memory --- convert-gptneox-h5-to-gguf.py | 147 ++++++++++++++++++++++++---------- 1 file changed, 106 insertions(+), 41 deletions(-) diff --git a/convert-gptneox-h5-to-gguf.py b/convert-gptneox-h5-to-gguf.py index ac4b2ff88..a8a0c7e2d 100644 --- a/convert-gptneox-h5-to-gguf.py +++ b/convert-gptneox-h5-to-gguf.py @@ -1,4 +1,4 @@ -# Quick and dirty HF gptneox--> gguf conversion +# HF gptneox--> gguf conversion import gguf import gguf_tensor_map as tmap @@ -9,7 +9,8 @@ import json import numpy as np from typing import Any, List from pathlib import Path -from transformers import AutoTokenizer, AutoModelForCausalLM +import torch +from transformers import AutoTokenizer # ref: https://github.com/openai/gpt-2/blob/master/src/encoder.py def bytes_to_unicode(): @@ -33,6 +34,15 @@ def bytes_to_unicode(): cs = [chr(n) for n in cs] return dict(zip(bs, cs)) +def count_model_parts(dir_model: str) -> int: + num_parts = 0 + for filename in os.listdir(dir_model): + if filename.startswith("pytorch_model-"): + num_parts += 1 + + if num_parts > 0: + print("gguf: found " + str(num_parts) + " model parts") + return num_parts if len(sys.argv) < 3: print("Usage: convert-h5-to-ggml.py dir-model ftype\n") @@ -70,9 +80,8 @@ if hparams["architectures"][0] != "GPTNeoXForCausalLM": print("Model architecture not supported: " + hparams["architectures"][0] ) sys.exit() - -model = AutoModelForCausalLM.from_pretrained(dir_model, low_cpu_mem_usage=True, trust_remote_code=True) -list_vars = model.state_dict() +# get number of model parts +num_parts = count_model_parts(dir_model) gguf_writer = gguf.GGUFWriter.open(fname_out) @@ -183,37 +192,58 @@ tensor_map = tmap.get_tensor_map(block_count) # tensor info print("gguf: get tensor metadata") -for name in list_vars.keys(): - data = list_vars[name].squeeze().numpy() +if num_parts == 0: + part_names = ("pytorch_model.bin",) +else: + part_names = ( + f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) + ) - # we don't need these - if name.endswith(".attention.masked_bias") or name.endswith(".attention.bias") or name.endswith(".attention.rotary_emb.inv_freq"): - continue +for part_name in part_names: + print("gguf: loading model part '"+ part_name + "'") + model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") - # map tensor names - if name.endswith(".weight") and name[:-7] in tensor_map: - name = tensor_map[name[:-7]] + ".weight" - elif name.endswith(".bias") and name[:-5] in tensor_map: - name = tensor_map[name[:-5]] + ".bias" - else: - print( "Can not map tensor '" + name + "'" ) - sys.exit() + for name in model_part.keys(): + data = model_part[name] - n_dims = len(data.shape) - data_dtype = data.dtype + # we don't need these + if name.endswith(".attention.masked_bias") or name.endswith(".attention.bias") or name.endswith(".attention.rotary_emb.inv_freq"): + continue -# print( name + " dims " + str(n_dims) + " dtype " + str(data.dtype) ) - - if data.dtype != np.float16 and data.dtype != np.float32: # convert any unsupported data types to float32 - data_dtype = 
np.float32 - elif ftype == 1 and data.dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + if data.dtype != torch.float16 and data.dtype != torch.float32: + data = data.to(torch.float32) + + data = data.squeeze().numpy() + + # map tensor names + if name.endswith(".weight") and name[:-7] in tensor_map: + name = tensor_map[name[:-7]] + ".weight" + elif name.endswith(".bias") and name[:-5] in tensor_map: + name = tensor_map[name[:-5]] + ".bias" + else: + print( "Can not map tensor '" + name + "'" ) + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if ftype == 0 and data.dtype == np.float16: + data_dtype = np.float32 + + # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 + if ftype == 1 and data.dtype == np.float16 and n_dims == 1: + data_dtype = np.float32 + # if f16 desired, convert any float32 2-dim weight tensors to float16 - data_dtype = np.float16 + if ftype == 1 and data.dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data_dtype = np.float16 - data_nbytes = data.size * 2 if data_dtype == np.float16 else data.size * 4 + data_nbytes = data.size * 2 if data_dtype == np.float16 else data.size * 4 + + gguf_writer.add_tensor_info(name, data.shape, data_dtype, data_nbytes) - gguf_writer.add_tensor_info(name, data.shape, data_dtype, data_nbytes) print("gguf: write header") gguf_writer.write_header_to_file() @@ -225,24 +255,59 @@ gguf_writer.write_ti_data_to_file() # tensor data print("gguf: convert and write tensor data") -for name in list_vars.keys(): - data = list_vars[name].squeeze().numpy() +if num_parts == 0: + part_names = ("pytorch_model.bin",) +else: + part_names = ( + f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) + ) - # we don't need these - if name.endswith(".attention.masked_bias") or name.endswith(".attention.bias") or name.endswith(".attention.rotary_emb.inv_freq"): - continue +for part_name in part_names: + print("gguf: loading model part '"+ part_name + "'") + model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") - n_dims = len(data.shape) - data_dtype = data.dtype + for name in model_part.keys(): + data = model_part[name] + + old_dtype = data.dtype + + # we don't need these + if name.endswith(".attention.masked_bias") or name.endswith(".attention.bias") or name.endswith(".attention.rotary_emb.inv_freq"): + continue - if data_dtype != np.float16 and data_dtype != np.float32: # convert any unsupported data types to float32 - data = data.astype(np.float32) - elif ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: - # if f16 desired, convert any float32 2-dim weight tensors to float16 - data = data.astype(np.float16) + if data.dtype != torch.float16 and data.dtype != torch.float32: + data = data.to(torch.float32) - gguf_writer.write_tensor_to_file(data) + data = data.squeeze().numpy() + + # map tensor names + if name.endswith(".weight") and name[:-7] in tensor_map: + name = tensor_map[name[:-7]] + ".weight" + elif name.endswith(".bias") and name[:-5] in tensor_map: + name = tensor_map[name[:-5]] + ".bias" + else: + print( "Can not map tensor '" + name + "'" ) + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if ftype == 0 and data.dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 + if ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print( name + ", shape " + str(len(data.shape)) + ", " + str(old_dtype) + " --> " + str(data.dtype)) + + gguf_writer.write_tensor_to_file(data) gguf_writer.close() From 17800cd80fec468411481dc34a51d42a936442f1 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sun, 13 Aug 2023 12:20:02 +0200 Subject: [PATCH 62/87] convert-llama-h5-to-gguf.py : load model in parts to save memory --- convert-llama-h5-to-gguf.py | 157 +++++++++++++++++++++++++----------- 1 file changed, 111 insertions(+), 46 deletions(-) diff --git a/convert-llama-h5-to-gguf.py b/convert-llama-h5-to-gguf.py index 055b6b78d..98a14db9c 100644 --- a/convert-llama-h5-to-gguf.py +++ b/convert-llama-h5-to-gguf.py @@ -1,4 +1,4 @@ -# Quick and dirty HF llama --> gguf conversion, GQA/70b wont work +# HF llama --> gguf conversion, GQA/70b not supported import gguf import gguf_tensor_map as tmap @@ -9,7 +9,7 @@ import json import numpy as np from typing import Any, List from pathlib import Path -from transformers import AutoModelForCausalLM +import torch from sentencepiece import SentencePieceProcessor @@ -22,6 +22,15 @@ def permute(weights: NDArray, n_head: int) -> NDArray: .swapaxes(1, 2) .reshape(weights.shape)) +def count_model_parts(dir_model: str) -> int: + num_parts = 0 + for filename in os.listdir(dir_model): + if filename.startswith("pytorch_model-"): + num_parts += 1 + + if num_parts > 0: + print("gguf: found " + str(num_parts) + " model parts") + return num_parts if len(sys.argv) < 3: print("Usage: convert-h5-to-ggml.py dir-model ftype\n") @@ -60,8 +69,8 @@ if hparams["architectures"][0] != "LlamaForCausalLM": print("Model architecture not supported: " + hparams["architectures"][0] ) sys.exit() -model = AutoModelForCausalLM.from_pretrained(dir_model, low_cpu_mem_usage=True, trust_remote_code=True) -list_vars = model.state_dict() +# get number of model parts +num_parts = count_model_parts(dir_model) gguf_writer = gguf.GGUFWriter.open(fname_out) @@ -164,41 +173,62 @@ tensor_map = tmap.get_tensor_map(block_count) # tensor info print("gguf: get tensor metadata") -for name in list_vars.keys(): - data = list_vars[name].squeeze().numpy() - # we don't need these - if name.endswith(".rotary_emb.inv_freq"): - continue +if num_parts == 0: + part_names = ("pytorch_model.bin",) +else: + part_names = ( + f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) + ) - # permute these - if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): - data = permute(data,head_count) +for part_name in part_names: + print("gguf: loading model part '"+ part_name + "'") + model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") - # map tensor names - if name.endswith(".weight") and name[:-7] in tensor_map: - name = tensor_map[name[:-7]] + ".weight" - elif name.endswith(".bias") and name[:-5] in tensor_map: - name = tensor_map[name[:-5]] + ".bias" - else: - print( "Can not map tensor '" + name + "'" ) - sys.exit() + for name in model_part.keys(): + data = model_part[name] - n_dims = len(data.shape) - data_dtype = data.dtype + # we don't need these + if name.endswith(".rotary_emb.inv_freq"): + continue -# print( name + " 
dims " + str(n_dims) + " dtype " + str(data.dtype) ) - - if data.dtype != np.float16 and data.dtype != np.float32: # convert any unsupported data types to float32 - data_dtype = np.float32 - elif ftype == 1 and data.dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + if data.dtype != torch.float16 and data.dtype != torch.float32: + data = data.to(torch.float32) + + data = data.squeeze().numpy() + + # permute these + if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): + data = permute(data,head_count) + + # map tensor names + if name.endswith(".weight") and name[:-7] in tensor_map: + name = tensor_map[name[:-7]] + ".weight" + elif name.endswith(".bias") and name[:-5] in tensor_map: + name = tensor_map[name[:-5]] + ".bias" + else: + print( "Can not map tensor '" + name + "'" ) + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if ftype == 0 and data.dtype == np.float16: + data_dtype = np.float32 + + # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 + if ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data_dtype = np.float32 + # if f16 desired, convert any float32 2-dim weight tensors to float16 - data_dtype = np.float16 + if ftype == 1 and data.dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data_dtype = np.float16 - data_nbytes = data.size * 2 if data_dtype == np.float16 else data.size * 4 + data_nbytes = data.size * 2 if data_dtype == np.float16 else data.size * 4 - gguf_writer.add_tensor_info(name, data.shape, data_dtype, data_nbytes) + gguf_writer.add_tensor_info(name, data.shape, data_dtype, data_nbytes) print("gguf: write header") @@ -211,28 +241,63 @@ gguf_writer.write_ti_data_to_file() # tensor data print("gguf: convert and write tensor data") -for name in list_vars.keys(): - data = list_vars[name].squeeze().numpy() +if num_parts == 0: + part_names = ("pytorch_model.bin",) +else: + part_names = ( + f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) + ) - # we don't need these - if name.endswith(".rotary_emb.inv_freq"): - continue +for part_name in part_names: + print("gguf: loading model part '"+ part_name + "'") + model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") - # permute these - if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): - data = permute(data, head_count) + for name in model_part.keys(): + data = model_part[name] - n_dims = len(data.shape) - data_dtype = data.dtype + old_dtype = data.dtype + + # we don't need these + if name.endswith(".rotary_emb.inv_freq"): + continue - if data_dtype != np.float16 and data_dtype != np.float32: # convert any unsupported data types to float32 - data = data.astype(np.float32) - elif ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: - # if f16 desired, convert any float32 2-dim weight tensors to float16 - data = data.astype(np.float16) + if data.dtype != torch.float16 and data.dtype != torch.float32: + data = data.to(torch.float32) - gguf_writer.write_tensor_to_file(data) + data = data.squeeze().numpy() + + # permute these + if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): + data = permute(data, head_count) + + # map tensor names + if name.endswith(".weight") and name[:-7] in tensor_map: + name = tensor_map[name[:-7]] + ".weight" + elif name.endswith(".bias") and name[:-5] in tensor_map: + name = tensor_map[name[:-5]] + ".bias" + 
else: + print( "Can not map tensor '" + name + "'" ) + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if ftype == 0 and data.dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 + if ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print( name + ", shape " + str(len(data.shape)) + ", " + str(old_dtype) + " --> " + str(data.dtype)) + + gguf_writer.write_tensor_to_file(data) gguf_writer.close() From 91d4bfd536cee322c26a3d70ac41d486a3fbb7b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Sun, 13 Aug 2023 13:29:46 +0300 Subject: [PATCH 63/87] convert : write more metadata for LLaMA --- convert-llama-h5-to-gguf.py | 24 +++++++++++++++--------- gguf.py | 23 +++++------------------ 2 files changed, 20 insertions(+), 27 deletions(-) diff --git a/convert-llama-h5-to-gguf.py b/convert-llama-h5-to-gguf.py index 055b6b78d..9526bde84 100644 --- a/convert-llama-h5-to-gguf.py +++ b/convert-llama-h5-to-gguf.py @@ -17,6 +17,7 @@ from sentencepiece import SentencePieceProcessor # compatible with python < 3.9 NDArray: 'TypeAlias' = 'np.ndarray[Any, Any]' + def permute(weights: NDArray, n_head: int) -> NDArray: return (weights.reshape(n_head, 2, weights.shape[0] // n_head // 2, *weights.shape[1:]) .swapaxes(1, 2) @@ -52,12 +53,12 @@ if len(sys.argv) > 2: fname_out = sys.argv[1] + "/ggml-model-" + ftype_str[ftype] + ".gguf" print("gguf: loading model "+last_dir) - + with open(dir_model + "/config.json", "r", encoding="utf-8") as f: hparams = json.load(f) if hparams["architectures"][0] != "LlamaForCausalLM": - print("Model architecture not supported: " + hparams["architectures"][0] ) + print("Model architecture not supported: " + hparams["architectures"][0]) sys.exit() model = AutoModelForCausalLM.from_pretrained(dir_model, low_cpu_mem_usage=True, trust_remote_code=True) @@ -68,18 +69,23 @@ gguf_writer = gguf.GGUFWriter.open(fname_out) print("gguf: get model metadata") -llm_arch = "llama" -head_count = hparams["num_attention_heads"] +llm_arch = "llama" +hf_repo = hparams["_name_or_path"] +head_count = hparams["num_attention_heads"] +head_count_kv = hparams["num_key_value_heads"] block_count = hparams["num_hidden_layers"] gguf_writer.add_name(last_dir) gguf_writer.add_architecture(llm_arch) +gguf_writer.add_quantization_version(ftype) +guff_writer.add_source_hf_repo(hf_repo) gguf_writer.add_context_length(llm_arch, hparams["max_position_embeddings"]) gguf_writer.add_embedding_length(llm_arch, hparams["hidden_size"]) gguf_writer.add_block_count(llm_arch, block_count) gguf_writer.add_feed_forward_length(llm_arch, hparams["intermediate_size"]) gguf_writer.add_rope_dimension_count(llm_arch, hparams["hidden_size"] // hparams["num_attention_heads"]) gguf_writer.add_head_count(llm_arch, head_count) +gguf_writer.add_head_count_kv(llm_arch, head_count_kv) gguf_writer.add_layer_norm_rms_eps(llm_arch, hparams["rms_norm_eps"]) @@ -173,7 +179,7 @@ for name in list_vars.keys(): # permute these if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): - data = permute(data,head_count) + data = permute(data, head_count) # map tensor names if name.endswith(".weight") 
and name[:-7] in tensor_map: @@ -181,11 +187,11 @@ for name in list_vars.keys(): elif name.endswith(".bias") and name[:-5] in tensor_map: name = tensor_map[name[:-5]] + ".bias" else: - print( "Can not map tensor '" + name + "'" ) + print("Can not map tensor '" + name + "'") sys.exit() n_dims = len(data.shape) - data_dtype = data.dtype + data_dtype = data.dtype # print( name + " dims " + str(n_dims) + " dtype " + str(data.dtype) ) @@ -223,7 +229,7 @@ for name in list_vars.keys(): data = permute(data, head_count) n_dims = len(data.shape) - data_dtype = data.dtype + data_dtype = data.dtype if data_dtype != np.float16 and data_dtype != np.float32: # convert any unsupported data types to float32 @@ -237,5 +243,5 @@ for name in list_vars.keys(): gguf_writer.close() -print("gguf: model successfully exported to '" + fname_out + "'" ) +print("gguf: model successfully exported to '" + fname_out + "'") print("") diff --git a/gguf.py b/gguf.py index 75ba2870a..de3e5bbfb 100644 --- a/gguf.py +++ b/gguf.py @@ -12,23 +12,10 @@ from typing import Any, IO, List import numpy as np import sys + class GGMLQuantizationType(IntEnum): F32 = 0 F16 = 1 - Q4_0 = 2 - Q4_1 = 3 - # Q4_2 = 4 # support has been removed - # Q4_3 = 5 # support has been removed - Q5_0 = 6 - Q5_1 = 7 - Q8_0 = 8 - Q8_1 = 9 - Q2_K = 10 - Q3_K = 11 - Q4_K = 12 - Q5_K = 13 - Q6_K = 14 - Q8_K = 15 class GGUFValueType(IntEnum): @@ -143,7 +130,7 @@ class GGUFWriter: if add_vtype: self.kv_data += struct.pack(" Date: Sun, 13 Aug 2023 14:38:53 +0300 Subject: [PATCH 64/87] convert : rm quantization version --- convert-llama-h5-to-gguf.py | 95 +++++++++++++++++++------------------ 1 file changed, 48 insertions(+), 47 deletions(-) diff --git a/convert-llama-h5-to-gguf.py b/convert-llama-h5-to-gguf.py index 53378e47c..574788ee0 100644 --- a/convert-llama-h5-to-gguf.py +++ b/convert-llama-h5-to-gguf.py @@ -23,6 +23,7 @@ def permute(weights: NDArray, n_head: int) -> NDArray: .swapaxes(1, 2) .reshape(weights.shape)) + def count_model_parts(dir_model: str) -> int: num_parts = 0 for filename in os.listdir(dir_model): @@ -33,6 +34,7 @@ def count_model_parts(dir_model: str) -> int: print("gguf: found " + str(num_parts) + " model parts") return num_parts + if len(sys.argv) < 3: print("Usage: convert-h5-to-ggml.py dir-model ftype\n") print(" ftype == 0 -> float32") @@ -86,7 +88,6 @@ block_count = hparams["num_hidden_layers"] gguf_writer.add_name(last_dir) gguf_writer.add_architecture(llm_arch) -gguf_writer.add_quantization_version(ftype) guff_writer.add_source_hf_repo(hf_repo) gguf_writer.add_context_length(llm_arch, hparams["max_position_embeddings"]) gguf_writer.add_embedding_length(llm_arch, hparams["hidden_size"]) @@ -187,7 +188,7 @@ else: ) for part_name in part_names: - print("gguf: loading model part '"+ part_name + "'") + print("gguf: loading model part '" + part_name + "'") model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") for name in model_part.keys(): @@ -205,7 +206,7 @@ for part_name in part_names: # permute these if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): - data = permute(data,head_count) + data = permute(data, head_count) # map tensor names if name.endswith(".weight") and name[:-7] in tensor_map: @@ -213,11 +214,11 @@ for part_name in part_names: elif name.endswith(".bias") and name[:-5] in tensor_map: name = tensor_map[name[:-5]] + ".bias" else: - print( "Can not map tensor '" + name + "'" ) + print("Can not map tensor '" + name + "'") sys.exit() n_dims = len(data.shape) - data_dtype = data.dtype + 
data_dtype = data.dtype # if f32 desired, convert any float16 to float32 if ftype == 0 and data.dtype == np.float16: @@ -254,60 +255,60 @@ else: ) for part_name in part_names: - print("gguf: loading model part '"+ part_name + "'") + print("gguf: loading model part '" + part_name + "'") model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") for name in model_part.keys(): data = model_part[name] -<<<<<<< HEAD +<< << << < HEAD + n_dims = len(data.shape) + data_dtype = data.dtype +== == == = + old_dtype = data.dtype + + # we don't need these + if name.endswith(".rotary_emb.inv_freq"): + continue +>>>>>> > 17800cd80fec468411481dc34a51d42a936442f1 + + # convert any unsupported data types to float32 + if data.dtype != torch.float16 and data.dtype != torch.float32: + data = data.to(torch.float32) + + data = data.squeeze().numpy() + + # permute these + if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): + data = permute(data, head_count) + + # map tensor names + if name.endswith(".weight") and name[:-7] in tensor_map: + name = tensor_map[name[:-7]] + ".weight" + elif name.endswith(".bias") and name[:-5] in tensor_map: + name = tensor_map[name[:-5]] + ".bias" + else: + print("Can not map tensor '" + name + "'" ) + sys.exit() + n_dims = len(data.shape) data_dtype = data.dtype -======= - old_dtype = data.dtype - # we don't need these - if name.endswith(".rotary_emb.inv_freq"): - continue ->>>>>>> 17800cd80fec468411481dc34a51d42a936442f1 + # if f32 desired, convert any float16 to float32 + if ftype == 0 and data.dtype == np.float16: + data = data.astype(np.float32) - # convert any unsupported data types to float32 - if data.dtype != torch.float16 and data.dtype != torch.float32: - data = data.to(torch.float32) + # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 + if ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) - data = data.squeeze().numpy() + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) - # permute these - if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): - data = permute(data, head_count) + print(name + ", shape " + str(len(data.shape)) + ", " + str(old_dtype) + " --> " + str(data.dtype)) - # map tensor names - if name.endswith(".weight") and name[:-7] in tensor_map: - name = tensor_map[name[:-7]] + ".weight" - elif name.endswith(".bias") and name[:-5] in tensor_map: - name = tensor_map[name[:-5]] + ".bias" - else: - print( "Can not map tensor '" + name + "'" ) - sys.exit() - - n_dims = len(data.shape) - data_dtype = data.dtype - - # if f32 desired, convert any float16 to float32 - if ftype == 0 and data.dtype == np.float16: - data = data.astype(np.float32) - - # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 - if ftype == 1 and data_dtype == np.float16 and n_dims == 1: - data = data.astype(np.float32) - - # if f16 desired, convert any float32 2-dim weight tensors to float16 - if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: - data = data.astype(np.float16) - - print( name + ", shape " + str(len(data.shape)) + ", " + str(old_dtype) + " --> " + str(data.dtype)) - - gguf_writer.write_tensor_to_file(data) + gguf_writer.write_tensor_to_file(data) gguf_writer.close() From 2827b840e4235e02c32f93ca80af19cb0790d3c7 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sun, 13 Aug 2023 13:54:10 +0200 Subject: [PATCH 65/87] convert-gptneox-h5-to-gguf.py : add file_type key --- convert-gptneox-h5-to-gguf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/convert-gptneox-h5-to-gguf.py b/convert-gptneox-h5-to-gguf.py index a8a0c7e2d..ba6e90e42 100644 --- a/convert-gptneox-h5-to-gguf.py +++ b/convert-gptneox-h5-to-gguf.py @@ -90,8 +90,9 @@ print("gguf: get model metadata") llm_arch = "gptneox" block_count = hparams["num_hidden_layers"] -gguf_writer.add_name(last_dir) gguf_writer.add_architecture(llm_arch) +gguf_writer.add_name(last_dir) +gguf_writer.add_file_type( "All tensors F32" if ftype == 0 else "Most tensors F16, some F32") gguf_writer.add_context_length(llm_arch, hparams["max_position_embeddings"]) gguf_writer.add_embedding_length(llm_arch, hparams["hidden_size"]) gguf_writer.add_block_count(llm_arch, block_count) From 6beebf3fd92aa2ab5c8b060b6d25c5e1d3d12e95 Mon Sep 17 00:00:00 2001 From: klosax <131523366+klosax@users.noreply.github.com> Date: Sun, 13 Aug 2023 14:11:01 +0200 Subject: [PATCH 66/87] gptneox-main.cpp : add file_type key --- gptneox-main.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gptneox-main.cpp b/gptneox-main.cpp index 4773dfd69..63ee5e61c 100644 --- a/gptneox-main.cpp +++ b/gptneox-main.cpp @@ -379,6 +379,8 @@ bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2 if (keyidx != -1) { fprintf(stdout, "%s: model license = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } keyidx = gguf_find_key(ggufctx, "general.architecture"); if (keyidx != -1) { fprintf(stdout, "%s: model architecture = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.file_type"); + if (keyidx != -1) { fprintf(stdout, "%s: model file type = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } } // check required metadata From 24f48833ab5a1fcacb15c1908e13a80b35fa0f31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Sun, 13 Aug 2023 16:55:42 +0300 Subject: [PATCH 67/87] fix conflicts --- convert-llama-h5-to-gguf.py | 82 +++++++++++++++++-------------------- 1 file changed, 38 insertions(+), 44 deletions(-) diff --git a/convert-llama-h5-to-gguf.py b/convert-llama-h5-to-gguf.py index 574788ee0..cf9f6f802 100644 --- a/convert-llama-h5-to-gguf.py +++ b/convert-llama-h5-to-gguf.py @@ -23,7 +23,6 @@ def permute(weights: NDArray, n_head: int) -> NDArray: .swapaxes(1, 2) .reshape(weights.shape)) - def count_model_parts(dir_model: str) -> int: num_parts = 0 for filename in os.listdir(dir_model): @@ -34,7 +33,6 @@ def count_model_parts(dir_model: str) -> int: print("gguf: found " + str(num_parts) + " model parts") return num_parts - if len(sys.argv) < 3: print("Usage: convert-h5-to-ggml.py dir-model ftype\n") print(" ftype == 0 -> float32") @@ -188,7 +186,7 
@@ else: ) for part_name in part_names: - print("gguf: loading model part '" + part_name + "'") + print("gguf: loading model part '"+ part_name + "'") model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") for name in model_part.keys(): @@ -206,7 +204,7 @@ for part_name in part_names: # permute these if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): - data = permute(data, head_count) + data = permute(data,head_count) # map tensor names if name.endswith(".weight") and name[:-7] in tensor_map: @@ -214,11 +212,11 @@ for part_name in part_names: elif name.endswith(".bias") and name[:-5] in tensor_map: name = tensor_map[name[:-5]] + ".bias" else: - print("Can not map tensor '" + name + "'") + print( "Can not map tensor '" + name + "'" ) sys.exit() n_dims = len(data.shape) - data_dtype = data.dtype + data_dtype = data.dtype # if f32 desired, convert any float16 to float32 if ftype == 0 and data.dtype == np.float16: @@ -255,60 +253,56 @@ else: ) for part_name in part_names: - print("gguf: loading model part '" + part_name + "'") + print("gguf: loading model part '"+ part_name + "'") model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") for name in model_part.keys(): data = model_part[name] -<< << << < HEAD - n_dims = len(data.shape) - data_dtype = data.dtype -== == == = - old_dtype = data.dtype + + old_dtype = data.dtype - # we don't need these - if name.endswith(".rotary_emb.inv_freq"): - continue ->>>>>> > 17800cd80fec468411481dc34a51d42a936442f1 + # we don't need these + if name.endswith(".rotary_emb.inv_freq"): + continue - # convert any unsupported data types to float32 - if data.dtype != torch.float16 and data.dtype != torch.float32: - data = data.to(torch.float32) + # convert any unsupported data types to float32 + if data.dtype != torch.float16 and data.dtype != torch.float32: + data = data.to(torch.float32) - data = data.squeeze().numpy() + data = data.squeeze().numpy() - # permute these - if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): - data = permute(data, head_count) + # permute these + if name.endswith(".q_proj.weight") or name.endswith(".k_proj.weight"): + data = permute(data, head_count) - # map tensor names - if name.endswith(".weight") and name[:-7] in tensor_map: - name = tensor_map[name[:-7]] + ".weight" - elif name.endswith(".bias") and name[:-5] in tensor_map: - name = tensor_map[name[:-5]] + ".bias" - else: - print("Can not map tensor '" + name + "'" ) - sys.exit() + # map tensor names + if name.endswith(".weight") and name[:-7] in tensor_map: + name = tensor_map[name[:-7]] + ".weight" + elif name.endswith(".bias") and name[:-5] in tensor_map: + name = tensor_map[name[:-5]] + ".bias" + else: + print( "Can not map tensor '" + name + "'" ) + sys.exit() - n_dims = len(data.shape) - data_dtype = data.dtype + n_dims = len(data.shape) + data_dtype = data.dtype - # if f32 desired, convert any float16 to float32 - if ftype == 0 and data.dtype == np.float16: - data = data.astype(np.float32) + # if f32 desired, convert any float16 to float32 + if ftype == 0 and data.dtype == np.float16: + data = data.astype(np.float32) - # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 - if ftype == 1 and data_dtype == np.float16 and n_dims == 1: - data = data.astype(np.float32) + # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 + if ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) - # if f16 desired, convert any float32 2-dim weight tensors to float16 - if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: - data = data.astype(np.float16) + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) - print(name + ", shape " + str(len(data.shape)) + ", " + str(old_dtype) + " --> " + str(data.dtype)) + print( name + ", shape " + str(len(data.shape)) + ", " + str(old_dtype) + " --> " + str(data.dtype)) - gguf_writer.write_tensor_to_file(data) + gguf_writer.write_tensor_to_file(data) gguf_writer.close() From ee77efea2a1e3f7d153976b0934522b6bbaa62e6 Mon Sep 17 00:00:00 2001 From: drbh Date: Sun, 13 Aug 2023 10:00:48 -0400 Subject: [PATCH 68/87] test : add simple grammar parsing tests (#2594) * adds simple grammar parsing tests * adds cassert header --- .gitignore | 1 + Makefile | 5 +- tests/CMakeLists.txt | 1 + tests/test-grammar-parser.cpp | 249 ++++++++++++++++++++++++++++++++++ 4 files changed, 255 insertions(+), 1 deletion(-) create mode 100644 tests/test-grammar-parser.cpp diff --git a/.gitignore b/.gitignore index e345e64ed..743b8a8b6 100644 --- a/.gitignore +++ b/.gitignore @@ -70,6 +70,7 @@ poetry.lock poetry.toml # Test binaries +tests/test-grammar-parser tests/test-double-float tests/test-grad0 tests/test-opt diff --git a/Makefile b/Makefile index ce593edfc..070ae1242 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ BUILD_TARGETS = main quantize quantize-stats perplexity embedding vdot train-text-from-scratch convert-llama2c-to-ggml simple server embd-input-test # Binaries only useful for tests -TEST_TARGETS = tests/test-double-float tests/test-grad0 tests/test-opt tests/test-quantize-fns tests/test-quantize-perf tests/test-sampling tests/test-tokenizer-0 +TEST_TARGETS = tests/test-grammar-parser tests/test-double-float tests/test-grad0 tests/test-opt tests/test-quantize-fns tests/test-quantize-perf tests/test-sampling tests/test-tokenizer-0 default: $(BUILD_TARGETS) @@ -412,6 +412,9 @@ benchmark-matmult: examples/benchmark/benchmark-matmult.cpp build-info.h ggml.o vdot: pocs/vdot/vdot.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) +tests/test-grammar-parser: tests/test-grammar-parser.cpp examples/grammar-parser.cpp build-info.h ggml.o llama.o common.o $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.txt,$^) -o $@ $(LDFLAGS) + tests/test-double-float: tests/test-double-float.cpp build-info.h ggml.o llama.o common.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.txt,$^) -o $@ $(LDFLAGS) diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 1a40edbec..689fb6f2a 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -11,5 +11,6 @@ llama_add_test(test-quantize-fns.cpp) llama_add_test(test-quantize-perf.cpp) llama_add_test(test-sampling.cpp) llama_add_test(test-tokenizer-0.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab.bin) +llama_add_test(test-grammar-parser.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../examples/grammar-parser.cpp) llama_add_test(test-grad0.cpp) # SLOW # llama_add_test(test-opt.cpp) # SLOW diff --git a/tests/test-grammar-parser.cpp b/tests/test-grammar-parser.cpp new file mode 100644 index 000000000..7022988b4 --- /dev/null +++ b/tests/test-grammar-parser.cpp @@ -0,0 +1,249 @@ +#ifdef NDEBUG +#undef NDEBUG +#endif + 
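+// NDEBUG is undefined above so that the assert() checks used throughout
+// this test stay active even in optimized builds.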
+#include "llama.h" +#include "examples/grammar-parser.cpp" +#include + +int main() +{ + grammar_parser::parse_state parsed_grammar; + + const char *grammar_bytes = R"""(root ::= (expr "=" term "\n")+ +expr ::= term ([-+*/] term)* +term ::= [0-9]+)"""; + + parsed_grammar = grammar_parser::parse(grammar_bytes); + + std::vector> expected = { + {"expr", 2}, + {"expr_5", 5}, + {"expr_6", 6}, + {"root", 0}, + {"root_1", 1}, + {"root_4", 4}, + {"term", 3}, + {"term_7", 7}, + }; + + uint32_t index = 0; + for (auto it = parsed_grammar.symbol_ids.begin(); it != parsed_grammar.symbol_ids.end(); ++it) + { + std::string key = it->first; + uint32_t value = it->second; + std::pair expected_pair = expected[index]; + + // pretty print error message before asserting + if (expected_pair.first != key || expected_pair.second != value) + { + fprintf(stderr, "expected_pair: %s, %d\n", expected_pair.first.c_str(), expected_pair.second); + fprintf(stderr, "actual_pair: %s, %d\n", key.c_str(), value); + fprintf(stderr, "expected_pair != actual_pair\n"); + } + + assert(expected_pair.first == key && expected_pair.second == value); + + index++; + } + std::vector expected_rules = { + {LLAMA_GRETYPE_RULE_REF, 4}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 2}, + {LLAMA_GRETYPE_CHAR, 61}, + {LLAMA_GRETYPE_RULE_REF, 3}, + {LLAMA_GRETYPE_CHAR, 10}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 3}, + {LLAMA_GRETYPE_RULE_REF, 6}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 7}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 1}, + {LLAMA_GRETYPE_RULE_REF, 4}, + {LLAMA_GRETYPE_ALT, 0}, + {LLAMA_GRETYPE_RULE_REF, 1}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_CHAR, 45}, + {LLAMA_GRETYPE_CHAR_ALT, 43}, + {LLAMA_GRETYPE_CHAR_ALT, 42}, + {LLAMA_GRETYPE_CHAR_ALT, 47}, + {LLAMA_GRETYPE_RULE_REF, 3}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 5}, + {LLAMA_GRETYPE_RULE_REF, 6}, + {LLAMA_GRETYPE_ALT, 0}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_CHAR, 48}, + {LLAMA_GRETYPE_CHAR_RNG_UPPER, 57}, + {LLAMA_GRETYPE_RULE_REF, 7}, + {LLAMA_GRETYPE_ALT, 0}, + {LLAMA_GRETYPE_CHAR, 48}, + {LLAMA_GRETYPE_CHAR_RNG_UPPER, 57}, + {LLAMA_GRETYPE_END, 0}, + }; + + index = 0; + for (auto rule : parsed_grammar.rules) + { + // compare rule to expected rule + for (uint32_t i = 0; i < rule.size(); i++) + { + llama_grammar_element element = rule[i]; + llama_grammar_element expected_element = expected_rules[index]; + + // pretty print error message before asserting + if (expected_element.type != element.type || expected_element.value != element.value) + { + fprintf(stderr, "index: %d\n", index); + fprintf(stderr, "expected_element: %d, %d\n", expected_element.type, expected_element.value); + fprintf(stderr, "actual_element: %d, %d\n", element.type, element.value); + fprintf(stderr, "expected_element != actual_element\n"); + } + + assert(expected_element.type == element.type && expected_element.value == element.value); + index++; + } + } + + const char *longer_grammar_bytes = R"""( + root ::= (expr "=" ws term "\n")+ + expr ::= term ([-+*/] term)* + term ::= ident | num | "(" ws expr ")" ws + ident ::= [a-z] [a-z0-9_]* ws + num ::= [0-9]+ ws + ws ::= [ \t\n]* + )"""; + + parsed_grammar = grammar_parser::parse(longer_grammar_bytes); + + expected = { + {"expr", 2}, + {"expr_6", 6}, + {"expr_7", 7}, + {"ident", 8}, + {"ident_10", 10}, + {"num", 9}, + {"num_11", 11}, + {"root", 0}, + {"root_1", 1}, + {"root_5", 5}, + {"term", 4}, + {"ws", 3}, + {"ws_12", 12}, + }; + + index = 0; + for (auto it = 
parsed_grammar.symbol_ids.begin(); it != parsed_grammar.symbol_ids.end(); ++it) + { + std::string key = it->first; + uint32_t value = it->second; + std::pair expected_pair = expected[index]; + + // pretty print error message before asserting + if (expected_pair.first != key || expected_pair.second != value) + { + fprintf(stderr, "expected_pair: %s, %d\n", expected_pair.first.c_str(), expected_pair.second); + fprintf(stderr, "actual_pair: %s, %d\n", key.c_str(), value); + fprintf(stderr, "expected_pair != actual_pair\n"); + } + + assert(expected_pair.first == key && expected_pair.second == value); + + index++; + } + expected_rules = { + {LLAMA_GRETYPE_RULE_REF, 5}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 2}, + {LLAMA_GRETYPE_CHAR, 61}, + {LLAMA_GRETYPE_RULE_REF, 3}, + {LLAMA_GRETYPE_RULE_REF, 4}, + {LLAMA_GRETYPE_CHAR, 10}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 4}, + {LLAMA_GRETYPE_RULE_REF, 7}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 12}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 8}, + {LLAMA_GRETYPE_ALT, 0}, + {LLAMA_GRETYPE_RULE_REF, 9}, + {LLAMA_GRETYPE_ALT, 0}, + {LLAMA_GRETYPE_CHAR, 40}, + {LLAMA_GRETYPE_RULE_REF, 3}, + {LLAMA_GRETYPE_RULE_REF, 2}, + {LLAMA_GRETYPE_CHAR, 41}, + {LLAMA_GRETYPE_RULE_REF, 3}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 1}, + {LLAMA_GRETYPE_RULE_REF, 5}, + {LLAMA_GRETYPE_ALT, 0}, + {LLAMA_GRETYPE_RULE_REF, 1}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_CHAR, 45}, + {LLAMA_GRETYPE_CHAR_ALT, 43}, + {LLAMA_GRETYPE_CHAR_ALT, 42}, + {LLAMA_GRETYPE_CHAR_ALT, 47}, + {LLAMA_GRETYPE_RULE_REF, 4}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 6}, + {LLAMA_GRETYPE_RULE_REF, 7}, + {LLAMA_GRETYPE_ALT, 0}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_CHAR, 97}, + {LLAMA_GRETYPE_CHAR_RNG_UPPER, 122}, + {LLAMA_GRETYPE_RULE_REF, 10}, + {LLAMA_GRETYPE_RULE_REF, 3}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_RULE_REF, 11}, + {LLAMA_GRETYPE_RULE_REF, 3}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_CHAR, 97}, + {LLAMA_GRETYPE_CHAR_RNG_UPPER, 122}, + {LLAMA_GRETYPE_CHAR_ALT, 48}, + {LLAMA_GRETYPE_CHAR_RNG_UPPER, 57}, + {LLAMA_GRETYPE_CHAR_ALT, 95}, + {LLAMA_GRETYPE_RULE_REF, 10}, + {LLAMA_GRETYPE_ALT, 0}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_CHAR, 48}, + {LLAMA_GRETYPE_CHAR_RNG_UPPER, 57}, + {LLAMA_GRETYPE_RULE_REF, 11}, + {LLAMA_GRETYPE_ALT, 0}, + {LLAMA_GRETYPE_CHAR, 48}, + {LLAMA_GRETYPE_CHAR_RNG_UPPER, 57}, + {LLAMA_GRETYPE_END, 0}, + {LLAMA_GRETYPE_CHAR, 32}, + {LLAMA_GRETYPE_CHAR_ALT, 9}, + {LLAMA_GRETYPE_CHAR_ALT, 10}, + {LLAMA_GRETYPE_RULE_REF, 12}, + {LLAMA_GRETYPE_ALT, 0}, + {LLAMA_GRETYPE_END, 0}, + }; + + index = 0; + for (auto rule : parsed_grammar.rules) + { + // compare rule to expected rule + for (uint32_t i = 0; i < rule.size(); i++) + { + llama_grammar_element element = rule[i]; + llama_grammar_element expected_element = expected_rules[index]; + + // pretty print error message before asserting + if (expected_element.type != element.type || expected_element.value != element.value) + { + fprintf(stderr, "index: %d\n", index); + fprintf(stderr, "expected_element: %d, %d\n", expected_element.type, expected_element.value); + fprintf(stderr, "actual_element: %d, %d\n", element.type, element.value); + fprintf(stderr, "expected_element != actual_element\n"); + } + + assert(expected_element.type == element.type && expected_element.value == element.value); + index++; + } + } + + return 0; +} From f31b5397143009d682db90fd2a6cde83f1ef00eb Mon Sep 17 00:00:00 2001 From: vxiiduu 
<73044267+vxiiduu@users.noreply.github.com> Date: Mon, 14 Aug 2023 13:59:16 +1000 Subject: [PATCH 69/87] Enhance Windows 7 and below compatibility. (#2592) * Enhance Windows 7 compatibility. * Clean away unnecessary preprocessor conditional --- llama-util.h | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/llama-util.h b/llama-util.h index 6e9e39ddb..75e19c50c 100644 --- a/llama-util.h +++ b/llama-util.h @@ -271,20 +271,29 @@ struct llama_mmap { throw std::runtime_error(format("MapViewOfFile failed: %s", llama_format_win_err(error).c_str())); } - #if _WIN32_WINNT >= _WIN32_WINNT_WIN8 if (prefetch) { - // Advise the kernel to preload the mapped memory - WIN32_MEMORY_RANGE_ENTRY range; - range.VirtualAddress = addr; - range.NumberOfBytes = (SIZE_T)size; - if (!PrefetchVirtualMemory(GetCurrentProcess(), 1, &range, 0)) { - fprintf(stderr, "warning: PrefetchVirtualMemory failed: %s\n", - llama_format_win_err(GetLastError()).c_str()); + // The PrefetchVirtualMemory API is only present on Windows 8 and above, so we + // will dynamically load it using GetProcAddress. + BOOL (WINAPI *pPrefetchVirtualMemory) (HANDLE, ULONG_PTR, PWIN32_MEMORY_RANGE_ENTRY, ULONG); + HMODULE hKernel32; + + // This call is guaranteed to succeed. + hKernel32 = GetModuleHandleW(L"kernel32.dll"); + + // This call may fail if on a pre-Win8 system. + pPrefetchVirtualMemory = reinterpret_cast (GetProcAddress(hKernel32, "PrefetchVirtualMemory")); + + if (pPrefetchVirtualMemory) { + // Advise the kernel to preload the mapped memory. + WIN32_MEMORY_RANGE_ENTRY range; + range.VirtualAddress = addr; + range.NumberOfBytes = (SIZE_T)size; + if (!pPrefetchVirtualMemory(GetCurrentProcess(), 1, &range, 0)) { + fprintf(stderr, "warning: PrefetchVirtualMemory failed: %s\n", + llama_format_win_err(GetLastError()).c_str()); + } } } - #else - #pragma message("warning: You are building for pre-Windows 8; prefetch not supported") - #endif // _WIN32_WINNT >= _WIN32_WINNT_WIN8 } ~llama_mmap() { From 196b50fee74625528fae91e7125dc1e9cc07cecd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Mon, 14 Aug 2023 08:50:47 +0300 Subject: [PATCH 70/87] gguf : add todos and comments --- gguf-llama.cpp | 64 ++++++++++++++++++++++++++++---------------------- 1 file changed, 36 insertions(+), 28 deletions(-) diff --git a/gguf-llama.cpp b/gguf-llama.cpp index 700d6009b..826af549b 100644 --- a/gguf-llama.cpp +++ b/gguf-llama.cpp @@ -258,6 +258,11 @@ struct llama_kv_cache { }; struct llama_vocab { + // TODO: convert to this gguf_vocab + // add a vector of merges + // add members for bos/eos/pad/sep tokens + // so that we can pass it to different types of tokenizers with a common interface + using id = int32_t; using token = std::string; @@ -447,7 +452,7 @@ static size_t llama_calc_tensor_size(const std::vector & ne, enum ggml return size / ggml_blck_size(type); } -struct llama_load_tensor { +struct gguf_load_tensor { std::string name; enum ggml_type type = GGML_TYPE_F32; std::vector ne; @@ -457,9 +462,9 @@ struct llama_load_tensor { uint8_t * data; }; -struct llama_load_tensors_map { +struct gguf_load_tensors_map { // tensors is kept in a separate vector to preserve file order - std::vector tensors; + std::vector tensors; std::unordered_map name_to_idx; }; @@ -477,7 +482,7 @@ struct gguf_file_loader { llama_vocab vocab; struct ggml_context * ctx_data = NULL; - gguf_file_loader(const char * fname, llama_load_tensors_map & tensors_map) + gguf_file_loader(const char * fname, 
gguf_load_tensors_map & tensors_map) : file(fname, "rb") { fprintf(stderr, "llama.cpp: loading model from %s\n", fname); @@ -523,13 +528,9 @@ struct ggml_context * ctx_data = NULL; void read_hparams() { - // TODO make keysconstants in header + // TODO define keys as constants in header // TODO: read all hparams from file - int q_ver_idx = gguf_find_key (gguf_ctx, "general.quantization_version"); - if (q_ver_idx != -1) { - hparams.ftype = (enum llama_ftype) gguf_get_val_u32(gguf_ctx, q_ver_idx); - } - + hparams.n_vocab = read_n_vocab(); hparams.n_ctx = read_u32("llama.context_length"); hparams.n_embd = read_u32("llama.embedding_length"); @@ -567,11 +568,11 @@ struct ggml_context * ctx_data = NULL; } } - void read_tensor_metadata(llama_load_tensors_map & tensors_map) { + void read_tensor_metadata(gguf_load_tensors_map & tensors_map) { const int n_tensors = gguf_get_n_tensors(gguf_ctx); for (int i = 0; i < n_tensors; ++i) { - llama_load_tensor tensor; + gguf_load_tensor tensor; const char * name = gguf_get_tensor_name(gguf_ctx, i); struct ggml_tensor * cur = ggml_get_tensor(ctx_data, name); @@ -617,6 +618,13 @@ struct ggml_context * ctx_data = NULL; }; struct gguf_file_saver { + // TODO + // this implementation now assumes that the data section is of the same length as the unquantized model. + // this is needed to write tensor metadata and weights in a single pass by seeking to appropriate positions in the file. + // this may not be true when we add quantization version and change ftype description (currently it's string according to the specs, + // but better to have it as uint32). + // we need to calculate the delta in number of bytes written with a counter as a struct member. + gguf_file file; gguf_file_loader * fl; size_t info_offset; @@ -747,7 +755,7 @@ struct gguf_file_saver { GGML_ASSERT(info_offset == file.tell()); } - size_t write_tensor_info(llama_load_tensor & tensor, enum ggml_type type) { + size_t write_tensor_info(gguf_load_tensor & tensor, enum ggml_type type) { size_t total_written = 0; file.seek(info_offset, SEEK_SET); GGML_ASSERT(info_offset == file.tell()); @@ -761,14 +769,14 @@ struct gguf_file_saver { total_written += file.write_i32(type); total_written += file.write_u64(tensor_offset); - info_offset += total_written; + info_offset += total_written; // position to write info of the next tensor file.seek(0, SEEK_END); return total_written; } - void write_tensor(llama_load_tensor & tensor, enum ggml_type new_type, const void * new_data, size_t new_size) { + void write_tensor(gguf_load_tensor & tensor, enum ggml_type new_type, const void * new_data, size_t new_size) { switch (new_type) { case GGML_TYPE_F32: case GGML_TYPE_F16: @@ -791,13 +799,13 @@ struct gguf_file_saver { size_t padded_size = GGML_PAD(new_size, GGUF_DEFAULT_ALIGNMENT); // TODO: handle custom alignment size_t pad = padded_size - new_size; file.write_zeros(pad); - tensor_offset += padded_size; + tensor_offset += padded_size; // offset of the next tensor } }; struct llama_model_loader { std::unique_ptr file_loader; - llama_load_tensors_map tensors_map; + gguf_load_tensors_map tensors_map; bool use_mmap; size_t num_ggml_tensors_created = 0; struct ggml_context * ggml_ctx = NULL; @@ -813,7 +821,7 @@ struct llama_model_loader { void calc_sizes(size_t * ctx_size_p, size_t * mmapped_size_p) const { *ctx_size_p = *mmapped_size_p = 0; - for (const llama_load_tensor & lt : tensors_map.tensors) { + for (const gguf_load_tensor & lt : tensors_map.tensors) { *ctx_size_p += sizeof(struct ggml_tensor) + GGML_OBJECT_SIZE; 
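             // tensor data counts against the mmap budget when mmap is used,
             // otherwise against the context; the +16 looks like alignment slack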
*(use_mmap ? mmapped_size_p : ctx_size_p) += lt.size + 16; } @@ -824,7 +832,7 @@ struct llama_model_loader { if (it == tensors_map.name_to_idx.end()) { throw std::runtime_error(std::runtime_error(format("llama.cpp: tensor '%s' is missing from model", name.c_str()))); } - llama_load_tensor & lt = tensors_map.tensors.at(it->second); + gguf_load_tensor & lt = tensors_map.tensors.at(it->second); if (lt.ne != ne) { throw std::runtime_error(format("llama.cpp: tensor '%s' has wrong shape; expected %s, got %s", name.c_str(), llama_format_tensor_shape(ne).c_str(), llama_format_tensor_shape(lt.ne).c_str())); @@ -833,7 +841,7 @@ struct llama_model_loader { return get_tensor_for(lt, backend); } - struct ggml_tensor * get_tensor_for(llama_load_tensor & lt, ggml_backend backend) { + struct ggml_tensor * get_tensor_for(gguf_load_tensor & lt, ggml_backend backend) { struct ggml_tensor * tensor; if (backend != GGML_BACKEND_CPU) { ggml_set_no_alloc(ggml_ctx, true); @@ -866,7 +874,7 @@ struct llama_model_loader { size_t data_size = 0; size_t prefetch_size = 0; size_t lock_size = 0; - for (const llama_load_tensor & lt : tensors_map.tensors) { + for (const gguf_load_tensor & lt : tensors_map.tensors) { data_size += lt.size; if (lt.ggml_tensor->backend == GGML_BACKEND_CPU) { prefetch_size += lt.size; @@ -881,7 +889,7 @@ struct llama_model_loader { } size_t done_size = 0; - for (llama_load_tensor & lt : tensors_map.tensors) { + for (gguf_load_tensor & lt : tensors_map.tensors) { if (progress_callback) { progress_callback((float) done_size / data_size, progress_callback_user_data); } @@ -928,7 +936,7 @@ struct llama_model_loader { } } - void load_data_for(llama_load_tensor & lt) { + void load_data_for(gguf_load_tensor & lt) { if (use_mmap) { lt.data = (uint8_t *) mapping->addr + lt.file_off; } else { @@ -942,7 +950,7 @@ struct llama_model_loader { } } - static void print_checksum(llama_load_tensor & lt) { + static void print_checksum(gguf_load_tensor & lt) { uint32_t sum = 0; for (size_t i = 0; i < lt.size; i++) { uint8_t byte = lt.data[i]; @@ -1421,7 +1429,7 @@ static void llama_model_load_internal( } // populate `tensors_by_name` - for (llama_load_tensor & lt : ml->tensors_map.tensors) { + for (gguf_load_tensor & lt : ml->tensors_map.tensors) { model.tensors_by_name.emplace_back(lt.name, lt.ggml_tensor); } @@ -2896,7 +2904,7 @@ void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar // quantization // -static void llama_convert_tensor_internal(const llama_load_tensor & tensor, gguf_buffer & output, const int nelements, const int nthread) { +static void llama_convert_tensor_internal(const gguf_load_tensor & tensor, gguf_buffer & output, const int nelements, const int nthread) { if (output.size < nelements * sizeof(float)) { output.resize(nelements * sizeof(float)); } @@ -3018,7 +3026,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s }; size_t idx = 0; - for (llama_load_tensor & tensor : model_loader->tensors_map.tensors) { + for (gguf_load_tensor & tensor : model_loader->tensors_map.tensors) { gguf_buffer read_data; read_data.resize(tensor.size); tensor.data = read_data.addr; @@ -3570,7 +3578,7 @@ int llama_apply_lora_from_file_internal(const struct llama_model & model, const return 1; } size_t idx = model_loader->tensors_map.name_to_idx[base_name]; - llama_load_tensor & lt = model_loader->tensors_map.tensors[idx]; + gguf_load_tensor & lt = model_loader->tensors_map.tensors[idx]; base_t = model_loader->get_tensor(base_name, { (uint32_t)dest_t->ne[0], 
(uint32_t)dest_t->ne[1] }, GGML_BACKEND_CPU);
        lt.data = (uint8_t *) lt.ggml_tensor->data;
        model_loader->load_data_for(lt);

From 5517d6e69214cdead000a76983b9fe175c3f8329 Mon Sep 17 00:00:00 2001
From: Jhen-Jie Hong
Date: Mon, 14 Aug 2023 15:16:54 +0800
Subject: [PATCH 71/87] server : implement json-schema-to-grammar.mjs & add
 grammar param in the UI (#2588)

* server : implement json-schema-to-grammar.mjs by following the Python impl

* server : add grammar support in chat.mjs

* server : implement grammar param in the UI

* server : generate .hpp

* server : remove trailing whitespaces

* server : generate .hpp

* server : fix sorting of prop pairs

* server : optimize regex & iteration
---
 examples/server/chat.mjs                      |   30 +
 examples/server/index.html.hpp                | 1920 +++++----
 examples/server/index.js.hpp                  | 3397 +++++++++--------
 .../server/json-schema-to-grammar.mjs.hpp     |  311 ++
 examples/server/public/index.html             |   29 +
 examples/server/public/index.js               |    2 +-
 .../server/public/json-schema-to-grammar.mjs  |  112 +
 7 files changed, 3208 insertions(+), 2593 deletions(-)
 create mode 100644 examples/server/json-schema-to-grammar.mjs.hpp
 create mode 100644 examples/server/public/json-schema-to-grammar.mjs

diff --git a/examples/server/chat.mjs b/examples/server/chat.mjs
index 8269e2592..87f4d2926 100644
--- a/examples/server/chat.mjs
+++ b/examples/server/chat.mjs
@@ -1,5 +1,34 @@
 import * as readline from 'node:readline'
 import { stdin, stdout } from 'node:process'
+import { readFileSync } from 'node:fs'
+import { SchemaConverter } from './public/json-schema-to-grammar.mjs'
+
+const args = process.argv.slice(2);
+const grammarJsonSchemaFile = args.find(
+  (_, index) => args[index - 1] === "--grammar-json-schema"
+);
+const grammarFile = args.find((_, index) => args[index - 1] === "--grammar");
+
+// Example usage: function,arguments
+const grammarJsonSchemaPropOrder = args.find(
+  (_, index) => args[index - 1] === "--grammar-json-schema-prop-order"
+);
+const propOrder = grammarJsonSchemaPropOrder
+  ? 
grammarJsonSchemaPropOrder + .split(",") + .reduce((acc, cur, index) => ({ ...acc, [cur]: index }), {}) + : {}; + +let grammar = null +if (grammarJsonSchemaFile) { + const schema = JSON.parse(readFileSync(grammarJsonSchemaFile, 'utf-8')) + const converter = new SchemaConverter(propOrder) + converter.visit(schema, '') + grammar = converter.formatGrammar() +} +if (grammarFile) { + grammar = readFileSync(grammarFile, 'utf-8') +} const API_URL = 'http://127.0.0.1:8080' @@ -48,6 +77,7 @@ async function chat_completion(question) { n_keep: n_keep, n_predict: 256, stop: ["\n### Human:"], // stop completion after generating this + grammar, stream: true, }) }) diff --git a/examples/server/index.html.hpp b/examples/server/index.html.hpp index bd0b47870..42304ab7e 100644 --- a/examples/server/index.html.hpp +++ b/examples/server/index.html.hpp @@ -207,946 +207,1054 @@ unsigned char index_html[] = { 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x6a, 0x73, 0x27, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x73, 0x69, - 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3a, 0x20, 0x22, 0x54, 0x68, - 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x76, - 0x65, 0x72, 0x73, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x65, 0x74, - 0x77, 0x65, 0x65, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x61, 0x6e, - 0x64, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x61, 0x20, 0x66, - 0x72, 0x69, 0x65, 0x6e, 0x64, 0x6c, 0x79, 0x20, 0x63, 0x68, 0x61, 0x74, - 0x62, 0x6f, 0x74, 0x2e, 0x20, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x64, - 0x20, 0x69, 0x6e, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x6d, - 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x2e, 0x22, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x3a, 0x20, 0x22, 0x7b, 0x7b, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x7d, 0x7d, 0x5c, 0x6e, 0x5c, 0x6e, 0x7b, 0x7b, 0x68, 0x69, 0x73, 0x74, - 0x6f, 0x72, 0x79, 0x7d, 0x7d, 0x5c, 0x6e, 0x7b, 0x7b, 0x63, 0x68, 0x61, - 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x22, 0x7b, 0x7b, 0x6e, 0x61, 0x6d, - 0x65, 0x7d, 0x7d, 0x3a, 0x20, 0x7b, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x7d, 0x7d, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3a, - 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x79, 0x70, 0x65, 0x3a, 0x20, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x68, 0x61, 0x72, 0x3a, - 0x20, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x22, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x3a, 0x20, 0x22, 0x55, - 0x73, 0x65, 0x72, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x3a, 0x20, 0x34, - 0x30, 0x30, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 
0x65, - 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x3a, 0x20, 0x30, - 0x2e, 0x37, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x3a, - 0x20, 0x32, 0x35, 0x36, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, - 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x70, 0x65, 0x6e, - 0x61, 0x6c, 0x74, 0x79, 0x2c, 0x20, 0x2d, 0x31, 0x20, 0x3d, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, - 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x31, 0x2e, - 0x31, 0x38, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, - 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x3a, 0x20, 0x34, - 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x3c, 0x3d, 0x20, 0x30, 0x20, 0x74, - 0x6f, 0x20, 0x75, 0x73, 0x65, 0x20, 0x76, 0x6f, 0x63, 0x61, 0x62, 0x20, - 0x73, 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x6f, 0x70, 0x5f, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x2c, 0x20, 0x2f, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x7b, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x43, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, + 0x20, 0x27, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x2d, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x2d, 0x74, 0x6f, 0x2d, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x2e, 0x6d, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x6d, + 0x70, 0x74, 0x3a, 0x20, 0x22, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, + 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, 0x6e, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x2c, 0x20, 0x61, 0x20, 0x66, 0x72, 0x69, 0x65, 0x6e, 0x64, + 0x6c, 0x79, 0x20, 0x63, 0x68, 0x61, 0x74, 0x62, 0x6f, 0x74, 0x2e, 0x20, + 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x64, 0x20, 0x69, 0x6e, 0x20, 0x73, + 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x6d, 0x61, 0x72, 0x6b, 0x64, 0x6f, + 0x77, 0x6e, 0x2e, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x22, 0x7b, + 0x7b, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, 0x7d, 0x5c, 0x6e, 0x5c, + 0x6e, 0x7b, 0x7b, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x7d, 0x7d, + 0x5c, 0x6e, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x69, 0x73, 0x74, + 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, + 0x20, 0x22, 0x7b, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x7d, 0x3a, 0x20, + 0x7b, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x7d, 0x22, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3a, 0x20, + 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x68, 0x61, 0x72, 0x3a, 0x20, 0x22, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x75, + 0x73, 0x65, 0x72, 0x3a, 0x20, 0x22, 0x55, 0x73, 0x65, 0x72, 0x22, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x65, + 0x64, 0x69, 0x63, 0x74, 0x3a, 0x20, 0x34, 0x30, 0x30, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x3a, 0x20, 0x30, 0x2e, 0x37, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, + 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x3a, 0x20, 0x32, 0x35, 0x36, 0x2c, + 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, + 0x62, 0x6c, 0x65, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x2c, + 0x20, 0x2d, 0x31, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, + 0x74, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, + 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x31, 0x2e, 0x31, 0x38, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x66, 0x73, 0x5f, 0x7a, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, - 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, - 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x3a, 0x20, 0x31, 0x2e, - 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, - 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, - 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, - 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x3a, 0x20, - 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2f, 0x31, 0x2f, 0x32, 0x0a, + 0x6f, 0x70, 0x5f, 0x6b, 0x3a, 0x20, 0x34, 0x30, 0x2c, 0x20, 0x2f, 0x2f, + 0x20, 0x3c, 0x3d, 0x20, 0x30, 0x20, 0x74, 0x6f, 0x20, 0x75, 0x73, 0x65, + 0x20, 0x76, 0x6f, 0x63, 0x61, 0x62, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, + 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x3a, + 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, + 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, + 0x6c, 0x5f, 0x70, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, + 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, + 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, + 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 
[hex byte array diff elided for readability: the "+" runs re-encode the tail of the web UI's default sampling parameters (penalty and mirostat settings) and append a new `grammar: null` entry; the "-" runs drop the old byte encoding of everything downstream of that insertion, from the close of the params block through the chat() function and the MessageInput key handlers, since every subsequent byte of the generated array shifts.]
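For readability, here is the end of that "+" run decoded back to JavaScript (a reconstruction from the hex above; the enclosing `const params = signal({` opens earlier in the generated file and is not shown in this hunk):

      frequency_penalty: 0.0, // 0.0 = disabled
      mirostat: 0, // 0/1/2
      mirostat_tau: 5, // target entropy
      mirostat_eta: 0.1, // learning rate
      grammar: null, // (new in this patch) optional grammar constraint
    })

    const llamaStats = signal(null)
    const controller = signal(null)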
[hex diff continues: the "+" runs re-encode unchanged UI logic at its new offsets (the generating/chatStarted computed signals, transcriptUpdate(), the template() placeholder-replace helper, and the chat() streaming function decoded below), while interleaved "-" runs remove the old encodings of the MessageInput form markup and the start of the old ChatLog component.]
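Decoded from the "+" bytes above, the chat() function that streams completions from the server (reconstructed verbatim from the hex; only the byte offsets changed in this hunk):

    // send message to server
    const chat = async (msg) => {
      if (controller.value) {
        console.log('already running...');
        return;
      }
      controller.value = new AbortController();

      transcriptUpdate([...session.value.transcript, ["{{user}}", msg]])

      const prompt = template(session.value.template, {
        message: msg,
        history: session.value.transcript.flatMap(([name, message]) => template(session.value.historyTemplate, {name, message})).join("\n"),
      });

      let currentMessage = '';
      const history = session.value.transcript

      const llamaParams = {
        ...params.value,
        stop: ["</s>", template("{{char}}:"), template("{{user}}:")],
      }

      for await (const chunk of llama(prompt, llamaParams, { controller: controller.value })) {
        const data = chunk.data;
        currentMessage += data.content;

        // remove leading whitespace
        currentMessage = currentMessage.replace(/^\s+/, "")

        transcriptUpdate([...history, ["{{char}}", currentMessage]])

        if (data.stop) {
          console.log("Completion finished: '", currentMessage, "', summary: ", data);
        }

        if (data.timings) {
          llamaStats.value = data.timings;
        }
      }

      controller.value = null;
    }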
[hex diff continues: "-" runs remove the old encoding of the ChatLog body (its scroll effect and chat-line renderer) and the opening of ConfigForm; "+" runs re-encode MessageInput's stop/reset/submit handlers and the Enter-to-send key handler decoded below.]
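Decoded from the "+" bytes above, the Enter-to-send handler and the textarea it is bound to (Enter submits, Shift+Enter inserts a newline):

      const enterSubmits = (event) => {
        if (event.which === 13 && !event.shiftKey) {
          submit(event);
        }
      }

      // bound in the form markup as:
      // <textarea type="text" rows=2 onkeypress=${enterSubmits} value="${message}"
      //   oninput=${(e) => message.value = e.target.value} placeholder="Say something..."/>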
[hex diff continues: "-" runs remove the old ConfigForm update helpers and field components; "+" runs re-encode the chat form's Send/Stop/Reset buttons and the ChatLog component, whose auto-scroll effect is decoded below.]
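Decoded from the "+" bytes above, ChatLog's effect that keeps the transcript pinned to the bottom while streaming, but only if the user is already within 300px of the bottom:

      useEffect(() => {
        // scroll to bottom (if needed)
        const parent = container.current.parentElement;
        if (parent && parent.scrollHeight <= parent.scrollTop + parent.offsetHeight + 300) {
          parent.scrollTo(0, parent.scrollHeight)
        }
      }, [messages])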
[hex diff continues: the "+" runs re-encode ConfigForm's updateSession/updateParams/updateParamsFloat/updateParamsInt helpers, then add the genuinely new code of this hunk: a grammarJsonSchemaPropOrder signal and a convertJSONSchemaGrammar() helper that converts a JSON schema pasted into the grammar field into a llama.cpp grammar via SchemaConverter, decoded below; the run then re-encodes the FloatField slider component.]
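Decoded from the "+" bytes above, the new JSON-schema-to-grammar conversion (reconstructed verbatim from the hex; SchemaConverter is imported from the web UI's completion helpers elsewhere in the patch):

      const grammarJsonSchemaPropOrder = signal('')
      const updateGrammarJsonSchemaPropOrder = (el) => grammarJsonSchemaPropOrder.value = el.target.value
      const convertJSONSchemaGrammar = () => {
        try {
          const schema = JSON.parse(params.value.grammar)
          const converter = new SchemaConverter(
            grammarJsonSchemaPropOrder.value
              .split(',')
              .reduce((acc, cur, i) => ({...acc, [cur.trim()]: i}), {})
          )
          converter.visit(schema, '')
          params.value = {
            ...params.value,
            grammar: converter.formatGrammar(),
          }
        } catch (e) {
          alert(`Convert failed: ${e.message}`)
        }
      }

The comma-separated prop-order string is folded into an object mapping each property name to its position, so the generated grammar emits object keys in the order the user specifies.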
[hex diff continues: "+" runs re-encode the IntField slider component and the settings form's Prompt fieldset (a textarea bound to session.value.prompt via updateSession); "-" runs remove the old encodings of the same form markup; the regenerated byte array continues below.]
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, + 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, + 0x75, 0x73, 0x65, 0x72, 0x22, 0x3e, 0x55, 0x73, 0x65, 0x72, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, + 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x75, 0x73, 0x65, 0x72, + 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, + 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x43, 0x68, 0x61, - 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, - 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x68, - 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, - 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, - 0x73, 0x3d, 0x31, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, - 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, - 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, - 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x72, 0x65, 0x64, 0x69, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, - 0x20, 0x32, 0x30, 0x34, 0x38, 0x2c, 0x20, 
0x6d, 0x69, 0x6e, 0x3a, 0x20, - 0x2d, 0x31, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6e, - 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x22, 0x2c, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x65, - 0x64, 0x69, 0x63, 0x74, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, - 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x65, 0x6d, 0x70, 0x65, 0x72, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, - 0x20, 0x31, 0x2e, 0x35, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, - 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, - 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x2c, - 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, - 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x7d, 0x29, 0x7d, 0x0a, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x62, + 0x6f, 0x74, 0x22, 0x3e, 0x42, 0x6f, 0x74, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, + 0x63, 0x68, 0x61, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x22, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, + 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, + 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, + 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 
0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x22, 0x3e, 0x43, 0x68, 0x61, 0x74, 0x20, 0x68, 0x69, + 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x65, - 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, - 0x74, 0x20, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x22, 0x2c, - 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x2e, 0x30, 0x2c, 0x20, 0x6d, - 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, - 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, - 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, - 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x7d, 0x29, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, - 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x43, 0x6f, 0x6e, 0x73, 0x69, - 0x64, 0x65, 0x72, 0x20, 0x4e, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, - 0x20, 0x66, 0x6f, 0x72, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x69, 0x7a, - 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, - 0x38, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, - 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x22, 0x2c, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, - 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x7d, 0x29, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, - 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x4b, - 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, - 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x2c, 0x20, 0x6d, 0x69, - 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, - 0x20, 0x22, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x22, 0x2c, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x7d, + 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x68, 0x69, 0x73, 0x74, 0x6f, + 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 
0x61, 0x74, 0x65, 0x22, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x68, + 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x31, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x22, 0x3e, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, + 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x22, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x22, + 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, + 0x3d, 0x22, 0x55, 0x73, 0x65, 0x20, 0x67, 0x62, 0x6e, 0x66, 0x20, 0x6f, + 0x72, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2b, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x22, 0x20, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, + 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, + 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x70, 0x2d, + 0x6f, 0x72, 0x64, 0x65, 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, + 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x6f, 0x72, 0x64, 0x65, + 0x72, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x31, 0x2c, 0x70, 0x72, 0x6f, + 0x70, 0x32, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x33, 0x22, 0x20, 0x6f, 0x6e, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, + 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, + 0x72, 0x64, 0x65, 0x72, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, + 0x22, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x6f, 0x6e, 0x63, + 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x76, 0x65, + 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x3e, 0x43, 0x6f, 0x6e, + 0x76, 0x65, 0x72, 0x74, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, + 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, + 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, + 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, + 0x22, 0x50, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, 0x38, + 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, + 0x69, 0x63, 0x74, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, - 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x50, 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, - 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, - 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x6f, 0x70, - 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, - 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x65, 0x74, 0x61, 0x69, - 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, - 0x3e, 0x4d, 0x6f, 0x72, 0x65, 0x20, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x3c, 0x2f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, - 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, - 0x64, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, - 0x46, 0x53, 0x2d, 0x5a, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, - 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, - 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x66, - 0x73, 0x5f, 0x7a, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, - 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x7d, 0x29, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 
0x46, 0x69, 0x65, 0x6c, - 0x64, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, - 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x20, 0x50, 0x22, 0x2c, 0x20, 0x6d, - 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, - 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, - 0x20, 0x22, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x22, - 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, - 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, - 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x7d, 0x29, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, - 0x64, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, - 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x70, 0x65, 0x6e, 0x61, - 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, - 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x70, 0x72, 0x65, - 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, + 0x22, 0x54, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x35, 0x2c, + 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, + 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, + 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, + 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x65, 0x6e, 0x61, 0x6c, 0x69, 0x7a, + 0x65, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x20, 0x73, 0x65, 0x71, + 0x75, 0x65, 0x6e, 0x63, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, + 0x20, 0x32, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x72, + 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, - 0x61, 0x6c, 0x74, 0x79, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, + 0x74, 0x79, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, + 0x20, 0x22, 0x43, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x20, 0x4e, + 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x22, 0x2c, 0x20, 0x6d, + 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, 0x38, 0x2c, 0x20, 0x6d, 0x69, + 0x6e, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 
0x65, 0x3a, 0x20, + 0x22, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, 0x74, + 0x5f, 0x6e, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, 0x74, + 0x5f, 0x6e, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, + 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x4b, 0x20, 0x73, 0x61, 0x6d, 0x70, + 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, + 0x31, 0x30, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x2d, 0x31, + 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x6f, 0x70, + 0x5f, 0x6b, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x46, 0x72, 0x65, 0x71, - 0x75, 0x65, 0x6e, 0x63, 0x79, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, - 0x79, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, - 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x66, 0x72, 0x65, 0x71, 0x75, - 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, - 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x66, - 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, - 0x61, 0x6c, 0x74, 0x79, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x72, 0x20, - 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, - 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x68, 0x72, 0x65, - 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, - 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, - 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x30, 0x22, 0x20, 0x63, 0x68, 0x65, - 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, - 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x30, 0x7d, 0x20, 0x6f, - 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, - 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x6e, 0x6f, 0x20, 0x4d, 0x69, 0x72, 0x6f, - 0x73, 0x74, 0x61, 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 
0x6c, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, - 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, - 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x31, 0x22, 0x20, 0x63, 0x68, - 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, - 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x31, 0x7d, 0x20, - 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, - 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, - 0x61, 0x74, 0x20, 0x76, 0x31, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, - 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x32, 0x22, 0x20, 0x63, - 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, - 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x32, 0x7d, - 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, - 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, - 0x74, 0x61, 0x74, 0x20, 0x76, 0x32, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, - 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x74, 0x61, 0x75, - 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2e, 0x30, - 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, - 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x22, 0x2c, 0x20, 0x73, 0x74, - 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, - 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, - 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x65, 0x74, 0x61, 0x22, 0x2c, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, + 0x50, 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 
0x6d, - 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, - 0x5f, 0x65, 0x74, 0x61, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, - 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, - 0x65, 0x74, 0x61, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, - 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x65, 0x74, 0x61, 0x69, - 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x70, 0x6f, 0x6f, 0x72, 0x20, 0x6d, 0x61, - 0x6e, 0x73, 0x20, 0x6d, 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x20, - 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x61, - 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x20, 0x3d, 0x20, - 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x6d, 0x64, 0x20, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, - 0x2f, 0x26, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x61, 0x6d, 0x70, 0x3b, - 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, - 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3c, 0x2f, 0x67, - 0x2c, 0x20, 0x27, 0x26, 0x6c, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x28, 0x2f, 0x3e, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x67, - 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, - 0x23, 0x7b, 0x31, 0x2c, 0x36, 0x7d, 0x20, 0x28, 0x2e, 0x2a, 0x29, 0x24, - 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, 0x3c, 0x68, 0x33, 0x3e, 0x24, - 0x31, 0x3c, 0x2f, 0x68, 0x33, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, - 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, 0x29, - 0x5c, 0x2a, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, 0x74, - 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, 0x72, - 0x6f, 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, - 0x2f, 0x5f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x5f, 0x2f, 0x67, - 0x2c, 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, - 0x31, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, - 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, - 0x3f, 0x29, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, 0x6d, - 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x65, 0x6d, 0x3e, 0x27, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 
0x5f, - 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, - 0x2f, 0x65, 0x6d, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, - 0x2f, 0x60, 0x60, 0x60, 0x2e, 0x2a, 0x3f, 0x5c, 0x6e, 0x28, 0x5b, 0x5c, - 0x73, 0x5c, 0x53, 0x5d, 0x2a, 0x3f, 0x29, 0x60, 0x60, 0x60, 0x2f, 0x67, - 0x2c, 0x20, 0x27, 0x3c, 0x70, 0x72, 0x65, 0x3e, 0x3c, 0x63, 0x6f, 0x64, - 0x65, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x3c, - 0x2f, 0x70, 0x72, 0x65, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x60, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x60, 0x2f, 0x67, 0x2c, - 0x20, 0x27, 0x3c, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, 0x2f, - 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x5c, 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, 0x3c, - 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, - 0x6d, 0x6c, 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, 0x64, 0x61, 0x6e, - 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, - 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x3d, 0x24, 0x7b, 0x7b, - 0x20, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x3a, 0x20, 0x6d, 0x64, 0x20, - 0x7d, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, - 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x21, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x2f, 0x3e, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, - 0x65, 0x72, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, 0x6d, 0x73, 0x2e, - 0x74, 0x6f, 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x29, 0x7d, 0x6d, 0x73, - 0x20, 0x70, 0x65, 0x72, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x2c, 0x20, - 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x64, 0x69, - 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x73, 0x65, 0x63, - 0x6f, 0x6e, 0x64, 0x2e, 0x74, 0x6f, 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, - 0x32, 0x29, 0x7d, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x20, 0x70, - 0x65, 0x72, 0x20, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 
0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x70, 0x70, 0x28, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, - 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, - 0x61, 0x69, 0x6e, 0x65, 0x72, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x68, 0x31, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, - 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x68, 0x31, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x68, 0x65, - 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6d, 0x61, 0x69, 0x6e, 0x20, 0x69, - 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x24, 0x7b, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, - 0x74, 0x65, 0x64, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, - 0x43, 0x68, 0x61, 0x74, 0x4c, 0x6f, 0x67, 0x20, 0x3a, 0x20, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x7d, 0x20, 0x2f, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x2f, 0x6d, 0x61, 0x69, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x77, 0x72, 0x69, 0x74, 0x65, - 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x6f, 0x74, - 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x3c, 0x24, 0x7b, 0x4d, 0x6f, - 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x7d, 0x20, 0x2f, 0x3e, 0x3c, 0x2f, 0x70, + 0x65, 0x3a, 0x20, 0x22, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x22, 0x2c, 0x20, + 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, + 0x70, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, + 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x4d, 0x6f, 0x72, 0x65, + 0x20, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x3c, 0x2f, 0x73, 0x75, + 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 
0x22, + 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, + 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x46, 0x53, 0x2d, 0x5a, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, + 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x22, 0x2c, + 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, + 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x66, 0x73, + 0x5f, 0x7a, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, + 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x79, 0x70, 0x69, 0x63, 0x61, + 0x6c, 0x20, 0x50, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, + 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, + 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x79, 0x70, + 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, + 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, + 0x5f, 0x70, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, + 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, + 0x63, 0x65, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, + 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, + 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3a, 0x20, 0x22, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, + 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, + 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x73, 0x65, + 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x7d, + 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x3a, 0x20, 0x22, 0x46, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, + 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, + 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, + 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, + 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, + 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x7d, + 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, + 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x68, 0x72, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, + 0x73, 0x3d, 0x22, 0x74, 0x68, 0x72, 0x65, 0x65, 0x22, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, + 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, + 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x30, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, + 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, + 0x3d, 0x3d, 0x20, 0x30, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, + 0x6e, 0x6f, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x3c, + 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, + 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3d, 0x22, 0x31, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, + 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, + 0x20, 0x3d, 0x3d, 0x20, 0x31, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, + 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x76, 0x31, + 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, + 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, + 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3d, 0x22, 0x32, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, + 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, + 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x32, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, + 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x76, + 0x32, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
  [... auto-generated hex dump of the embedded index.html continues; the added bytes decode to
       Mirostat tau/eta FloatField controls, a "poor mans markdown replacement" (Markdownish)
       helper, a ModelGenerationInfo stats component, and an updated App layout whose footer
       credits llama.cpp and ggml.ai ...]
 };
-unsigned int index_html_len = 13781;
+unsigned int index_html_len = 15084;
diff --git a/examples/server/index.js.hpp b/examples/server/index.js.hpp
index a3b5be6d8..c9dc078b7 100644
--- a/examples/server/index.js.hpp
+++ b/examples/server/index.js.hpp
@@ -4,1555 +4,1578 @@ unsigned char index_js[] = {
  [... auto-generated hex dump of the bundled index.js; the removed and added bytes appear to
       decode to an updated minified Preact + @preact/signals runtime (renamed minified
       identifiers and small library updates), with no hand-written changes; dump continues ...]
0x74, 0x69, 0x6f, 0x6e, 0x20, 0x52, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6e, 0x29, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x3f, 0x52, - 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x2e, 0x5f, - 0x5f, 0x6b, 0x2e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x74, - 0x29, 0x2b, 0x31, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x66, 0x6f, - 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, + 0x6f, 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3f, 0x6a, 0x28, + 0x74, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, + 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x21, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x26, + 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, + 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, + 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, + 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x3d, 0x30, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, 0x2b, 0x2b, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x5f, - 0x5f, 0x65, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3f, 0x52, - 0x28, 0x74, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x49, 0x28, 0x74, 0x29, 0x7b, 0x76, - 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, - 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, - 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, - 0x63, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x65, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, - 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x3d, 0x30, 0x3b, 0x6e, 0x3c, 0x74, - 0x2e, 0x5f, 0x5f, 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, - 0x6e, 0x2b, 0x2b, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, - 0x3d, 0x28, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, - 0x29, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, - 0x5f, 0x65, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, 0x2e, - 0x5f, 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x65, 0x2e, 0x5f, - 0x5f, 0x65, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x49, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x28, 0x74, 0x29, 0x7b, - 0x28, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x74, 0x2e, - 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x30, 0x29, 0x26, 0x26, 0x43, 0x2e, 0x70, - 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x26, 0x26, 0x21, 0x71, 0x2e, 0x5f, - 0x5f, 0x72, 0x2b, 0x2b, 0x7c, 0x7c, 0x45, 0x21, 0x3d, 0x3d, 0x53, 0x2e, - 0x64, 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x69, 0x6e, 0x67, 0x29, 0x26, 0x26, 0x28, 0x28, 0x45, 0x3d, - 0x53, 0x2e, 0x64, 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, - 0x6e, 
0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x29, 0x7c, 0x7c, 0x55, 0x29, - 0x28, 0x71, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x71, 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, - 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x43, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, - 0x48, 0x29, 0x3b, 0x74, 0x3d, 0x43, 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, - 0x28, 0x29, 0x3b, 0x29, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, - 0x6e, 0x3d, 0x43, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x69, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x2c, 0x72, 0x3d, 0x28, 0x6f, 0x3d, 0x28, 0x65, - 0x3d, 0x74, 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x2e, 0x5f, 0x5f, 0x65, - 0x2c, 0x28, 0x75, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x50, 0x29, 0x26, 0x26, - 0x28, 0x69, 0x3d, 0x5b, 0x5d, 0x2c, 0x28, 0x5f, 0x3d, 0x56, 0x28, 0x7b, - 0x7d, 0x2c, 0x6f, 0x29, 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x6f, 0x2e, - 0x5f, 0x5f, 0x76, 0x2b, 0x31, 0x2c, 0x6e, 0x74, 0x28, 0x75, 0x2c, 0x6f, - 0x2c, 0x5f, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x6e, 0x2c, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x75, 0x2e, 0x6f, 0x77, 0x6e, 0x65, - 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, - 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x2e, 0x5f, 0x5f, 0x68, 0x3f, - 0x5b, 0x72, 0x5d, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x72, 0x3f, 0x52, 0x28, 0x6f, 0x29, 0x3a, - 0x72, 0x2c, 0x6f, 0x2e, 0x5f, 0x5f, 0x68, 0x29, 0x2c, 0x65, 0x74, 0x28, - 0x69, 0x2c, 0x6f, 0x29, 0x2c, 0x6f, 0x2e, 0x5f, 0x5f, 0x65, 0x21, 0x3d, - 0x72, 0x26, 0x26, 0x49, 0x28, 0x6f, 0x29, 0x29, 0x2c, 0x43, 0x2e, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x6e, 0x26, 0x26, 0x43, 0x2e, 0x73, - 0x6f, 0x72, 0x74, 0x28, 0x48, 0x29, 0x29, 0x3b, 0x71, 0x2e, 0x5f, 0x5f, - 0x72, 0x3d, 0x30, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x42, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, - 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x6c, 0x2c, 0x66, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x73, 0x2c, 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, - 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x2c, 0x79, 0x3d, 0x69, 0x26, 0x26, 0x69, - 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, 0x7c, 0x44, 0x2c, 0x6d, 0x3d, 0x79, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x65, - 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x5b, 0x5d, 0x2c, 0x73, 0x3d, 0x30, 0x3b, - 0x73, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, - 0x2b, 0x2b, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x28, 0x61, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x73, 0x5d, 0x3d, - 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x61, 0x3d, 0x6e, 0x5b, 0x73, + 0x65, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, + 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, + 0x65, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x28, 0x74, 0x29, 0x7b, 0x28, + 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, + 0x5f, 0x64, 0x3d, 0x21, 0x30, 0x29, 0x26, 0x26, 0x55, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x74, 0x29, 0x26, 0x26, 0x21, 0x47, 0x2e, 0x5f, 0x5f, + 0x72, 0x2b, 0x2b, 0x7c, 0x7c, 0x48, 0x21, 0x3d, 0x3d, 0x77, 0x2e, 0x64, + 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, 0x65, + 0x72, 
0x69, 0x6e, 0x67, 0x29, 0x26, 0x26, 0x28, 0x28, 0x48, 0x3d, 0x77, + 0x2e, 0x64, 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, + 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x29, 0x7c, 0x7c, 0x4e, 0x29, 0x28, + 0x47, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x47, 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x2c, 0x6e, 0x2c, + 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, + 0x66, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x55, 0x2e, 0x73, 0x6f, 0x72, 0x74, + 0x28, 0x50, 0x29, 0x3b, 0x74, 0x3d, 0x55, 0x2e, 0x73, 0x68, 0x69, 0x66, + 0x74, 0x28, 0x29, 0x3b, 0x29, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, + 0x28, 0x6e, 0x3d, 0x55, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, + 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x2c, 0x75, 0x3d, 0x28, 0x72, 0x3d, 0x28, 0x65, 0x3d, 0x74, + 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x28, + 0x66, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x50, 0x29, 0x26, 0x26, 0x28, 0x69, + 0x3d, 0x5b, 0x5d, 0x2c, 0x5f, 0x3d, 0x5b, 0x5d, 0x2c, 0x28, 0x6f, 0x3d, + 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x72, 0x29, 0x29, 0x2e, 0x5f, 0x5f, 0x76, + 0x3d, 0x72, 0x2e, 0x5f, 0x5f, 0x76, 0x2b, 0x31, 0x2c, 0x69, 0x74, 0x28, + 0x66, 0x2c, 0x72, 0x2c, 0x6f, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x6e, 0x2c, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x66, 0x2e, 0x6f, + 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, + 0x6e, 0x74, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x72, 0x2e, 0x5f, + 0x5f, 0x68, 0x3f, 0x5b, 0x75, 0x5d, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, + 0x69, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x75, 0x3f, 0x6a, 0x28, + 0x72, 0x29, 0x3a, 0x75, 0x2c, 0x72, 0x2e, 0x5f, 0x5f, 0x68, 0x2c, 0x5f, + 0x29, 0x2c, 0x5f, 0x74, 0x28, 0x69, 0x2c, 0x72, 0x2c, 0x5f, 0x29, 0x2c, + 0x72, 0x2e, 0x5f, 0x5f, 0x65, 0x21, 0x3d, 0x75, 0x26, 0x26, 0x42, 0x28, + 0x72, 0x29, 0x29, 0x2c, 0x55, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3e, 0x6e, 0x26, 0x26, 0x55, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, 0x50, + 0x29, 0x29, 0x3b, 0x47, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x7a, 0x28, 0x74, 0x2c, + 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, + 0x75, 0x2c, 0x66, 0x2c, 0x6c, 0x2c, 0x73, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, + 0x2c, 0x79, 0x2c, 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 0x6b, 0x3d, 0x30, + 0x2c, 0x53, 0x3d, 0x69, 0x26, 0x26, 0x69, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, + 0x7c, 0x54, 0x2c, 0x78, 0x3d, 0x53, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x2c, 0x77, 0x3d, 0x78, 0x2c, 0x43, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, + 0x5f, 0x6b, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x3d, 0x30, 0x3b, 0x63, 0x3c, + 0x43, 0x3b, 0x63, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x28, 0x70, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x63, 0x5d, 0x3d, + 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x70, 0x3d, 0x6e, 0x5b, 0x63, 0x5d, 0x29, 0x7c, 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x61, 0x7c, + 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x7c, 0x7c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, - 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x61, 0x3f, 0x6e, 0x75, + 0x3d, 0x74, 
0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x3f, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, - 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x61, 0x7c, 0x7c, 0x22, + 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x7c, 0x7c, 0x22, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x61, 0x7c, 0x7c, 0x22, 0x62, 0x69, 0x67, 0x69, + 0x65, 0x6f, 0x66, 0x20, 0x70, 0x7c, 0x7c, 0x22, 0x62, 0x69, 0x67, 0x69, 0x6e, 0x74, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, - 0x61, 0x3f, 0x4d, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x61, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x61, 0x29, 0x3a, - 0x54, 0x28, 0x61, 0x29, 0x3f, 0x4d, 0x28, 0x4f, 0x2c, 0x7b, 0x63, 0x68, - 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x61, 0x7d, 0x2c, 0x6e, 0x75, + 0x70, 0x3f, 0x4f, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x70, 0x2c, 0x6e, + 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x70, 0x29, 0x3a, + 0x41, 0x28, 0x70, 0x29, 0x3f, 0x4f, 0x28, 0x52, 0x2c, 0x7b, 0x63, 0x68, + 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x70, 0x7d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x29, 0x3a, 0x61, 0x2e, 0x5f, 0x5f, 0x62, 0x3e, 0x30, 0x3f, 0x4d, 0x28, - 0x61, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x61, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x2c, 0x61, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x61, 0x2e, 0x72, - 0x65, 0x66, 0x3f, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x3a, 0x6e, 0x75, 0x6c, - 0x6c, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x3a, 0x61, 0x29, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x61, 0x2e, 0x5f, 0x5f, 0x3d, 0x65, 0x2c, 0x61, - 0x2e, 0x5f, 0x5f, 0x62, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x62, 0x2b, 0x31, - 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x28, 0x68, 0x3d, 0x79, - 0x5b, 0x73, 0x5d, 0x29, 0x7c, 0x7c, 0x68, 0x26, 0x26, 0x61, 0x2e, 0x6b, - 0x65, 0x79, 0x3d, 0x3d, 0x68, 0x2e, 0x6b, 0x65, 0x79, 0x26, 0x26, 0x61, - 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x3d, 0x3d, 0x68, 0x2e, 0x74, 0x79, - 0x70, 0x65, 0x29, 0x79, 0x5b, 0x73, 0x5d, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x66, 0x6f, 0x72, 0x28, - 0x63, 0x3d, 0x30, 0x3b, 0x63, 0x3c, 0x6d, 0x3b, 0x63, 0x2b, 0x2b, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x28, 0x68, 0x3d, 0x79, 0x5b, 0x63, 0x5d, 0x29, - 0x26, 0x26, 0x61, 0x2e, 0x6b, 0x65, 0x79, 0x3d, 0x3d, 0x68, 0x2e, 0x6b, - 0x65, 0x79, 0x26, 0x26, 0x61, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x3d, - 0x3d, 0x68, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x79, 0x5b, 0x63, - 0x5d, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x62, 0x72, 0x65, - 0x61, 0x6b, 0x7d, 0x68, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x6e, 0x74, - 0x28, 0x74, 0x2c, 0x61, 0x2c, 0x68, 0x3d, 0x68, 0x7c, 0x7c, 0x50, 0x2c, - 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x6c, 0x2c, 0x66, 0x29, - 0x2c, 0x70, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x28, 0x63, 0x3d, - 0x61, 0x2e, 0x72, 0x65, 0x66, 0x29, 0x26, 0x26, 0x68, 0x2e, 0x72, 0x65, - 0x66, 0x21, 0x3d, 0x63, 0x26, 0x26, 0x28, 0x76, 0x7c, 0x7c, 0x28, 0x76, - 0x3d, 0x5b, 0x5d, 0x29, 0x2c, 0x68, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, - 0x76, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x68, 0x2e, 0x72, 0x65, 0x66, - 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x61, 0x29, 0x2c, 0x76, 0x2e, 0x70, - 0x75, 0x73, 0x68, 0x28, 0x63, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x63, 0x7c, - 0x7c, 0x70, 0x2c, 0x61, 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, - 0x3d, 0x70, 0x3f, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x64, 0x26, - 0x26, 0x28, 0x64, 
0x3d, 0x70, 0x29, 0x2c, 0x22, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x61, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x61, 0x2e, - 0x5f, 0x5f, 0x6b, 0x3d, 0x3d, 0x3d, 0x68, 0x2e, 0x5f, 0x5f, 0x6b, 0x3f, - 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x6c, 0x3d, 0x47, 0x28, 0x61, 0x2c, - 0x6c, 0x2c, 0x74, 0x29, 0x3a, 0x6c, 0x3d, 0x4a, 0x28, 0x74, 0x2c, 0x61, - 0x2c, 0x68, 0x2c, 0x79, 0x2c, 0x70, 0x2c, 0x6c, 0x29, 0x2c, 0x22, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, - 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, - 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x6c, 0x29, 0x29, 0x3a, - 0x6c, 0x26, 0x26, 0x68, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x3d, 0x6c, 0x26, - 0x26, 0x6c, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, - 0x65, 0x21, 0x3d, 0x74, 0x26, 0x26, 0x28, 0x6c, 0x3d, 0x52, 0x28, 0x68, - 0x29, 0x29, 0x7d, 0x66, 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x65, - 0x3d, 0x64, 0x2c, 0x73, 0x3d, 0x6d, 0x3b, 0x73, 0x2d, 0x2d, 0x3b, 0x29, - 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x79, 0x5b, 0x73, 0x5d, 0x26, 0x26, - 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, - 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x2e, 0x74, 0x79, - 0x70, 0x65, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x79, 0x5b, - 0x73, 0x5d, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, 0x79, 0x5b, 0x73, 0x5d, - 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x26, - 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x4b, 0x28, 0x69, 0x29, - 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, - 0x29, 0x2c, 0x6f, 0x74, 0x28, 0x79, 0x5b, 0x73, 0x5d, 0x2c, 0x79, 0x5b, - 0x73, 0x5d, 0x29, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x29, 0x66, 0x6f, - 0x72, 0x28, 0x73, 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x76, 0x2e, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, 0x5f, 0x74, 0x28, - 0x76, 0x5b, 0x73, 0x5d, 0x2c, 0x76, 0x5b, 0x2b, 0x2b, 0x73, 0x5d, 0x2c, - 0x76, 0x5b, 0x2b, 0x2b, 0x73, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x47, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x69, 0x2c, - 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6f, 0x3d, 0x30, 0x3b, - 0x5f, 0x26, 0x26, 0x6f, 0x3c, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, 0x28, 0x69, 0x3d, 0x5f, 0x5b, 0x6f, - 0x5d, 0x29, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, - 0x6e, 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, - 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x2e, 0x74, - 0x79, 0x70, 0x65, 0x3f, 0x47, 0x28, 0x69, 0x2c, 0x6e, 0x2c, 0x65, 0x29, - 0x3a, 0x4a, 0x28, 0x65, 0x2c, 0x69, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x69, - 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x29, 0x29, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x7a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x3d, 0x6e, 0x7c, 0x7c, 0x5b, 0x5d, - 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x7c, 0x7c, 0x22, 0x62, - 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x74, 0x7c, 0x7c, 0x28, 0x54, 0x28, 0x74, 0x29, - 0x3f, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x7a, 0x28, 0x74, - 0x2c, 0x6e, 0x29, 
0x7d, 0x29, 0x29, 0x3a, 0x6e, 0x2e, 0x70, 0x75, 0x73, - 0x68, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x72, 0x2c, 0x75, 0x2c, 0x6c, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x64, 0x29, - 0x72, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x64, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, - 0x64, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x65, 0x6c, 0x73, - 0x65, 0x20, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, - 0x7c, 0x7c, 0x5f, 0x21, 0x3d, 0x6f, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, - 0x3d, 0x3d, 0x5f, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, - 0x64, 0x65, 0x29, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, - 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, - 0x61, 0x70, 0x70, 0x65, 0x6e, 0x64, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, - 0x5f, 0x29, 0x2c, 0x72, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x65, 0x6c, - 0x73, 0x65, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x75, 0x3d, 0x6f, 0x2c, 0x6c, - 0x3d, 0x30, 0x3b, 0x28, 0x75, 0x3d, 0x75, 0x2e, 0x6e, 0x65, 0x78, 0x74, - 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, 0x29, 0x26, 0x26, 0x6c, 0x3c, - 0x69, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6c, 0x2b, 0x3d, - 0x31, 0x29, 0x69, 0x66, 0x28, 0x75, 0x3d, 0x3d, 0x5f, 0x29, 0x62, 0x72, - 0x65, 0x61, 0x6b, 0x20, 0x74, 0x3b, 0x74, 0x2e, 0x69, 0x6e, 0x73, 0x65, - 0x72, 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x28, 0x5f, 0x2c, 0x6f, - 0x29, 0x2c, 0x72, 0x3d, 0x6f, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x72, 0x3f, - 0x72, 0x3a, 0x5f, 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, - 0x69, 0x6e, 0x67, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x4b, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, - 0x65, 0x2c, 0x69, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, - 0x3d, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x7c, 0x7c, 0x22, 0x73, 0x74, - 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3b, 0x69, 0x66, - 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6e, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x2d, 0x31, 0x3b, 0x6e, 0x3e, 0x3d, 0x30, 0x3b, 0x6e, 0x2d, 0x2d, - 0x29, 0x69, 0x66, 0x28, 0x28, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, - 0x5b, 0x6e, 0x5d, 0x29, 0x26, 0x26, 0x28, 0x69, 0x3d, 0x4b, 0x28, 0x65, - 0x29, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x28, 0x74, - 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, 0x76, 0x61, - 0x72, 0x20, 0x6f, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, - 0x20, 0x65, 0x29, 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, - 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x6b, 0x65, 0x79, 0x22, - 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, - 0x7c, 0x7c, 0x59, 0x28, 0x74, 0x2c, 0x6f, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x2c, 0x65, 0x5b, 0x6f, 0x5d, 0x2c, 0x69, 0x29, 0x3b, 0x66, 0x6f, 0x72, - 0x28, 0x6f, 0x20, 
0x69, 0x6e, 0x20, 0x6e, 0x29, 0x5f, 0x26, 0x26, 0x22, + 0x29, 0x3a, 0x70, 0x2e, 0x5f, 0x5f, 0x62, 0x3e, 0x30, 0x3f, 0x4f, 0x28, + 0x70, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x70, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x2c, 0x70, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x70, 0x2e, 0x72, + 0x65, 0x66, 0x3f, 0x70, 0x2e, 0x72, 0x65, 0x66, 0x3a, 0x6e, 0x75, 0x6c, + 0x6c, 0x2c, 0x70, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x3a, 0x70, 0x29, 0x26, + 0x26, 0x28, 0x70, 0x2e, 0x5f, 0x5f, 0x3d, 0x65, 0x2c, 0x70, 0x2e, 0x5f, + 0x5f, 0x62, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x62, 0x2b, 0x31, 0x2c, 0x2d, + 0x31, 0x3d, 0x3d, 0x3d, 0x28, 0x6d, 0x3d, 0x58, 0x28, 0x70, 0x2c, 0x53, + 0x2c, 0x79, 0x3d, 0x63, 0x2b, 0x6b, 0x2c, 0x77, 0x29, 0x29, 0x3f, 0x61, + 0x3d, 0x44, 0x3a, 0x28, 0x61, 0x3d, 0x53, 0x5b, 0x6d, 0x5d, 0x7c, 0x7c, + 0x44, 0x2c, 0x53, 0x5b, 0x6d, 0x5d, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x2c, 0x77, 0x2d, 0x2d, 0x29, 0x2c, 0x69, 0x74, 0x28, 0x74, 0x2c, + 0x70, 0x2c, 0x61, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, + 0x66, 0x2c, 0x6c, 0x2c, 0x73, 0x29, 0x2c, 0x64, 0x3d, 0x70, 0x2e, 0x5f, + 0x5f, 0x65, 0x2c, 0x28, 0x68, 0x3d, 0x70, 0x2e, 0x72, 0x65, 0x66, 0x29, + 0x26, 0x26, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x21, 0x3d, 0x68, 0x26, 0x26, + 0x28, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x72, 0x74, 0x28, 0x61, + 0x2e, 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x70, 0x29, + 0x2c, 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x68, 0x2c, 0x70, 0x2e, + 0x5f, 0x5f, 0x63, 0x7c, 0x7c, 0x64, 0x2c, 0x70, 0x29, 0x29, 0x2c, 0x6e, + 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x64, 0x26, 0x26, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x3d, 0x3d, 0x76, 0x26, 0x26, 0x28, 0x76, 0x3d, 0x64, 0x29, 0x2c, + 0x62, 0x3d, 0x21, 0x28, 0x67, 0x3d, 0x61, 0x3d, 0x3d, 0x3d, 0x44, 0x7c, + 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, + 0x76, 0x29, 0x26, 0x26, 0x6d, 0x3d, 0x3d, 0x3d, 0x79, 0x2c, 0x67, 0x3f, + 0x2d, 0x31, 0x3d, 0x3d, 0x6d, 0x26, 0x26, 0x6b, 0x2d, 0x2d, 0x3a, 0x6d, + 0x21, 0x3d, 0x3d, 0x79, 0x26, 0x26, 0x28, 0x6d, 0x3d, 0x3d, 0x3d, 0x79, + 0x2b, 0x31, 0x3f, 0x28, 0x6b, 0x2b, 0x2b, 0x2c, 0x62, 0x3d, 0x21, 0x30, + 0x29, 0x3a, 0x6d, 0x3e, 0x79, 0x3f, 0x77, 0x3e, 0x43, 0x2d, 0x79, 0x3f, + 0x28, 0x6b, 0x2b, 0x3d, 0x6d, 0x2d, 0x79, 0x2c, 0x62, 0x3d, 0x21, 0x30, + 0x29, 0x3a, 0x6b, 0x2d, 0x2d, 0x3a, 0x6b, 0x3d, 0x6d, 0x3c, 0x79, 0x26, + 0x26, 0x6d, 0x3d, 0x3d, 0x79, 0x2d, 0x31, 0x3f, 0x6d, 0x2d, 0x79, 0x3a, + 0x30, 0x29, 0x2c, 0x79, 0x3d, 0x63, 0x2b, 0x6b, 0x2c, 0x62, 0x3d, 0x62, + 0x7c, 0x7c, 0x6d, 0x3d, 0x3d, 0x63, 0x26, 0x26, 0x21, 0x67, 0x2c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x5b, 0x6f, 0x5d, 0x7c, 0x7c, - 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, - 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x3d, - 0x6f, 0x7c, 0x7c, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x3d, 0x3d, - 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, - 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x65, 0x5b, 0x6f, 0x5d, 0x3d, - 0x3d, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x7c, 0x7c, 0x59, 0x28, 0x74, 0x2c, - 0x6f, 0x2c, 0x6e, 0x5b, 0x6f, 0x5d, 0x2c, 0x65, 0x5b, 0x6f, 0x5d, 0x2c, - 0x69, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x58, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x22, 0x2d, 0x22, - 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x3f, 0x74, 0x2e, 0x73, 0x65, - 0x74, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x6e, 0x2c, - 0x6e, 0x75, 0x6c, 
0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x65, - 0x29, 0x3a, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, - 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x22, 0x6e, 0x75, 0x6d, 0x62, 0x65, - 0x72, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, - 0x7c, 0x7c, 0x24, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x28, 0x6e, 0x29, 0x3f, - 0x65, 0x3a, 0x65, 0x2b, 0x22, 0x70, 0x78, 0x22, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, 0x28, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6f, - 0x3b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x79, 0x6c, 0x65, - 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x29, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, - 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x65, 0x29, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2e, - 0x63, 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, 0x65, 0x3b, 0x65, 0x6c, - 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, - 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, - 0x26, 0x26, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, - 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, 0x69, 0x3d, 0x22, 0x22, 0x29, - 0x2c, 0x69, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, - 0x69, 0x29, 0x65, 0x26, 0x26, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x7c, - 0x7c, 0x58, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2c, 0x6e, - 0x2c, 0x22, 0x22, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x65, 0x29, 0x66, 0x6f, - 0x72, 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x29, 0x69, 0x26, 0x26, - 0x65, 0x5b, 0x6e, 0x5d, 0x3d, 0x3d, 0x3d, 0x69, 0x5b, 0x6e, 0x5d, 0x7c, - 0x7c, 0x58, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2c, 0x6e, - 0x2c, 0x65, 0x5b, 0x6e, 0x5d, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, - 0x69, 0x66, 0x28, 0x22, 0x6f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x30, - 0x5d, 0x26, 0x26, 0x22, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x31, - 0x5d, 0x29, 0x6f, 0x3d, 0x6e, 0x21, 0x3d, 0x3d, 0x28, 0x6e, 0x3d, 0x6e, - 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x43, 0x61, - 0x70, 0x74, 0x75, 0x72, 0x65, 0x24, 0x2f, 0x2c, 0x22, 0x22, 0x29, 0x29, - 0x2c, 0x6e, 0x3d, 0x6e, 0x2e, 0x74, 0x6f, 0x4c, 0x6f, 0x77, 0x65, 0x72, - 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x69, 0x6e, 0x20, 0x74, 0x3f, 0x6e, - 0x2e, 0x74, 0x6f, 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, - 0x28, 0x29, 0x2e, 0x73, 0x6c, 0x69, 0x63, 0x65, 0x28, 0x32, 0x29, 0x3a, - 0x6e, 0x2e, 0x73, 0x6c, 0x69, 0x63, 0x65, 0x28, 0x32, 0x29, 0x2c, 0x74, - 0x2e, 0x6c, 0x7c, 0x7c, 0x28, 0x74, 0x2e, 0x6c, 0x3d, 0x7b, 0x7d, 0x29, - 0x2c, 0x74, 0x2e, 0x6c, 0x5b, 0x6e, 0x2b, 0x6f, 0x5d, 0x3d, 0x65, 0x2c, - 0x65, 0x3f, 0x69, 0x7c, 0x7c, 0x74, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, - 0x6e, 0x2c, 0x6f, 0x3f, 0x74, 0x74, 0x3a, 0x5a, 0x2c, 0x6f, 0x29, 0x3a, - 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, - 0x6f, 0x3f, 0x74, 0x74, 0x3a, 0x5a, 0x2c, 0x6f, 0x29, 0x3b, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x22, 0x64, 0x61, 0x6e, 0x67, 0x65, - 0x72, 0x6f, 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, - 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x5f, 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, - 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x78, 0x6c, 0x69, 0x6e, 0x6b, - 0x28, 0x48, 0x7c, 
0x3a, 0x68, 0x29, 0x2f, 0x2c, 0x22, 0x68, 0x22, 0x29, - 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x73, 0x4e, - 0x61, 0x6d, 0x65, 0x24, 0x2f, 0x2c, 0x22, 0x73, 0x22, 0x29, 0x3b, 0x65, - 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x22, 0x77, 0x69, 0x64, 0x74, - 0x68, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x65, 0x69, - 0x67, 0x68, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, - 0x72, 0x65, 0x66, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x6c, - 0x69, 0x73, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x66, - 0x6f, 0x72, 0x6d, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x74, - 0x61, 0x62, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0x21, 0x3d, 0x3d, 0x6e, - 0x26, 0x26, 0x22, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x22, - 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x72, 0x6f, 0x77, 0x53, 0x70, - 0x61, 0x6e, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x63, 0x6f, - 0x6c, 0x53, 0x70, 0x61, 0x6e, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, - 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, - 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, - 0x22, 0x22, 0x3a, 0x65, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x74, - 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x7d, 0x22, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x7c, 0x7c, 0x6d, 0x3d, 0x3d, 0x3d, 0x79, 0x26, 0x26, 0x61, 0x2e, 0x5f, + 0x5f, 0x6b, 0x21, 0x3d, 0x3d, 0x70, 0x2e, 0x5f, 0x5f, 0x6b, 0x3f, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, 0x28, 0x6e, 0x75, - 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x7c, 0x7c, 0x21, 0x31, 0x3d, 0x3d, 0x3d, - 0x65, 0x26, 0x26, 0x22, 0x2d, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x5b, 0x34, - 0x5d, 0x3f, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, - 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x74, - 0x2e, 0x73, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, - 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x28, 0x74, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, - 0x5b, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x31, 0x5d, 0x28, - 0x53, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x3f, 0x53, 0x2e, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x74, 0x28, 0x74, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x6c, 0x5b, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x30, - 0x5d, 0x28, 0x53, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x3f, 0x53, 0x2e, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6e, 0x74, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, - 0x72, 0x2c, 0x75, 0x2c, 0x6c, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x66, - 0x2c, 0x73, 0x2c, 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, - 0x2c, 0x76, 0x2c, 0x79, 0x2c, 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 0x6b, - 0x2c, 0x78, 0x2c, 0x77, 0x2c, 0x43, 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, - 0x65, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, - 0x3d, 0x3d, 0x6e, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, - 0x74, 0x6f, 0x72, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, - 0x75, 0x6c, 0x6c, 
0x3b, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, - 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x28, 0x6c, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x68, 0x2c, 0x75, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, - 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x6e, 0x75, - 0x6c, 0x6c, 0x2c, 0x6f, 0x3d, 0x5b, 0x75, 0x5d, 0x29, 0x2c, 0x28, 0x66, - 0x3d, 0x53, 0x2e, 0x5f, 0x5f, 0x62, 0x29, 0x26, 0x26, 0x66, 0x28, 0x6e, - 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x7c, 0x7c, 0x62, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x70, 0x2e, 0x5f, 0x5f, 0x64, 0x3f, 0x28, 0x66, 0x3d, 0x70, 0x2e, + 0x5f, 0x5f, 0x64, 0x2c, 0x70, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x29, 0x3a, 0x66, 0x3d, 0x64, 0x2e, 0x6e, 0x65, + 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, 0x3a, 0x66, 0x3d, + 0x51, 0x28, 0x74, 0x2c, 0x64, 0x2c, 0x66, 0x29, 0x3a, 0x66, 0x3d, 0x4a, + 0x28, 0x70, 0x2c, 0x66, 0x2c, 0x74, 0x29, 0x2c, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x28, + 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x66, 0x29, 0x29, 0x29, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x76, 0x2c, 0x63, + 0x3d, 0x78, 0x3b, 0x63, 0x2d, 0x2d, 0x3b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x53, 0x5b, 0x63, 0x5d, 0x26, 0x26, 0x28, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x53, 0x5b, 0x63, 0x5d, 0x2e, 0x5f, + 0x5f, 0x65, 0x26, 0x26, 0x53, 0x5b, 0x63, 0x5d, 0x2e, 0x5f, 0x5f, 0x65, + 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x65, 0x2e, + 0x5f, 0x5f, 0x64, 0x3d, 0x53, 0x5b, 0x63, 0x5d, 0x2e, 0x5f, 0x5f, 0x65, + 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, + 0x29, 0x2c, 0x75, 0x74, 0x28, 0x53, 0x5b, 0x63, 0x5d, 0x2c, 0x53, 0x5b, + 0x63, 0x5d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x4a, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x66, + 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x69, 0x2c, 0x5f, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6f, 0x3d, 0x30, 0x3b, 0x5f, 0x26, 0x26, + 0x6f, 0x3c, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6f, + 0x2b, 0x2b, 0x29, 0x28, 0x69, 0x3d, 0x5f, 0x5b, 0x6f, 0x5d, 0x29, 0x26, + 0x26, 0x28, 0x69, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x6e, 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x43, 0x29, 0x7b, 0x69, 0x66, 0x28, - 0x76, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x79, 0x3d, - 0x28, 0x66, 0x3d, 0x43, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x54, 0x79, 0x70, 0x65, 0x29, 0x26, 0x26, 0x69, 0x5b, 0x66, 0x2e, 0x5f, - 0x5f, 0x63, 0x5d, 0x2c, 0x6d, 0x3d, 0x66, 0x3f, 0x79, 0x3f, 0x79, 0x2e, - 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x66, 0x2e, 0x5f, 0x5f, 0x3a, 0x69, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x63, - 0x3f, 0x64, 0x3d, 0x28, 0x73, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, - 0x65, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x3d, 0x73, 0x2e, - 0x5f, 0x5f, 0x45, 0x3a, 0x28, 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, - 0x79, 0x70, 0x65, 0x22, 0x69, 0x6e, 0x20, 0x43, 0x26, 0x26, 0x43, 0x2e, - 0x70, 0x72, 0x6f, 
0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x3f, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x73, - 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x28, 0x76, 0x2c, 0x6d, 0x29, 0x3a, - 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x73, 0x3d, 0x6e, 0x65, 0x77, - 0x20, 0x4c, 0x28, 0x76, 0x2c, 0x6d, 0x29, 0x2c, 0x73, 0x2e, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3d, 0x43, 0x2c, - 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x72, 0x74, 0x29, - 0x2c, 0x79, 0x26, 0x26, 0x79, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x73, 0x29, - 0x2c, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x76, 0x2c, 0x73, - 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x73, 0x2e, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x73, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x6d, 0x2c, 0x73, 0x2e, 0x5f, - 0x5f, 0x6e, 0x3d, 0x69, 0x2c, 0x63, 0x3d, 0x73, 0x2e, 0x5f, 0x5f, 0x64, - 0x3d, 0x21, 0x30, 0x2c, 0x73, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, - 0x2c, 0x73, 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x29, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, - 0x28, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x73, 0x2e, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x43, 0x2e, - 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, - 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, - 0x26, 0x26, 0x28, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x3d, 0x73, 0x2e, - 0x73, 0x74, 0x61, 0x74, 0x65, 0x26, 0x26, 0x28, 0x73, 0x2e, 0x5f, 0x5f, - 0x73, 0x3d, 0x56, 0x28, 0x7b, 0x7d, 0x2c, 0x73, 0x2e, 0x5f, 0x5f, 0x73, - 0x29, 0x29, 0x2c, 0x56, 0x28, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x43, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x3f, 0x4a, 0x28, 0x69, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x3a, 0x51, 0x28, + 0x65, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x29, 0x29, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x28, 0x74, 0x2c, 0x6e, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x3d, 0x6e, 0x7c, + 0x7c, 0x5b, 0x5d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x7c, + 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x3d, 0x3d, + 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x7c, 0x7c, 0x28, 0x41, + 0x28, 0x74, 0x29, 0x3f, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, + 0x4b, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x29, 0x29, 0x3a, 0x6e, 0x2e, + 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x28, 0x74, 0x2c, + 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x7c, 0x7c, 0x65, 0x2e, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x21, 0x3d, 0x3d, + 0x74, 0x3f, 0x74, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x42, 0x65, + 0x66, 0x6f, 0x72, 0x65, 0x28, 0x6e, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, + 0x3a, 0x6e, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, + 0x3d, 0x6e, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, + 0x65, 0x7c, 0x7c, 0x74, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x42, + 0x65, 0x66, 0x6f, 0x72, 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x2c, 0x6e, + 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, + 0x7d, 0x66, 0x75, 
0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x5f, 0x3d, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x6f, 0x3d, 0x74, + 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x72, 0x3d, 0x65, 0x2d, 0x31, 0x2c, + 0x75, 0x3d, 0x65, 0x2b, 0x31, 0x2c, 0x66, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, + 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x66, + 0x7c, 0x7c, 0x66, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, + 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x3b, 0x69, + 0x66, 0x28, 0x69, 0x3e, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x66, + 0x3f, 0x31, 0x3a, 0x30, 0x29, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x72, + 0x3e, 0x3d, 0x30, 0x7c, 0x7c, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x3b, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x3d, + 0x30, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x72, + 0x5d, 0x29, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, 0x79, + 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x72, 0x3b, 0x72, 0x2d, + 0x2d, 0x7d, 0x69, 0x66, 0x28, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, + 0x5b, 0x75, 0x5d, 0x29, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, + 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, + 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x3b, + 0x75, 0x2b, 0x2b, 0x7d, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x2d, + 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, + 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, + 0x69, 0x6e, 0x20, 0x65, 0x29, 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, + 0x65, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x6b, 0x65, + 0x79, 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x6f, 0x20, 0x69, 0x6e, + 0x20, 0x6e, 0x7c, 0x7c, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x6f, 0x2c, 0x6e, + 0x75, 0x6c, 0x6c, 0x2c, 0x65, 0x5b, 0x6f, 0x5d, 0x2c, 0x69, 0x29, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x5f, + 0x26, 0x26, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, + 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x5b, 0x6f, + 0x5d, 0x7c, 0x7c, 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, + 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x6b, 0x65, 0x79, 0x22, + 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x63, 0x68, 0x65, 0x63, + 0x6b, 0x65, 0x64, 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x65, 0x5b, + 0x6f, 0x5d, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x7c, 0x7c, 0x74, + 0x74, 0x28, 0x74, 0x2c, 0x6f, 0x2c, 0x6e, 0x5b, 0x6f, 0x5d, 0x2c, 0x65, + 0x5b, 0x6f, 0x5d, 0x2c, 0x69, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, + 0x7b, 0x22, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x3f, + 0x74, 0x2e, 0x73, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, + 0x79, 0x28, 0x6e, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, + 0x22, 0x22, 0x3a, 0x65, 0x29, 0x3a, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, + 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x22, 0x6e, + 0x75, 0x6d, 0x62, 
0x65, 0x72, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, 0x56, 0x2e, 0x74, 0x65, 0x73, 0x74, + 0x28, 0x6e, 0x29, 0x3f, 0x65, 0x3a, 0x65, 0x2b, 0x22, 0x70, 0x78, 0x22, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x74, + 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, + 0x73, 0x74, 0x79, 0x6c, 0x65, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x29, 0x69, + 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, + 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x29, 0x74, 0x2e, 0x73, + 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, + 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, 0x22, + 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x69, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x73, 0x74, + 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, + 0x69, 0x3d, 0x22, 0x22, 0x29, 0x2c, 0x69, 0x29, 0x66, 0x6f, 0x72, 0x28, + 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x69, 0x29, 0x65, 0x26, 0x26, 0x6e, 0x20, + 0x69, 0x6e, 0x20, 0x65, 0x7c, 0x7c, 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, + 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, 0x22, 0x22, 0x29, 0x3b, 0x69, 0x66, + 0x28, 0x65, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, + 0x65, 0x29, 0x69, 0x26, 0x26, 0x65, 0x5b, 0x6e, 0x5d, 0x3d, 0x3d, 0x3d, + 0x69, 0x5b, 0x6e, 0x5d, 0x7c, 0x7c, 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, + 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, 0x65, 0x5b, 0x6e, 0x5d, 0x29, 0x7d, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x22, 0x6f, 0x22, 0x3d, + 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x26, 0x26, 0x22, 0x6e, 0x22, 0x3d, + 0x3d, 0x3d, 0x6e, 0x5b, 0x31, 0x5d, 0x29, 0x6f, 0x3d, 0x6e, 0x21, 0x3d, + 0x3d, 0x28, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x28, 0x2f, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, 0x24, 0x2f, + 0x2c, 0x22, 0x22, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x6e, 0x2e, 0x74, 0x6f, + 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x69, + 0x6e, 0x20, 0x74, 0x3f, 0x6e, 0x2e, 0x74, 0x6f, 0x4c, 0x6f, 0x77, 0x65, + 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x2e, 0x73, 0x6c, 0x69, 0x63, + 0x65, 0x28, 0x32, 0x29, 0x3a, 0x6e, 0x2e, 0x73, 0x6c, 0x69, 0x63, 0x65, + 0x28, 0x32, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x7c, 0x7c, 0x28, 0x74, 0x2e, + 0x6c, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x5b, 0x6e, 0x2b, + 0x6f, 0x5d, 0x3d, 0x65, 0x2c, 0x65, 0x3f, 0x69, 0x7c, 0x7c, 0x74, 0x2e, + 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, + 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, 0x3a, + 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x3a, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, + 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, + 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, 0x3a, 0x6e, + 0x74, 0x2c, 0x6f, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, + 0x28, 0x22, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, + 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, + 0x4c, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x5f, + 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, + 0x28, 0x2f, 0x78, 0x6c, 0x69, 0x6e, 0x6b, 0x28, 0x48, 0x7c, 0x3a, 0x68, + 0x29, 0x2f, 0x2c, 0x22, 0x68, 0x22, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x28, 0x2f, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x24, 0x2f, + 0x2c, 0x22, 0x73, 
0x22, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, + 0x66, 0x28, 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, 0x22, 0x21, 0x3d, 0x3d, + 0x6e, 0x26, 0x26, 0x22, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0x21, + 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x72, 0x65, 0x66, 0x22, 0x21, + 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x6c, 0x69, 0x73, 0x74, 0x22, 0x21, + 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x66, 0x6f, 0x72, 0x6d, 0x22, 0x21, + 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x74, 0x61, 0x62, 0x49, 0x6e, 0x64, + 0x65, 0x78, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x64, 0x6f, + 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, + 0x26, 0x22, 0x72, 0x6f, 0x77, 0x53, 0x70, 0x61, 0x6e, 0x22, 0x21, 0x3d, + 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x63, 0x6f, 0x6c, 0x53, 0x70, 0x61, 0x6e, + 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x6e, 0x20, 0x69, 0x6e, 0x20, + 0x74, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, + 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x65, 0x3b, + 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x74, 0x7d, 0x63, 0x61, 0x74, 0x63, + 0x68, 0x28, 0x74, 0x29, 0x7b, 0x7d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, + 0x20, 0x65, 0x7c, 0x7c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, + 0x7c, 0x7c, 0x21, 0x31, 0x3d, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x22, 0x2d, + 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x5b, 0x34, 0x5d, 0x3f, 0x74, 0x2e, 0x72, + 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, + 0x74, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x41, + 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, + 0x29, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, 0x2e, 0x74, + 0x79, 0x70, 0x65, 0x2b, 0x21, 0x31, 0x5d, 0x28, 0x77, 0x2e, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x3f, 0x77, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x28, + 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, + 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x30, 0x5d, 0x28, 0x77, 0x2e, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x3f, 0x77, 0x2e, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, + 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, + 0x66, 0x2c, 0x6c, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x73, 0x2c, 0x63, + 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x2c, 0x79, + 0x2c, 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 0x6b, 0x2c, 0x53, 0x2c, 0x78, + 0x2c, 0x43, 0x2c, 0x45, 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, + 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, + 0x6e, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, + 0x72, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x3b, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, + 0x68, 0x26, 0x26, 0x28, 0x66, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x68, 0x2c, + 0x75, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, + 0x65, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, + 0x2c, 0x6f, 0x3d, 0x5b, 0x75, 0x5d, 0x29, 0x2c, 0x28, 0x73, 0x3d, 0x77, + 0x2e, 0x5f, 0x5f, 
[Elided: continuation of a machine-generated diff hunk updating an embedded C byte array of minified JavaScript, apparently produced with an `xxd -i`-style tool (12 lowercase hex bytes per line). The decoded bytes are Preact's component-diffing code; identifiers such as `getDerivedStateFromProps`, `componentWillUnmount`, and `dangerouslySetInnerHTML` are visible in the hex. The `-` lines carry the previously generated bytes and the `+` lines the regenerated ones; only the generated hex values change, and the dump continues below.]
0x68, 0x28, 0x70, + 0x74, 0x2c, 0x74, 0x2c, 0x79, 0x74, 0x7c, 0x7c, 0x6e, 0x29, 0x2c, 0x79, + 0x74, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x70, 0x74, + 0x2e, 0x5f, 0x5f, 0x48, 0x7c, 0x7c, 0x28, 0x70, 0x74, 0x2e, 0x5f, 0x5f, + 0x48, 0x3d, 0x7b, 0x5f, 0x5f, 0x3a, 0x5b, 0x5d, 0x2c, 0x5f, 0x5f, 0x68, + 0x3a, 0x5b, 0x5d, 0x7d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x3e, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x7b, 0x5f, 0x5f, 0x56, 0x3a, 0x67, 0x74, 0x7d, 0x29, + 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x5b, 0x74, 0x5d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x45, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x74, 0x3d, 0x31, 0x2c, + 0x55, 0x74, 0x28, 0x42, 0x74, 0x2c, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x55, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x43, 0x74, + 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x32, 0x29, 0x3b, 0x69, 0x66, 0x28, + 0x69, 0x2e, 0x74, 0x3d, 0x74, 0x2c, 0x21, 0x69, 0x2e, 0x5f, 0x5f, 0x63, + 0x26, 0x26, 0x28, 0x69, 0x2e, 0x5f, 0x5f, 0x3d, 0x5b, 0x65, 0x3f, 0x65, + 0x28, 0x6e, 0x29, 0x3a, 0x42, 0x74, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x2c, 0x6e, 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x69, + 0x2e, 0x5f, 0x5f, 0x4e, 0x3f, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, 0x5b, 0x30, + 0x5d, 0x3a, 0x69, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x2c, 0x65, 0x3d, + 0x69, 0x2e, 0x74, 0x28, 0x6e, 0x2c, 0x74, 0x29, 0x3b, 0x6e, 0x21, 0x3d, + 0x3d, 0x65, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x5b, + 0x65, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x5b, 0x31, 0x5d, 0x5d, 0x2c, 0x69, + 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, + 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x5d, 0x2c, 0x69, 0x2e, 0x5f, + 0x5f, 0x63, 0x3d, 0x70, 0x74, 0x2c, 0x21, 0x70, 0x74, 0x2e, 0x75, 0x29, + 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x21, 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, 0x5f, + 0x48, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x76, + 0x61, 0x72, 0x20, 0x5f, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, + 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, - 0x5f, 0x4e, 0x7d, 0x29, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x21, 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, - 0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x3b, 0x76, - 0x61, 0x72, 0x20, 0x72, 0x3d, 0x21, 0x31, 0x3b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, + 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, + 0x5f, 0x63, 0x7d, 0x29, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x2e, 0x65, + 0x76, 0x65, 0x72, 0x79, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x7d, 0x29, 0x29, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x21, 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x63, 0x61, + 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 
0x2c, 0x6e, 0x2c, + 0x65, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x72, 0x3d, 0x21, 0x31, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x5f, 0x2e, 0x66, 0x6f, 0x72, + 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, + 0x5f, 0x4e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, + 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x4e, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x21, 0x3d, 0x3d, 0x74, 0x2e, + 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x26, 0x26, 0x28, 0x72, 0x3d, 0x21, 0x30, + 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x21, 0x28, 0x21, 0x72, 0x26, 0x26, + 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, + 0x3d, 0x3d, 0x74, 0x29, 0x26, 0x26, 0x28, 0x21, 0x6f, 0x7c, 0x7c, 0x6f, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, + 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x3b, 0x70, 0x74, 0x2e, 0x75, + 0x3d, 0x21, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x70, 0x74, + 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x2c, 0x72, + 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3b, + 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, + 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, + 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x5f, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x6f, 0x3b, + 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x28, 0x74, + 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x2c, 0x6f, 0x3d, 0x69, 0x7d, 0x72, 0x26, + 0x26, 0x72, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, 0x2c, 0x70, 0x74, 0x2e, + 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, + 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x5f, 0x7d, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, + 0x7c, 0x7c, 0x69, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x48, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, + 0x2b, 0x2c, 0x33, 0x29, 0x3b, 0x21, 0x77, 0x2e, 0x5f, 0x5f, 0x73, 0x26, + 0x26, 0x6a, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, + 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x65, 0x2e, + 0x69, 0x3d, 0x6e, 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, + 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x65, 0x29, 0x29, 0x7d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4e, 0x74, 0x28, + 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x43, + 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x34, 0x29, 0x3b, 0x21, 0x77, + 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x6a, 0x74, 0x28, 0x65, 0x2e, 0x5f, + 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, + 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x70, 0x74, 0x2e, + 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x65, 0x29, 0x29, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x50, 0x74, + 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 
0x6e, 0x20, 0x79, + 0x74, 0x3d, 0x35, 0x2c, 0x44, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x74, 0x7d, + 0x7d, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x24, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x29, 0x7b, 0x79, 0x74, 0x3d, 0x36, 0x2c, 0x4e, 0x74, 0x28, 0x28, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, + 0x3f, 0x28, 0x74, 0x28, 0x6e, 0x28, 0x29, 0x29, 0x2c, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x29, + 0x3a, 0x74, 0x3f, 0x28, 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x3d, 0x6e, 0x28, 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x7d, 0x29, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, + 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x65, 0x3a, + 0x65, 0x2e, 0x63, 0x6f, 0x6e, 0x63, 0x61, 0x74, 0x28, 0x74, 0x29, 0x29, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x44, 0x74, + 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, + 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x37, 0x29, 0x3b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x65, 0x2e, 0x5f, + 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x3f, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x56, + 0x3d, 0x74, 0x28, 0x29, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x65, + 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x56, + 0x29, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x54, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x74, 0x3d, 0x38, 0x2c, + 0x44, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, + 0x29, 0x2c, 0x6e, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x56, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x6e, 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x5b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x5d, 0x2c, 0x65, 0x3d, 0x43, 0x74, + 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x39, 0x29, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x63, 0x3d, 0x74, 0x2c, 0x6e, 0x3f, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x26, + 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x21, 0x30, 0x2c, 0x6e, 0x2e, + 0x73, 0x75, 0x62, 0x28, 0x70, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3a, + 0x74, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x41, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x77, 0x2e, + 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x26, 0x26, 0x77, 0x2e, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, + 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x28, 0x6e, 0x3f, 0x6e, 0x28, 0x74, + 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x46, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 
0x61, 0x72, 0x20, + 0x6e, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x31, 0x30, + 0x29, 0x2c, 0x65, 0x3d, 0x45, 0x74, 0x28, 0x29, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x70, + 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, + 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x7c, 0x7c, 0x28, 0x70, 0x74, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, + 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x5f, + 0x5f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x28, 0x74, 0x2c, 0x69, 0x29, + 0x2c, 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x74, 0x29, 0x7d, 0x29, 0x2c, 0x5b, + 0x65, 0x5b, 0x30, 0x5d, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x29, 0x7b, 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x29, 0x7d, 0x5d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x74, 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x74, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x31, + 0x31, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x29, + 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, + 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x3d, 0x6e, 0x26, 0x26, 0x21, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, + 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, 0x76, 0x61, 0x72, 0x20, + 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x7c, 0x7c, 0x28, 0x6e, 0x2e, + 0x5f, 0x5f, 0x6d, 0x3d, 0x5b, 0x30, 0x2c, 0x30, 0x5d, 0x29, 0x3b, 0x74, + 0x2e, 0x5f, 0x5f, 0x3d, 0x22, 0x50, 0x22, 0x2b, 0x65, 0x5b, 0x30, 0x5d, + 0x2b, 0x22, 0x2d, 0x22, 0x2b, 0x65, 0x5b, 0x31, 0x5d, 0x2b, 0x2b, 0x7d, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x7d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x57, 0x74, 0x28, + 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, + 0x74, 0x3d, 0x6d, 0x74, 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, 0x28, 0x29, + 0x3b, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x50, 0x26, 0x26, + 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x2e, + 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, + 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, + 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, + 0x68, 0x28, 0x49, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, + 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, + 0x28, 0x75, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, + 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x77, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x75, + 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x77, 0x2e, 0x5f, + 0x5f, 0x62, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x29, 0x7b, 0x70, 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x62, + 0x74, 0x26, 0x26, 0x62, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x77, 0x2e, + 0x5f, 0x5f, 0x72, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x29, 0x7b, 0x6b, 0x74, 0x26, 0x26, 0x6b, 0x74, 0x28, 0x74, + 0x29, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, + 0x3d, 0x28, 0x70, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x2e, + 0x5f, 0x5f, 0x48, 0x3b, 0x6e, 0x26, 0x26, 0x28, 0x64, 0x74, 0x3d, 0x3d, + 0x3d, 0x70, 0x74, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 
0x68, 0x3d, 0x5b, + 0x5d, 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, + 0x6e, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, - 0x5d, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, - 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x2c, 0x6e, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, - 0x5d, 0x26, 0x26, 0x28, 0x72, 0x3d, 0x21, 0x30, 0x29, 0x7d, 0x7d, 0x29, - 0x29, 0x2c, 0x21, 0x28, 0x21, 0x72, 0x26, 0x26, 0x69, 0x2e, 0x5f, 0x5f, - 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x3d, 0x3d, 0x74, 0x29, - 0x26, 0x26, 0x28, 0x21, 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x63, 0x61, 0x6c, - 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x29, 0x29, 0x7d, 0x3b, 0x68, 0x74, 0x2e, 0x75, 0x3d, 0x21, 0x30, 0x3b, - 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x68, 0x74, 0x2e, 0x73, 0x68, 0x6f, - 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x2c, 0x72, 0x3d, 0x68, 0x74, 0x2e, - 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, - 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3b, 0x68, 0x74, 0x2e, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x29, 0x2c, 0x6f, 0x3d, 0x69, 0x7d, 0x72, 0x26, 0x26, 0x72, 0x2e, 0x63, - 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x29, 0x7d, 0x2c, 0x68, 0x74, 0x2e, 0x73, 0x68, 0x6f, 0x75, - 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x5f, 0x7d, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, 0x7c, 0x7c, 0x69, 0x2e, - 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x45, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x65, 0x3d, 0x78, 0x74, 0x28, 0x63, 0x74, 0x2b, 0x2b, 0x2c, 0x33, 0x29, - 0x3b, 0x21, 0x53, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x52, 0x74, 0x28, - 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, 0x65, - 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, - 0x68, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, - 0x75, 0x73, 0x68, 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x55, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, - 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x78, 0x74, 0x28, 0x63, 0x74, - 0x2b, 0x2b, 0x2c, 0x34, 0x29, 0x3b, 0x21, 0x53, 0x2e, 0x5f, 0x5f, 0x73, - 0x26, 0x26, 0x52, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, - 0x29, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x65, - 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x68, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x48, 0x74, 0x28, 0x74, 0x29, 0x7b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x64, 0x74, 
0x3d, 0x35, 0x2c, - 0x50, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x74, 0x7d, 0x7d, 0x29, 0x2c, 0x5b, - 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x4e, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x64, 0x74, - 0x3d, 0x36, 0x2c, 0x55, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x3f, 0x28, 0x74, 0x28, - 0x6e, 0x28, 0x29, 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x29, 0x3a, 0x74, 0x3f, 0x28, - 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x28, - 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x29, - 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x29, 0x2c, 0x6e, 0x75, - 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x65, 0x3a, 0x65, 0x2e, 0x63, 0x6f, - 0x6e, 0x63, 0x61, 0x74, 0x28, 0x74, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x50, 0x74, 0x28, 0x74, 0x2c, 0x6e, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x78, 0x74, 0x28, 0x63, - 0x74, 0x2b, 0x2b, 0x2c, 0x37, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x52, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, - 0x29, 0x3f, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, 0x74, 0x28, 0x29, - 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x68, - 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x3a, 0x65, 0x2e, - 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x44, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x64, 0x74, 0x3d, 0x38, 0x2c, 0x50, 0x74, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, 0x29, 0x2c, 0x6e, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x24, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x68, 0x74, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x5b, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x5d, 0x2c, 0x65, 0x3d, 0x78, 0x74, 0x28, 0x63, 0x74, 0x2b, - 0x2b, 0x2c, 0x39, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x65, 0x2e, 0x63, 0x3d, 0x74, 0x2c, 0x6e, 0x3f, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x28, 0x65, 0x2e, - 0x5f, 0x5f, 0x3d, 0x21, 0x30, 0x2c, 0x6e, 0x2e, 0x73, 0x75, 0x62, 0x28, - 0x68, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3a, 0x74, 0x2e, 0x5f, 0x5f, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x54, 0x74, - 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x53, 0x2e, 0x75, 0x73, 0x65, 0x44, - 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x26, 0x26, 0x53, - 0x2e, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x28, 0x6e, 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x56, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 
0x3d, 0x78, 0x74, - 0x28, 0x63, 0x74, 0x2b, 0x2b, 0x2c, 0x31, 0x30, 0x29, 0x2c, 0x65, 0x3d, - 0x77, 0x74, 0x28, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6e, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x68, 0x74, 0x2e, 0x63, 0x6f, - 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, - 0x74, 0x63, 0x68, 0x7c, 0x7c, 0x28, 0x68, 0x74, 0x2e, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, - 0x63, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x2c, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x6e, - 0x2e, 0x5f, 0x5f, 0x28, 0x74, 0x2c, 0x69, 0x29, 0x2c, 0x65, 0x5b, 0x31, - 0x5d, 0x28, 0x74, 0x29, 0x7d, 0x29, 0x2c, 0x5b, 0x65, 0x5b, 0x30, 0x5d, - 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, - 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, - 0x7d, 0x5d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x41, 0x74, 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3d, 0x78, - 0x74, 0x28, 0x63, 0x74, 0x2b, 0x2b, 0x2c, 0x31, 0x31, 0x29, 0x3b, 0x69, - 0x66, 0x28, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x7b, 0x66, 0x6f, 0x72, - 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x68, 0x74, 0x2e, 0x5f, 0x5f, - 0x76, 0x3b, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, - 0x21, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, 0x29, 0x6e, 0x3d, 0x6e, - 0x2e, 0x5f, 0x5f, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x6e, 0x2e, - 0x5f, 0x5f, 0x6d, 0x7c, 0x7c, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x3d, - 0x5b, 0x30, 0x2c, 0x30, 0x5d, 0x29, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, - 0x22, 0x50, 0x22, 0x2b, 0x65, 0x5b, 0x30, 0x5d, 0x2b, 0x22, 0x2d, 0x22, - 0x2b, 0x65, 0x5b, 0x31, 0x5d, 0x2b, 0x2b, 0x7d, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x46, 0x74, 0x28, 0x29, 0x7b, 0x66, 0x6f, - 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, 0x74, 0x3d, 0x76, 0x74, - 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, 0x28, 0x29, 0x3b, 0x29, 0x69, 0x66, - 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x50, 0x26, 0x26, 0x74, 0x2e, 0x5f, 0x5f, - 0x48, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, - 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, - 0x4f, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, - 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x4c, 0x74, - 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, - 0x5b, 0x5d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x75, 0x29, 0x7b, - 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, - 0x2c, 0x53, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x75, 0x2c, 0x74, 0x2e, 0x5f, - 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x53, 0x2e, 0x5f, 0x5f, 0x62, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x68, - 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6d, 0x74, 0x26, 0x26, 0x6d, - 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x53, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x67, 0x74, 0x26, 0x26, 0x67, 0x74, 0x28, 0x74, 0x29, 0x2c, 0x63, 0x74, - 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x28, 0x68, 0x74, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x48, 0x3b, - 0x6e, 0x26, 0x26, 0x28, 0x61, 0x74, 0x3d, 0x3d, 0x3d, 0x68, 0x74, 0x3f, - 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 
0x2c, 0x68, 0x74, - 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, - 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, - 0x5f, 0x5f, 0x4e, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, - 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, - 0x79, 0x74, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x74, 0x2e, 0x69, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x29, 0x29, 0x29, 0x3a, - 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, - 0x63, 0x68, 0x28, 0x4f, 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, - 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x4c, 0x74, 0x29, - 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x74, - 0x3d, 0x30, 0x29, 0x29, 0x2c, 0x61, 0x74, 0x3d, 0x68, 0x74, 0x7d, 0x2c, - 0x53, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x62, 0x74, 0x26, - 0x26, 0x62, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x6e, 0x26, 0x26, 0x6e, 0x2e, - 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, - 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, - 0x28, 0x31, 0x21, 0x3d, 0x3d, 0x76, 0x74, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x6e, 0x29, 0x26, 0x26, 0x70, 0x74, 0x3d, 0x3d, 0x3d, 0x53, 0x2e, - 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x7c, 0x7c, 0x28, - 0x28, 0x70, 0x74, 0x3d, 0x53, 0x2e, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, - 0x61, 0x6d, 0x65, 0x29, 0x7c, 0x7c, 0x57, 0x74, 0x29, 0x28, 0x46, 0x74, - 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, - 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x69, - 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x74, 0x2e, 0x69, - 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x21, 0x3d, 0x3d, 0x79, 0x74, - 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, - 0x56, 0x29, 0x2c, 0x74, 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, 0x79, 0x74, 0x7d, 0x29, - 0x29, 0x29, 0x2c, 0x61, 0x74, 0x3d, 0x68, 0x74, 0x3d, 0x6e, 0x75, 0x6c, - 0x6c, 0x7d, 0x2c, 0x53, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x6e, - 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x74, + 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x26, 0x26, 0x28, 0x74, 0x2e, + 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x2c, 0x74, 0x2e, + 0x5f, 0x5f, 0x56, 0x3d, 0x67, 0x74, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, + 0x3d, 0x74, 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, + 0x29, 0x29, 0x29, 0x3a, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, + 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, - 0x28, 0x4f, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, - 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x28, + 0x28, 0x49, 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 
0x68, 0x3d, 0x5b, + 0x5d, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x29, 0x29, 0x2c, 0x64, 0x74, 0x3d, + 0x70, 0x74, 0x7d, 0x2c, 0x77, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, + 0x7b, 0x53, 0x74, 0x26, 0x26, 0x53, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, + 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x6e, + 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x6e, 0x2e, + 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x26, 0x26, 0x28, 0x31, 0x21, 0x3d, 0x3d, 0x6d, 0x74, 0x2e, + 0x70, 0x75, 0x73, 0x68, 0x28, 0x6e, 0x29, 0x26, 0x26, 0x76, 0x74, 0x3d, + 0x3d, 0x3d, 0x77, 0x2e, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, + 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, + 0x65, 0x7c, 0x7c, 0x28, 0x28, 0x76, 0x74, 0x3d, 0x77, 0x2e, 0x72, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x29, 0x7c, 0x7c, 0x4c, 0x74, + 0x29, 0x28, 0x57, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, + 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, 0x5f, - 0x7c, 0x7c, 0x4c, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x63, - 0x61, 0x74, 0x63, 0x68, 0x28, 0x73, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, - 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x28, - 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x29, 0x7d, 0x29, 0x29, - 0x2c, 0x6e, 0x3d, 0x5b, 0x5d, 0x2c, 0x53, 0x2e, 0x5f, 0x5f, 0x65, 0x28, - 0x73, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x29, 0x29, - 0x2c, 0x6b, 0x74, 0x26, 0x26, 0x6b, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, - 0x7d, 0x2c, 0x53, 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x53, 0x74, 0x26, 0x26, 0x53, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, 0x61, - 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, - 0x65, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x65, - 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, - 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x4f, 0x74, 0x28, - 0x74, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, - 0x6e, 0x3d, 0x74, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, - 0x48, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x26, 0x26, - 0x53, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, - 0x76, 0x29, 0x29, 0x7d, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x4d, 0x74, 0x3d, - 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, - 0x72, 0x61, 0x6d, 0x65, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x57, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6e, 0x2c, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x63, 0x6c, 0x65, 0x61, 0x72, 0x54, 0x69, 0x6d, 0x65, - 0x6f, 0x75, 0x74, 0x28, 0x69, 0x29, 0x2c, 0x4d, 0x74, 0x26, 0x26, 0x63, - 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x41, 0x6e, 0x69, 0x6d, 
0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x6e, 0x29, 0x2c, 0x73, - 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x29, - 0x7d, 0x2c, 0x69, 0x3d, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, - 0x75, 0x74, 0x28, 0x65, 0x2c, 0x31, 0x30, 0x30, 0x29, 0x3b, 0x4d, 0x74, - 0x26, 0x26, 0x28, 0x6e, 0x3d, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, - 0x6d, 0x65, 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, - 0x72, 0x20, 0x6e, 0x3d, 0x68, 0x74, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x3b, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x26, - 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x2c, 0x65, 0x28, 0x29, 0x29, 0x2c, 0x68, 0x74, 0x3d, 0x6e, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x68, 0x74, - 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x28, - 0x29, 0x2c, 0x68, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x52, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, 0x7c, 0x7c, 0x74, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7c, 0x7c, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, - 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6e, 0x21, 0x3d, 0x3d, 0x74, 0x5b, 0x65, 0x5d, 0x7d, 0x29, 0x29, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x49, 0x74, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x3f, 0x6e, 0x28, 0x74, 0x29, - 0x3a, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x6a, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x53, 0x5b, 0x74, 0x5d, - 0x3d, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x75, 0x6c, 0x6c, - 0x2c, 0x53, 0x5b, 0x74, 0x5d, 0x7c, 0x7c, 0x28, 0x28, 0x29, 0x3d, 0x3e, - 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x71, 0x74, 0x2c, - 0x42, 0x74, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x47, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x42, 0x74, 0x29, - 0x42, 0x74, 0x28, 0x29, 0x3b, 0x42, 0x74, 0x3d, 0x74, 0x26, 0x26, 0x74, - 0x2e, 0x53, 0x28, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x7a, 0x74, 0x28, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x74, - 0x7d, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x4b, - 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3d, 0x74, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x50, - 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x77, 0x68, - 0x69, 0x6c, 0x65, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x69, - 0x66, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x7b, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x62, - 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, - 0x24, 0x75, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, 0x3e, 
0x7b, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x64, 0x61, 0x74, 0x61, - 0x3d, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x64, 0x28, 0x28, 0x29, 0x3d, 0x3e, + 0x7b, 0x74, 0x2e, 0x69, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x48, + 0x3d, 0x74, 0x2e, 0x69, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x21, + 0x3d, 0x3d, 0x67, 0x74, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, + 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x2c, 0x74, 0x2e, 0x69, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, + 0x67, 0x74, 0x7d, 0x29, 0x29, 0x29, 0x2c, 0x64, 0x74, 0x3d, 0x70, 0x74, + 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x2c, 0x77, 0x2e, 0x5f, 0x5f, 0x63, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, + 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, + 0x72, 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, + 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, + 0x5f, 0x68, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x69, 0x6c, + 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, + 0x74, 0x2e, 0x5f, 0x5f, 0x7c, 0x7c, 0x49, 0x74, 0x28, 0x74, 0x29, 0x7d, + 0x29, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x73, 0x29, 0x7b, + 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, + 0x68, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, + 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x5b, 0x5d, 0x2c, 0x77, 0x2e, + 0x5f, 0x5f, 0x65, 0x28, 0x73, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, + 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x78, 0x74, 0x26, 0x26, 0x78, 0x74, 0x28, + 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x2c, 0x77, 0x2e, 0x75, 0x6e, 0x6d, 0x6f, + 0x75, 0x6e, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x29, 0x7b, 0x77, 0x74, 0x26, 0x26, 0x77, 0x74, 0x28, 0x74, + 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, + 0x5f, 0x5f, 0x63, 0x3b, 0x65, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x48, + 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, + 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x72, 0x79, + 0x7b, 0x52, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, + 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x7d, 0x7d, 0x29, 0x29, 0x2c, + 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x2c, 0x6e, 0x26, 0x26, 0x77, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x6e, 0x2c, + 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x29, 0x7d, 0x3b, 0x76, 0x61, 0x72, + 0x20, 0x4f, 0x74, 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x72, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x3b, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6c, 0x65, 0x61, 0x72, + 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x69, 0x29, 0x2c, 0x4f, + 0x74, 0x26, 0x26, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 
0x41, 0x6e, 0x69, + 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, + 0x6e, 0x29, 0x2c, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, + 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x69, 0x3d, 0x73, 0x65, 0x74, 0x54, + 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x65, 0x2c, 0x31, 0x30, 0x30, + 0x29, 0x3b, 0x4f, 0x74, 0x26, 0x26, 0x28, 0x6e, 0x3d, 0x72, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x52, 0x74, 0x28, 0x74, + 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x2c, 0x65, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x22, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x65, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x65, 0x28, 0x29, 0x29, 0x2c, + 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x49, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x6e, 0x3d, 0x70, 0x74, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x28, 0x29, 0x2c, 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, + 0x7c, 0x7c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x21, 0x3d, + 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7c, 0x7c, 0x6e, + 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x21, 0x3d, 0x3d, 0x74, 0x5b, 0x65, 0x5d, + 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x42, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x3f, + 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, + 0x77, 0x5b, 0x74, 0x5d, 0x3d, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, + 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x77, 0x5b, 0x74, 0x5d, 0x7c, 0x7c, 0x28, + 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x6c, 0x65, 0x74, + 0x20, 0x47, 0x74, 0x2c, 0x7a, 0x74, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x7a, 0x74, 0x29, 0x7a, 0x74, 0x28, 0x29, 0x3b, 0x7a, 0x74, 0x3d, + 0x74, 0x26, 0x26, 0x74, 0x2e, 0x53, 0x28, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x74, 0x28, 0x7b, 0x64, 0x61, + 0x74, 0x61, 0x3a, 0x74, 0x7d, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6e, 0x3d, 0x58, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x65, 0x3d, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, + 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, + 0x76, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x74, 0x3d, 0x74, 0x2e, + 0x5f, 0x5f, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, + 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, + 0x3d, 0x34, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x2e, 0x63, 0x3d, 
0x28, 0x29, 0x3d, + 0x3e, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, 0x69, 0x66, 0x28, 0x21, + 0x45, 0x28, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x29, 0x26, + 0x26, 0x33, 0x3d, 0x3d, 0x3d, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, + 0x28, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, + 0x29, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3a, 0x74, 0x2e, 0x6e, + 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, 0x29, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, + 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x3b, 0x65, 0x6c, 0x73, + 0x65, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, + 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x65, 0x74, 0x53, + 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x7d, 0x3b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x30, 0x3a, 0x21, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x22, 0x22, 0x3a, 0x74, 0x7c, 0x7c, 0x22, 0x22, 0x7d, 0x29, 0x7d, 0x2c, 0x5b, 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x7d, 0x7a, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, + 0x65, 0x7d, 0x4b, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x5f, 0x73, 0x74, 0x22, 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, - 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x28, 0x66, + 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x2c, 0x74, 0x79, 0x70, 0x65, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, - 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x7a, + 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x4b, 0x74, 0x7d, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, @@ -1560,7 +1583,7 @@ unsigned char index_js[] = { 0x7d, 0x7d, 0x7d, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x31, 0x7d, 0x7d, 0x29, 0x3b, - 0x6a, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x62, 0x22, 0x2c, 0x28, 0x74, 0x2c, + 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x62, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x6c, 0x65, 0x74, @@ -1570,34 +1593,34 @@ unsigned char index_js[] = { 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x63, 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, 0x65, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x65, 0x5b, 0x69, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x20, - 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x66, + 
0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x63, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, 0x70, 0x3d, 0x74, 0x3d, 0x7b, 0x7d, 0x3b, 0x74, 0x5b, 0x69, 0x5d, 0x3d, 0x5f, 0x3b, 0x65, 0x5b, 0x69, 0x5d, 0x3d, 0x5f, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, - 0x29, 0x3b, 0x6a, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x72, 0x22, 0x2c, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x47, 0x74, 0x28, 0x29, 0x3b, + 0x29, 0x3b, 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x72, 0x22, 0x2c, 0x28, + 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x2c, 0x69, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x69, 0x29, 0x7b, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x65, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x65, 0x29, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3d, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3b, 0x62, 0x28, 0x28, + 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3b, 0x53, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x29, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, 0x29, 0x7d, 0x71, 0x74, 0x3d, 0x69, - 0x3b, 0x47, 0x74, 0x28, 0x65, 0x29, 0x3b, 0x74, 0x28, 0x6e, 0x29, 0x7d, - 0x29, 0x3b, 0x6a, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x65, 0x22, 0x2c, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x3d, 0x3e, 0x7b, 0x47, - 0x74, 0x28, 0x29, 0x3b, 0x71, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, 0x29, 0x7d, 0x47, 0x74, 0x3d, 0x69, + 0x3b, 0x4a, 0x74, 0x28, 0x65, 0x29, 0x3b, 0x74, 0x28, 0x6e, 0x29, 0x7d, + 0x29, 0x3b, 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x65, 0x22, 0x2c, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, + 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7d, 0x29, - 0x3b, 0x6a, 0x74, 0x28, 0x22, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x22, - 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x47, 0x74, 0x28, - 0x29, 0x3b, 0x71, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, + 0x3b, 0x71, 0x74, 0x28, 0x22, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x22, + 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, 0x74, 0x28, + 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x28, 0x65, @@ -1617,19 +1640,19 @@ unsigned char index_js[] = { 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6f, 0x3d, 0x6e, 0x5b, 0x5f, 0x5d, 0x2c, 0x72, 0x3d, 0x74, 0x5b, 0x5f, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6f, 0x29, 0x7b, 0x6f, 0x3d, - 0x4a, 0x74, 0x28, 0x65, 0x2c, 0x5f, 0x2c, 0x72, 0x2c, 0x69, 0x29, 0x3b, + 0x51, 0x74, 0x28, 0x65, 0x2c, 0x5f, 0x2c, 0x72, 0x2c, 0x69, 0x29, 0x3b, 0x6e, 0x5b, 0x5f, 0x5d, 0x3d, 0x6f, 0x7d, 0x65, 0x6c, 0x73, 0x65, 
0x20, 0x6f, 0x2e, 0x6f, 0x28, 0x72, 0x2c, 0x69, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x4a, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, + 0x6f, 0x6e, 0x20, 0x51, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x6f, 0x3d, - 0x73, 0x28, 0x65, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, + 0x68, 0x28, 0x65, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x6f, 0x3a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x6f, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x69, 0x3d, 0x6e, 0x7d, - 0x2c, 0x64, 0x3a, 0x62, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x63, 0x6f, + 0x2c, 0x64, 0x3a, 0x53, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6f, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x69, 0x5b, 0x6e, 0x5d, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x69, 0x5b, 0x6e, @@ -1639,7 +1662,7 @@ unsigned char index_js[] = { 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x29, 0x7d, - 0x7d, 0x29, 0x7d, 0x7d, 0x6a, 0x74, 0x28, 0x22, 0x75, 0x6e, 0x6d, 0x6f, + 0x7d, 0x29, 0x7d, 0x7d, 0x71, 0x74, 0x28, 0x22, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, @@ -1656,196 +1679,198 @@ unsigned char index_js[] = { 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x6e, 0x2e, 0x64, 0x28, 0x29, - 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x6a, 0x74, + 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x68, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x3c, - 0x33, 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x32, 0x3b, - 0x74, 0x28, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7d, 0x29, 0x3b, 0x4c, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, - 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x65, 0x26, - 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, - 0x73, 0x7c, 0x7c, 0x34, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, - 0x24, 0x66, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, - 0x3b, 0x69, 0x66, 0x28, 0x33, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x5f, 0x24, 0x66, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, - 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, - 0x6e, 0x20, 0x6e, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, - 0x3b, 
0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, - 0x6e, 0x20, 0x74, 0x29, 0x69, 0x66, 0x28, 0x22, 0x5f, 0x5f, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x22, 0x21, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x74, - 0x5b, 0x69, 0x5d, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x29, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, - 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x29, 0x69, 0x66, 0x28, 0x21, 0x28, 0x69, 0x20, - 0x69, 0x6e, 0x20, 0x74, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x21, 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, - 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x50, - 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x73, 0x28, 0x74, 0x29, 0x2c, 0x5b, + 0x33, 0x7c, 0x7c, 0x39, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x6e, 0x2e, 0x5f, + 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, + 0x2c, 0x69, 0x29, 0x7d, 0x29, 0x3b, 0x49, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, + 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x65, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, + 0x69, 0x66, 0x28, 0x21, 0x28, 0x65, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x73, 0x7c, 0x7c, 0x34, 0x26, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x29, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x33, + 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x69, + 0x66, 0x28, 0x22, 0x5f, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, + 0x21, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x74, 0x5b, 0x69, 0x5d, 0x21, 0x3d, + 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, + 0x69, 0x5d, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, + 0x69, 0x66, 0x28, 0x21, 0x28, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, + 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 0x3b, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, + 0x3e, 0x68, 0x28, 0x74, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, 0x74, 0x28, 0x74, 0x29, + 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x50, 0x74, 0x28, + 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x3d, 0x74, 0x3b, 0x47, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, + 0x34, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x44, 0x74, 0x28, + 0x28, 0x29, 0x3d, 0x3e, 0x79, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6e, 0x2e, + 0x63, 
0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x51, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x6e, 0x3d, 0x48, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, 0x71, 0x74, 0x2e, 0x5f, - 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x50, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x64, 0x28, 0x28, - 0x29, 0x3d, 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, - 0x28, 0x29, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x48, 0x74, 0x28, 0x74, 0x29, - 0x3b, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, - 0x3b, 0x45, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x62, 0x28, 0x28, 0x29, - 0x3d, 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, - 0x29, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x59, - 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x5f, 0x3b, 0x6e, 0x5b, 0x30, 0x5d, 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, - 0x28, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x31, 0x3b, 0x6f, 0x3c, 0x6e, - 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, - 0x7b, 0x76, 0x61, 0x72, 0x20, 0x72, 0x3d, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, - 0x5d, 0x2c, 0x75, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3f, 0x28, 0x6e, 0x5b, - 0x30, 0x5d, 0x7c, 0x3d, 0x72, 0x3f, 0x31, 0x3a, 0x32, 0x2c, 0x65, 0x5b, - 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, 0x5d, 0x29, 0x3a, 0x6e, 0x5b, 0x2b, - 0x2b, 0x6f, 0x5d, 0x3b, 0x33, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, - 0x30, 0x5d, 0x3d, 0x75, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, - 0x5b, 0x31, 0x5d, 0x3d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x61, - 0x73, 0x73, 0x69, 0x67, 0x6e, 0x28, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, - 0x7b, 0x7d, 0x2c, 0x75, 0x29, 0x3a, 0x35, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, - 0x28, 0x69, 0x5b, 0x31, 0x5d, 0x3d, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, - 0x7b, 0x7d, 0x29, 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x3d, - 0x75, 0x3a, 0x36, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x31, 0x5d, - 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x2b, 0x3d, 0x75, 0x2b, - 0x22, 0x22, 0x3a, 0x72, 0x3f, 0x28, 0x5f, 0x3d, 0x74, 0x2e, 0x61, 0x70, - 0x70, 0x6c, 0x79, 0x28, 0x75, 0x2c, 0x59, 0x74, 0x28, 0x74, 0x2c, 0x75, - 0x2c, 0x65, 0x2c, 0x5b, 0x22, 0x22, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x5d, - 0x29, 0x29, 0x2c, 0x69, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x5f, 0x29, - 0x2c, 0x75, 0x5b, 0x30, 0x5d, 0x3f, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, - 0x32, 0x3a, 0x28, 0x6e, 0x5b, 0x6f, 0x2d, 0x32, 0x5d, 0x3d, 0x30, 0x2c, - 0x6e, 0x5b, 0x6f, 0x5d, 0x3d, 0x5f, 0x29, 0x29, 0x3a, 0x69, 0x2e, 0x70, - 0x75, 0x73, 0x68, 0x28, 0x75, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x69, 0x7d, 0x2c, 0x5a, 0x74, 0x3d, 0x6e, 0x65, 0x77, 0x20, - 0x4d, 0x61, 0x70, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x74, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, - 0x3d, 0x5a, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7c, 0x7c, - 0x28, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x2c, 0x5a, - 0x74, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, - 0x29, 
0x29, 0x2c, 0x28, 0x6e, 0x3d, 0x59, 0x74, 0x28, 0x74, 0x68, 0x69, - 0x73, 0x2c, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7c, 0x7c, - 0x28, 0x6e, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x66, - 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x2c, 0x69, - 0x3d, 0x31, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x2c, 0x6f, 0x3d, 0x22, 0x22, - 0x2c, 0x72, 0x3d, 0x5b, 0x30, 0x5d, 0x2c, 0x75, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x31, 0x3d, 0x3d, - 0x3d, 0x69, 0x26, 0x26, 0x28, 0x74, 0x7c, 0x7c, 0x28, 0x5f, 0x3d, 0x5f, - 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, - 0x73, 0x2a, 0x5c, 0x6e, 0x5c, 0x73, 0x2a, 0x7c, 0x5c, 0x73, 0x2a, 0x5c, - 0x6e, 0x5c, 0x73, 0x2a, 0x24, 0x2f, 0x67, 0x2c, 0x22, 0x22, 0x29, 0x29, - 0x29, 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x30, 0x2c, 0x74, - 0x2c, 0x5f, 0x29, 0x3a, 0x33, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, - 0x74, 0x7c, 0x7c, 0x5f, 0x29, 0x3f, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, - 0x68, 0x28, 0x33, 0x2c, 0x74, 0x2c, 0x5f, 0x29, 0x2c, 0x69, 0x3d, 0x32, - 0x29, 0x3a, 0x32, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x22, 0x2e, 0x2e, - 0x2e, 0x22, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x74, 0x3f, 0x72, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x34, 0x2c, 0x74, 0x2c, 0x30, 0x29, 0x3a, - 0x32, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x5f, 0x26, 0x26, 0x21, 0x74, - 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x35, 0x2c, 0x30, 0x2c, - 0x21, 0x30, 0x2c, 0x5f, 0x29, 0x3a, 0x69, 0x3e, 0x3d, 0x35, 0x26, 0x26, - 0x28, 0x28, 0x5f, 0x7c, 0x7c, 0x21, 0x74, 0x26, 0x26, 0x35, 0x3d, 0x3d, - 0x3d, 0x69, 0x29, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x69, 0x2c, 0x30, 0x2c, 0x5f, 0x2c, 0x65, 0x29, 0x2c, 0x69, 0x3d, - 0x36, 0x29, 0x2c, 0x74, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, - 0x68, 0x28, 0x69, 0x2c, 0x74, 0x2c, 0x30, 0x2c, 0x65, 0x29, 0x2c, 0x69, - 0x3d, 0x36, 0x29, 0x29, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x7d, 0x2c, 0x6c, - 0x3d, 0x30, 0x3b, 0x6c, 0x3c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x3b, 0x6c, 0x2b, 0x2b, 0x29, 0x7b, 0x6c, 0x26, 0x26, 0x28, 0x31, - 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x75, 0x28, 0x29, 0x2c, 0x75, 0x28, - 0x6c, 0x29, 0x29, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, - 0x66, 0x3d, 0x30, 0x3b, 0x66, 0x3c, 0x74, 0x5b, 0x6c, 0x5d, 0x2e, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, 0x2b, 0x2b, 0x29, 0x6e, 0x3d, - 0x74, 0x5b, 0x6c, 0x5d, 0x5b, 0x66, 0x5d, 0x2c, 0x31, 0x3d, 0x3d, 0x3d, - 0x69, 0x3f, 0x22, 0x3c, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, - 0x28, 0x29, 0x2c, 0x72, 0x3d, 0x5b, 0x72, 0x5d, 0x2c, 0x69, 0x3d, 0x33, - 0x29, 0x3a, 0x5f, 0x2b, 0x3d, 0x6e, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x69, - 0x3f, 0x22, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x22, - 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x69, 0x3d, 0x31, 0x2c, - 0x5f, 0x3d, 0x22, 0x22, 0x29, 0x3a, 0x5f, 0x3d, 0x6e, 0x2b, 0x5f, 0x5b, - 0x30, 0x5d, 0x3a, 0x6f, 0x3f, 0x6e, 0x3d, 0x3d, 0x3d, 0x6f, 0x3f, 0x6f, - 0x3d, 0x22, 0x22, 0x3a, 0x5f, 0x2b, 0x3d, 0x6e, 0x3a, 0x27, 0x22, 0x27, - 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x27, 0x22, 0x3d, 0x3d, 0x3d, - 0x6e, 0x3f, 0x6f, 0x3d, 0x6e, 0x3a, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, - 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x69, 0x3d, 0x31, 0x29, 0x3a, - 0x69, 0x26, 0x26, 0x28, 0x22, 0x3d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, - 0x28, 0x69, 0x3d, 0x35, 0x2c, 0x65, 0x3d, 0x5f, 0x2c, 0x5f, 0x3d, 0x22, - 0x22, 
0x29, 0x3a, 0x22, 0x2f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x26, 0x26, - 0x28, 0x69, 0x3c, 0x35, 0x7c, 0x7c, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, - 0x74, 0x5b, 0x6c, 0x5d, 0x5b, 0x66, 0x2b, 0x31, 0x5d, 0x29, 0x3f, 0x28, - 0x75, 0x28, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, - 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2c, 0x69, 0x3d, 0x72, 0x2c, - 0x28, 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2e, 0x70, 0x75, 0x73, - 0x68, 0x28, 0x32, 0x2c, 0x30, 0x2c, 0x69, 0x29, 0x2c, 0x69, 0x3d, 0x30, - 0x29, 0x3a, 0x22, 0x20, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, - 0x5c, 0x74, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x6e, - 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x72, 0x22, 0x3d, - 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x69, 0x3d, 0x32, - 0x29, 0x3a, 0x5f, 0x2b, 0x3d, 0x6e, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, - 0x69, 0x26, 0x26, 0x22, 0x21, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x5f, - 0x26, 0x26, 0x28, 0x69, 0x3d, 0x34, 0x2c, 0x72, 0x3d, 0x72, 0x5b, 0x30, - 0x5d, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x28, - 0x29, 0x2c, 0x72, 0x7d, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x29, 0x2c, - 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x5b, 0x5d, - 0x29, 0x29, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x31, 0x3f, - 0x6e, 0x3a, 0x6e, 0x5b, 0x30, 0x5d, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x6e, - 0x6e, 0x3d, 0x74, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x46, 0x29, - 0x3b, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x7b, 0x4c, 0x20, 0x61, 0x73, - 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x4f, - 0x20, 0x61, 0x73, 0x20, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, - 0x2c, 0x66, 0x20, 0x61, 0x73, 0x20, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, - 0x2c, 0x65, 0x20, 0x61, 0x73, 0x20, 0x62, 0x61, 0x74, 0x63, 0x68, 0x2c, - 0x66, 0x74, 0x20, 0x61, 0x73, 0x20, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x45, - 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x64, 0x20, 0x61, 0x73, 0x20, - 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x2c, 0x73, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x2c, 0x46, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, - 0x57, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, - 0x65, 0x66, 0x2c, 0x62, 0x20, 0x61, 0x73, 0x20, 0x65, 0x66, 0x66, 0x65, - 0x63, 0x74, 0x2c, 0x46, 0x20, 0x61, 0x73, 0x20, 0x68, 0x2c, 0x6e, 0x6e, - 0x20, 0x61, 0x73, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x2c, 0x6c, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x68, 0x79, 0x64, 0x72, 0x61, 0x74, 0x65, 0x2c, 0x77, - 0x20, 0x61, 0x73, 0x20, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x45, - 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x53, 0x20, 0x61, 0x73, 0x20, - 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2c, 0x75, 0x74, 0x20, 0x61, - 0x73, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2c, 0x73, 0x20, 0x61, - 0x73, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x7a, 0x20, 0x61, - 0x73, 0x20, 0x74, 0x6f, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x72, - 0x61, 0x79, 0x2c, 0x44, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, - 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2c, 0x51, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, - 0x65, 0x64, 0x2c, 0x24, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, - 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x54, 0x74, 0x20, 0x61, - 0x73, 0x20, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, - 0x6c, 
0x75, 0x65, 0x2c, 0x45, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, - 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x56, 0x74, 0x20, 0x61, - 0x73, 0x20, 0x75, 0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x6f, - 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x2c, 0x41, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x49, 0x64, 0x2c, 0x4e, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x49, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x76, 0x65, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x2c, 0x55, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4c, 0x61, 0x79, 0x6f, 0x75, 0x74, - 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x50, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x2c, 0x43, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x64, 0x75, 0x63, 0x65, - 0x72, 0x2c, 0x48, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, - 0x65, 0x66, 0x2c, 0x4b, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, - 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x58, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x45, 0x66, - 0x66, 0x65, 0x63, 0x74, 0x2c, 0x77, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x7d, 0x3b, 0x0a + 0x5a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x6e, 0x3d, 0x50, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, 0x48, 0x74, 0x28, 0x28, + 0x29, 0x3d, 0x3e, 0x53, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6e, 0x2e, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, 0x29, 0x2c, 0x5b, 0x5d, + 0x29, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x74, 0x6e, 0x3d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, + 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x3b, 0x6e, 0x5b, 0x30, + 0x5d, 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, + 0x6f, 0x3d, 0x31, 0x3b, 0x6f, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x72, 0x3d, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, 0x2c, 0x75, 0x3d, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3f, 0x28, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x72, + 0x3f, 0x31, 0x3a, 0x32, 0x2c, 0x65, 0x5b, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, + 0x5d, 0x5d, 0x29, 0x3a, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x3b, 0x33, + 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x30, 0x5d, 0x3d, 0x75, 0x3a, + 0x34, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x31, 0x5d, 0x3d, 0x4f, + 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, + 0x28, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, 0x7d, 0x2c, 0x75, 0x29, + 0x3a, 0x35, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x28, 0x69, 0x5b, 0x31, 0x5d, + 0x3d, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, 0x7d, 0x29, 0x5b, 0x6e, + 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x3d, 0x75, 0x3a, 0x36, 0x3d, 0x3d, + 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x31, 0x5d, 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, + 0x6f, 0x5d, 0x5d, 0x2b, 0x3d, 0x75, 0x2b, 0x22, 0x22, 0x3a, 0x72, 0x3f, + 0x28, 0x5f, 0x3d, 0x74, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x75, + 0x2c, 0x74, 0x6e, 0x28, 0x74, 0x2c, 0x75, 0x2c, 0x65, 0x2c, 0x5b, 0x22, + 0x22, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x5d, 0x29, 0x29, 0x2c, 0x69, 0x2e, + 0x70, 0x75, 0x73, 0x68, 0x28, 0x5f, 0x29, 0x2c, 0x75, 0x5b, 0x30, 0x5d, + 0x3f, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x32, 0x3a, 0x28, 0x6e, 0x5b, + 0x6f, 0x2d, 0x32, 0x5d, 0x3d, 0x30, 0x2c, 0x6e, 0x5b, 0x6f, 0x5d, 0x3d, + 0x5f, 0x29, 0x29, 0x3a, 0x69, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x75, + 0x29, 0x7d, 0x72, 
0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x7d, 0x2c, + 0x6e, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x6e, 0x28, 0x74, + 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x6e, 0x6e, 0x2e, 0x67, + 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7c, 0x7c, 0x28, 0x6e, 0x3d, 0x6e, 0x65, + 0x77, 0x20, 0x4d, 0x61, 0x70, 0x2c, 0x6e, 0x6e, 0x2e, 0x73, 0x65, 0x74, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, 0x29, 0x2c, 0x28, 0x6e, + 0x3d, 0x74, 0x6e, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x2e, 0x67, + 0x65, 0x74, 0x28, 0x74, 0x29, 0x7c, 0x7c, 0x28, 0x6e, 0x2e, 0x73, 0x65, + 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, + 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x3d, 0x31, 0x2c, 0x5f, 0x3d, + 0x22, 0x22, 0x2c, 0x6f, 0x3d, 0x22, 0x22, 0x2c, 0x72, 0x3d, 0x5b, 0x30, + 0x5d, 0x2c, 0x75, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x29, 0x7b, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, + 0x74, 0x7c, 0x7c, 0x28, 0x5f, 0x3d, 0x5f, 0x2e, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, 0x5c, + 0x73, 0x2a, 0x7c, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, 0x5c, 0x73, 0x2a, 0x24, + 0x2f, 0x67, 0x2c, 0x22, 0x22, 0x29, 0x29, 0x29, 0x3f, 0x72, 0x2e, 0x70, + 0x75, 0x73, 0x68, 0x28, 0x30, 0x2c, 0x74, 0x2c, 0x5f, 0x29, 0x3a, 0x33, + 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, 0x74, 0x7c, 0x7c, 0x5f, 0x29, + 0x3f, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x33, 0x2c, 0x74, + 0x2c, 0x5f, 0x29, 0x2c, 0x69, 0x3d, 0x32, 0x29, 0x3a, 0x32, 0x3d, 0x3d, + 0x3d, 0x69, 0x26, 0x26, 0x22, 0x2e, 0x2e, 0x2e, 0x22, 0x3d, 0x3d, 0x3d, + 0x5f, 0x26, 0x26, 0x74, 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x34, 0x2c, 0x74, 0x2c, 0x30, 0x29, 0x3a, 0x32, 0x3d, 0x3d, 0x3d, 0x69, + 0x26, 0x26, 0x5f, 0x26, 0x26, 0x21, 0x74, 0x3f, 0x72, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x35, 0x2c, 0x30, 0x2c, 0x21, 0x30, 0x2c, 0x5f, 0x29, + 0x3a, 0x69, 0x3e, 0x3d, 0x35, 0x26, 0x26, 0x28, 0x28, 0x5f, 0x7c, 0x7c, + 0x21, 0x74, 0x26, 0x26, 0x35, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x26, 0x26, + 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x2c, 0x30, 0x2c, + 0x5f, 0x2c, 0x65, 0x29, 0x2c, 0x69, 0x3d, 0x36, 0x29, 0x2c, 0x74, 0x26, + 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x2c, 0x74, + 0x2c, 0x30, 0x2c, 0x65, 0x29, 0x2c, 0x69, 0x3d, 0x36, 0x29, 0x29, 0x2c, + 0x5f, 0x3d, 0x22, 0x22, 0x7d, 0x2c, 0x66, 0x3d, 0x30, 0x3b, 0x66, 0x3c, + 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, 0x2b, 0x2b, + 0x29, 0x7b, 0x66, 0x26, 0x26, 0x28, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x26, + 0x26, 0x75, 0x28, 0x29, 0x2c, 0x75, 0x28, 0x66, 0x29, 0x29, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6c, 0x3d, 0x30, 0x3b, 0x6c, + 0x3c, 0x74, 0x5b, 0x66, 0x5d, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x6c, 0x2b, 0x2b, 0x29, 0x6e, 0x3d, 0x74, 0x5b, 0x66, 0x5d, 0x5b, + 0x6c, 0x5d, 0x2c, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x3f, 0x22, 0x3c, 0x22, + 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x72, 0x3d, + 0x5b, 0x72, 0x5d, 0x2c, 0x69, 0x3d, 0x33, 0x29, 0x3a, 0x5f, 0x2b, 0x3d, + 0x6e, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x69, 0x3f, 0x22, 0x2d, 0x2d, 0x22, + 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, + 0x6e, 0x3f, 0x28, 0x69, 0x3d, 0x31, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x29, + 0x3a, 0x5f, 0x3d, 
0x6e, 0x2b, 0x5f, 0x5b, 0x30, 0x5d, 0x3a, 0x6f, 0x3f, + 0x6e, 0x3d, 0x3d, 0x3d, 0x6f, 0x3f, 0x6f, 0x3d, 0x22, 0x22, 0x3a, 0x5f, + 0x2b, 0x3d, 0x6e, 0x3a, 0x27, 0x22, 0x27, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, + 0x7c, 0x22, 0x27, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x6f, 0x3d, 0x6e, + 0x3a, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, + 0x29, 0x2c, 0x69, 0x3d, 0x31, 0x29, 0x3a, 0x69, 0x26, 0x26, 0x28, 0x22, + 0x3d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x69, 0x3d, 0x35, 0x2c, + 0x65, 0x3d, 0x5f, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x29, 0x3a, 0x22, 0x2f, + 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x28, 0x69, 0x3c, 0x35, 0x7c, + 0x7c, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x74, 0x5b, 0x66, 0x5d, 0x5b, + 0x6c, 0x2b, 0x31, 0x5d, 0x29, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x33, + 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, 0x72, 0x3d, 0x72, 0x5b, 0x30, + 0x5d, 0x29, 0x2c, 0x69, 0x3d, 0x72, 0x2c, 0x28, 0x72, 0x3d, 0x72, 0x5b, + 0x30, 0x5d, 0x29, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x32, 0x2c, 0x30, + 0x2c, 0x69, 0x29, 0x2c, 0x69, 0x3d, 0x30, 0x29, 0x3a, 0x22, 0x20, 0x22, + 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x74, 0x22, 0x3d, 0x3d, + 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, + 0x7c, 0x7c, 0x22, 0x5c, 0x72, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, + 0x75, 0x28, 0x29, 0x2c, 0x69, 0x3d, 0x32, 0x29, 0x3a, 0x5f, 0x2b, 0x3d, + 0x6e, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x22, 0x21, + 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x69, 0x3d, + 0x34, 0x2c, 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x7d, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x28, 0x29, 0x2c, 0x72, 0x7d, 0x28, + 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x29, 0x2c, 0x61, 0x72, 0x67, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x5b, 0x5d, 0x29, 0x29, 0x2e, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x31, 0x3f, 0x6e, 0x3a, 0x6e, 0x5b, 0x30, + 0x5d, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x6e, 0x3d, 0x65, 0x6e, 0x2e, + 0x62, 0x69, 0x6e, 0x64, 0x28, 0x57, 0x29, 0x3b, 0x65, 0x78, 0x70, 0x6f, + 0x72, 0x74, 0x7b, 0x49, 0x20, 0x61, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, + 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x52, 0x20, 0x61, 0x73, 0x20, 0x46, + 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x63, 0x20, 0x61, 0x73, + 0x20, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x65, 0x20, 0x61, 0x73, + 0x20, 0x62, 0x61, 0x74, 0x63, 0x68, 0x2c, 0x63, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x2c, 0x79, 0x20, 0x61, 0x73, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, + 0x74, 0x65, 0x64, 0x2c, 0x68, 0x74, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, + 0x57, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, + 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, + 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x66, 0x2c, 0x53, 0x20, + 0x61, 0x73, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x57, 0x20, + 0x61, 0x73, 0x20, 0x68, 0x2c, 0x5f, 0x6e, 0x20, 0x61, 0x73, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x2c, 0x73, 0x74, 0x20, 0x61, 0x73, 0x20, 0x68, 0x79, + 0x64, 0x72, 0x61, 0x74, 0x65, 0x2c, 0x45, 0x20, 0x61, 0x73, 0x20, 0x69, + 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x2c, 0x77, 0x20, 0x61, 0x73, 0x20, 0x6f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x2c, 0x6c, 0x74, 0x20, 0x61, 0x73, 0x20, 0x72, 0x65, 0x6e, + 0x64, 0x65, 0x72, 0x2c, 0x68, 0x20, 0x61, 0x73, 0x20, 0x73, 0x69, 0x67, + 0x6e, 0x61, 0x6c, 
0x2c, 0x4b, 0x20, 0x61, 0x73, 0x20, 0x74, 0x6f, 0x43, + 0x68, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2c, 0x72, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x64, + 0x2c, 0x54, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x61, + 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2c, 0x59, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, + 0x2c, 0x56, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, + 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x41, 0x74, 0x20, 0x61, 0x73, 0x20, + 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x48, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x45, + 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x46, 0x74, 0x20, 0x61, 0x73, 0x20, + 0x75, 0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x6f, 0x75, 0x6e, + 0x64, 0x61, 0x72, 0x79, 0x2c, 0x4d, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x49, 0x64, 0x2c, 0x24, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x49, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, + 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x2c, 0x4e, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x4c, 0x61, 0x79, 0x6f, 0x75, 0x74, 0x45, 0x66, + 0x66, 0x65, 0x63, 0x74, 0x2c, 0x44, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x2c, 0x55, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x64, 0x75, 0x63, 0x65, 0x72, 0x2c, + 0x50, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, + 0x2c, 0x58, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, + 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x5a, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x45, 0x66, 0x66, 0x65, + 0x63, 0x74, 0x2c, 0x45, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, + 0x53, 0x74, 0x61, 0x74, 0x65, 0x7d, 0x3b, 0x0a }; -unsigned int index_js_len = 22174; +unsigned int index_js_len = 22472; diff --git a/examples/server/json-schema-to-grammar.mjs.hpp b/examples/server/json-schema-to-grammar.mjs.hpp new file mode 100644 index 000000000..0a05c369d --- /dev/null +++ b/examples/server/json-schema-to-grammar.mjs.hpp @@ -0,0 +1,311 @@ +unsigned char json_schema_to_grammar_mjs[] = { + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, + 0x52, 0x55, 0x4c, 0x45, 0x20, 0x3d, 0x20, 0x27, 0x22, 0x20, 0x22, 0x3f, + 0x27, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x52, + 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x53, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x62, 0x6f, 0x6f, 0x6c, + 0x65, 0x61, 0x6e, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x74, 0x72, 0x75, 0x65, + 0x22, 0x20, 0x7c, 0x20, 0x22, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x22, 0x29, + 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x6e, + 0x75, 0x6d, 0x62, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, 0x22, + 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, 0x5b, + 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2a, 0x29, + 0x29, 0x20, 0x28, 0x22, 0x2e, 0x22, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, + 0x2b, 0x29, 0x3f, 0x20, 0x28, 0x5b, 0x65, 0x45, 0x5d, 0x20, 0x5b, 0x2d, + 0x2b, 0x5d, 0x3f, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2b, 0x29, 0x3f, + 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x69, + 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, + 0x22, 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, + 0x5b, 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 
0x2d, 0x39, 0x5d, 0x2a, + 0x29, 0x29, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x60, 0x20, 0x22, + 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x5b, 0x5e, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, 0x5d, 0x20, + 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x22, 0x5c, + 0x5c, 0x5c, 0x5c, 0x22, 0x20, 0x28, 0x5b, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, + 0x2f, 0x62, 0x66, 0x6e, 0x72, 0x74, 0x5d, 0x20, 0x7c, 0x20, 0x22, 0x75, + 0x22, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2a, 0x20, + 0x22, 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, + 0x2c, 0x0a, 0x20, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x20, 0x27, 0x22, + 0x6e, 0x75, 0x6c, 0x6c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x2c, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, 0x52, 0x45, 0x20, 0x3d, 0x20, + 0x2f, 0x5b, 0x5e, 0x5c, 0x64, 0x41, 0x2d, 0x5a, 0x61, 0x2d, 0x7a, 0x2d, + 0x5d, 0x2b, 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, + 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, 0x52, + 0x45, 0x20, 0x3d, 0x20, 0x2f, 0x5b, 0x5c, 0x6e, 0x5c, 0x72, 0x22, 0x5d, + 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x52, + 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, 0x52, 0x41, + 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x53, 0x20, 0x3d, 0x20, + 0x7b, 0x27, 0x5c, 0x72, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x72, 0x27, + 0x2c, 0x20, 0x27, 0x5c, 0x6e, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x6e, + 0x27, 0x2c, 0x20, 0x27, 0x22, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x22, + 0x27, 0x7d, 0x3b, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, + 0x72, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, + 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x7c, + 0x7c, 0x20, 0x7b, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x20, 0x3d, 0x20, + 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, + 0x65, 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x27, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x27, 0x2c, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, 0x52, 0x55, + 0x4c, 0x45, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x20, 
0x3d, 0x20, 0x4a, 0x53, + 0x4f, 0x4e, 0x2e, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, + 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x2e, 0x72, 0x65, + 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, + 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, + 0x52, 0x45, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x20, + 0x3d, 0x3e, 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, + 0x49, 0x54, 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, + 0x45, 0x53, 0x5b, 0x6d, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x60, 0x22, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x7d, + 0x22, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x5f, + 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, + 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, + 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, + 0x52, 0x45, 0x2c, 0x20, 0x27, 0x2d, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, 0x65, 0x73, + 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x28, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, + 0x69, 0x20, 0x3d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, + 0x60, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, 0x60, 0x24, 0x7b, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x69, 0x7d, + 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, + 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x6b, 0x65, 0x79, 0x2c, 0x20, 0x72, + 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 
0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, + 0x3d, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x7c, 0x7c, 0x20, 0x27, 0x72, 0x6f, 0x6f, + 0x74, 0x27, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x6f, 0x6e, 0x65, 0x4f, + 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, + 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, + 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, + 0x6f, 0x6e, 0x65, 0x4f, 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x2e, 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x2e, 0x6d, + 0x61, 0x70, 0x28, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, + 0x73, 0x69, 0x74, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, + 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, + 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, + 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x72, + 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, + 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x65, + 0x6e, 0x75, 0x6d, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x3d, + 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x65, 0x6e, 0x75, 0x6d, + 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x76, 0x20, 0x3d, 0x3e, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, + 0x74, 0x65, 0x72, 0x61, 0x6c, 0x28, 0x76, 0x29, 0x29, 0x2e, 0x6a, 0x6f, + 0x69, 0x6e, 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, + 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, + 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 
0x20, 0x20, 0x20, 0x7d, + 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x27, 0x20, 0x26, 0x26, + 0x20, 0x27, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, + 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x54, 0x4f, 0x44, 0x4f, 0x3a, 0x20, 0x60, 0x72, 0x65, 0x71, 0x75, 0x69, + 0x72, 0x65, 0x64, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, + 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x70, 0x79, 0x74, 0x68, 0x6f, + 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, + 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, + 0x72, 0x6f, 0x70, 0x50, 0x61, 0x69, 0x72, 0x73, 0x20, 0x3d, 0x20, 0x4f, + 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x65, 0x6e, 0x74, 0x72, 0x69, 0x65, + 0x73, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x29, 0x2e, 0x73, 0x6f, 0x72, + 0x74, 0x28, 0x28, 0x61, 0x2c, 0x20, 0x62, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x73, 0x6f, 0x72, 0x74, 0x20, 0x62, 0x79, 0x20, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, + 0x70, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x28, 0x69, 0x66, 0x20, + 0x73, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x65, 0x64, 0x29, 0x20, 0x74, + 0x68, 0x65, 0x6e, 0x20, 0x62, 0x79, 0x20, 0x6b, 0x65, 0x79, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x3d, 0x20, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x5b, 0x61, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x27, 0x20, 0x3f, 0x20, + 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x61, 0x5b, + 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, 0x6e, 0x66, 0x69, 0x6e, 0x69, + 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x42, + 0x20, 0x3d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, + 0x5d, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, + 0x72, 0x27, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, + 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x2d, 0x20, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x42, 0x20, 0x7c, 0x7c, 0x20, 0x61, 0x5b, 0x30, 0x5d, 0x2e, + 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, + 0x65, 0x28, 0x62, 0x5b, 0x30, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x72, 0x75, 
0x6c, 0x65, 0x20, 0x3d, + 0x20, 0x27, 0x22, 0x7b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x70, + 0x50, 0x61, 0x69, 0x72, 0x73, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, + 0x68, 0x28, 0x28, 0x5b, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x5d, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x70, 0x72, 0x6f, 0x70, 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, + 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, 0x73, + 0x69, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, + 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x4e, + 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, 0x20, 0x3e, 0x20, + 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, 0x27, + 0x20, 0x22, 0x2c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, + 0x2b, 0x3d, 0x20, 0x60, 0x20, 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x29, + 0x7d, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x22, 0x3a, 0x22, 0x20, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, + 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, + 0x27, 0x20, 0x22, 0x7d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, + 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, + 0x6d, 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, + 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x61, 0x72, 0x72, 0x61, 0x79, 0x27, + 0x20, 0x26, 0x26, 0x20, 0x27, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x27, 0x20, + 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x54, 0x4f, + 0x44, 0x4f, 0x20, 0x60, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x49, 0x74, + 0x65, 0x6d, 0x73, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, + 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x70, 0x79, 0x74, 0x68, 0x6f, + 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, + 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x73, 
0x2c, 0x20, 0x60, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, + 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, 0x3a, 0x20, 0x22, 0x22, 0x7d, + 0x69, 0x74, 0x65, 0x6d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, + 0x20, 0x3d, 0x20, 0x60, 0x22, 0x5b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x20, 0x28, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, 0x6c, + 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x28, 0x22, 0x2c, 0x22, 0x20, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, + 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x29, 0x2a, 0x29, + 0x3f, 0x20, 0x22, 0x5d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, + 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, + 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x50, 0x52, + 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x53, 0x5b, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, + 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x55, 0x6e, 0x72, 0x65, 0x63, 0x6f, + 0x67, 0x6e, 0x69, 0x7a, 0x65, 0x64, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x3a, 0x20, 0x24, 0x7b, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, + 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, + 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, + 0x3f, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, 0x3a, 0x20, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x50, 0x52, 0x49, 0x4d, 0x49, + 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x53, 0x5b, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x5d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x28, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x67, + 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x3d, 0x20, 0x27, 0x27, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, + 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, + 0x28, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x2b, 0x3d, 0x20, + 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x3a, 0x3a, 0x3d, + 0x20, 0x24, 0x7b, 0x72, 0x75, 0x6c, 0x65, 0x7d, 0x5c, 0x6e, 0x60, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 
0x0a, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x72, 0x61, 0x6d, + 0x6d, 0x61, 0x72, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a +}; +unsigned int json_schema_to_grammar_mjs_len = 3695; diff --git a/examples/server/public/index.html b/examples/server/public/index.html index de41da187..c43becc42 100644 --- a/examples/server/public/index.html +++ b/examples/server/public/index.html @@ -141,6 +141,7 @@ } from '/index.js'; import { llama } from '/completion.js'; + import { SchemaConverter } from '/json-schema-to-grammar.mjs'; const session = signal({ prompt: "This is a conversation between user and llama, a friendly chatbot. respond in simple markdown.", @@ -166,6 +167,7 @@ mirostat: 0, // 0/1/2 mirostat_tau: 5, // target entropy mirostat_eta: 0.1, // learning rate + grammar: null, }) const llamaStats = signal(null) @@ -304,6 +306,26 @@ const updateParamsFloat = (el) => params.value = { ...params.value, [el.target.name]: parseFloat(el.target.value) } const updateParamsInt = (el) => params.value = { ...params.value, [el.target.name]: Math.floor(parseFloat(el.target.value)) } + const grammarJsonSchemaPropOrder = signal('') + const updateGrammarJsonSchemaPropOrder = (el) => grammarJsonSchemaPropOrder.value = el.target.value + const convertJSONSchemaGrammar = () => { + try { + const schema = JSON.parse(params.value.grammar) + const converter = new SchemaConverter( + grammarJsonSchemaPropOrder.value + .split(',') + .reduce((acc, cur, i) => ({...acc, [cur.trim()]: i}), {}) + ) + converter.visit(schema, '') + params.value = { + ...params.value, + grammar: converter.formatGrammar(), + } + } catch (e) { + alert(`Convert failed: ${e.message}`) + } + } + const FloatField = ({label, max, min, name, step, value}) => { return html`
@@ -355,6 +377,13 @@