diff --git a/.devops/lamma-cpp-clblast.srpm.spec b/.devops/lamma-cpp-clblast.srpm.spec
new file mode 100644
index 000000000..739c68281
--- /dev/null
+++ b/.devops/lamma-cpp-clblast.srpm.spec
@@ -0,0 +1,58 @@
+# SRPM for building from source and packaging an RPM for RPM-based distros.
+# https://fedoraproject.org/wiki/How_to_create_an_RPM_package
+# Built and maintained by John Boero - boeroboy@gmail.com
+# In honor of Seth Vidal https://www.redhat.com/it/blog/thank-you-seth-vidal
+
+# Notes for llama.cpp:
+# 1. Tags are currently based on hash - which will not sort asciibetically.
+#    We need to declare standard versioning if people want to sort latest releases.
+# 2. Builds for CUDA/OpenCL support are separate, with different dependencies.
+# 3. NVIDIA's developer repo must be enabled with nvcc, cublas, clblas, etc. installed.
+#    Example: https://developer.download.nvidia.com/compute/cuda/repos/fedora37/x86_64/cuda-fedora37.repo
+# 4. OpenCL/CLBLAST support simply requires the ICD loader and basic OpenCL libraries.
+#    It is up to the user to install the correct vendor-specific support.
+
+Name: llama.cpp-clblast
+Version: master
+Release: 1%{?dist}
+Summary: OpenCL Inference of LLaMA model in pure C/C++
+License: MIT
+Source0: https://github.com/ggerganov/llama.cpp/archive/refs/heads/master.tar.gz
+BuildRequires: coreutils make gcc-c++ git mesa-libOpenCL-devel
+URL: https://github.com/ggerganov/llama.cpp
+
+%define debug_package %{nil}
+%define source_date_epoch_from_changelog 0
+
+%description
+OpenCL inference for Meta's LLaMA 2 models using default options.
+
+%prep
+%setup -n llama.cpp-master
+
+%build
+make -j LLAMA_CLBLAST=1
+
+%install
+mkdir -p %{buildroot}%{_bindir}/
+cp -p main %{buildroot}%{_bindir}/llamacppclblast
+cp -p server %{buildroot}%{_bindir}/llamacppclblastserver
+cp -p simple %{buildroot}%{_bindir}/llamacppclblastsimple
+
+%clean
+rm -rf %{buildroot}
+rm -rf %{_builddir}/*
+
+%files
+%{_bindir}/llamacppclblast
+%{_bindir}/llamacppclblastserver
+%{_bindir}/llamacppclblastsimple
+
+%pre
+
+%post
+
+%preun
+%postun
+
+%changelog
diff --git a/.devops/lamma-cpp-cublas.srpm.spec b/.devops/lamma-cpp-cublas.srpm.spec
new file mode 100644
index 000000000..75d32fbe7
--- /dev/null
+++ b/.devops/lamma-cpp-cublas.srpm.spec
@@ -0,0 +1,59 @@
+# SRPM for building from source and packaging an RPM for RPM-based distros.
+# https://fedoraproject.org/wiki/How_to_create_an_RPM_package
+# Built and maintained by John Boero - boeroboy@gmail.com
+# In honor of Seth Vidal https://www.redhat.com/it/blog/thank-you-seth-vidal
+
+# Notes for llama.cpp:
+# 1. Tags are currently based on hash - which will not sort asciibetically.
+#    We need to declare standard versioning if people want to sort latest releases.
+# 2. Builds for CUDA/OpenCL support are separate, with different dependencies.
+# 3. NVIDIA's developer repo must be enabled with nvcc, cublas, clblas, etc. installed.
+#    Example: https://developer.download.nvidia.com/compute/cuda/repos/fedora37/x86_64/cuda-fedora37.repo
+# 4. OpenCL/CLBLAST support simply requires the ICD loader and basic OpenCL libraries.
+#    It is up to the user to install the correct vendor-specific support.
+
+Name: llama.cpp-cublas
+Version: master
+Release: 1%{?dist}
+Summary: CUDA Inference of LLaMA model in pure C/C++
+License: MIT
+Source0: https://github.com/ggerganov/llama.cpp/archive/refs/heads/master.tar.gz
+BuildRequires: coreutils make gcc-c++ git cuda-toolkit
+Requires: cuda-toolkit
+URL: https://github.com/ggerganov/llama.cpp
+
+%define debug_package %{nil}
+%define source_date_epoch_from_changelog 0
+
+%description
+CUDA inference for Meta's LLaMA 2 models using default options.
+
+%prep
+%setup -n llama.cpp-master
+
+%build
+make -j LLAMA_CUBLAS=1
+
+%install
+mkdir -p %{buildroot}%{_bindir}/
+cp -p main %{buildroot}%{_bindir}/llamacppcublas
+cp -p server %{buildroot}%{_bindir}/llamacppcublasserver
+cp -p simple %{buildroot}%{_bindir}/llamacppcublassimple
+
+%clean
+rm -rf %{buildroot}
+rm -rf %{_builddir}/*
+
+%files
+%{_bindir}/llamacppcublas
+%{_bindir}/llamacppcublasserver
+%{_bindir}/llamacppcublassimple
+
+%pre
+
+%post
+
+%preun
+%postun
+
+%changelog
diff --git a/.devops/llama-cpp.srpm.spec b/.devops/llama-cpp.srpm.spec
new file mode 100644
index 000000000..c65251a5a
--- /dev/null
+++ b/.devops/llama-cpp.srpm.spec
@@ -0,0 +1,58 @@
+# SRPM for building from source and packaging an RPM for RPM-based distros.
+# https://fedoraproject.org/wiki/How_to_create_an_RPM_package
+# Built and maintained by John Boero - boeroboy@gmail.com
+# In honor of Seth Vidal https://www.redhat.com/it/blog/thank-you-seth-vidal
+
+# Notes for llama.cpp:
+# 1. Tags are currently based on hash - which will not sort asciibetically.
+#    We need to declare standard versioning if people want to sort latest releases.
+# 2. Builds for CUDA/OpenCL support are separate, with different dependencies.
+# 3. NVIDIA's developer repo must be enabled with nvcc, cublas, clblas, etc. installed.
+#    Example: https://developer.download.nvidia.com/compute/cuda/repos/fedora37/x86_64/cuda-fedora37.repo
+# 4. OpenCL/CLBLAST support simply requires the ICD loader and basic OpenCL libraries.
+#    It is up to the user to install the correct vendor-specific support.
+
+Name: llama.cpp
+Version: master
+Release: 1%{?dist}
+Summary: CPU Inference of LLaMA model in pure C/C++ (no CUDA/OpenCL)
+License: MIT
+Source0: https://github.com/ggerganov/llama.cpp/archive/refs/heads/master.tar.gz
+BuildRequires: coreutils make gcc-c++ git
+URL: https://github.com/ggerganov/llama.cpp
+
+%define debug_package %{nil}
+%define source_date_epoch_from_changelog 0
+
+%description
+CPU inference for Meta's LLaMA 2 models using default options.
+ +%prep +%autosetup + +%build +make -j + +%install +mkdir -p %{buildroot}%{_bindir}/ +cp -p main %{buildroot}%{_bindir}/llamacpp +cp -p server %{buildroot}%{_bindir}/llamacppserver +cp -p simple %{buildroot}%{_bindir}/llamacppsimple + +%clean +rm -rf %{buildroot} +rm -rf %{_builddir}/* + +%files +%{_bindir}/llamacpp +%{_bindir}/llamacppserver +%{_bindir}/llamacppsimple + +%pre + +%post + +%preun +%postun + +%changelog diff --git a/.gitignore b/.gitignore index c1ab6bb6d..f3121794a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,10 @@ *.o *.a *.so +*.gguf +*.bin +*.exe +*.dll .DS_Store .build/ .cache/ @@ -39,13 +43,17 @@ models-mnt /perplexity /embedding /train-text-from-scratch +/convert-llama2c-to-ggml /simple /benchmark-matmult /vdot /server /Pipfile /embd-input-test +/gguf +/gguf-llama-simple /libllama.so +/llama-bench build-info.h arm_neon.h compile_commands.json @@ -62,12 +70,12 @@ perf-*.txt examples/jeopardy/results.txt - pyproject.toml poetry.lock poetry.toml # Test binaries +tests/test-grammar-parser tests/test-double-float tests/test-grad0 tests/test-opt @@ -75,4 +83,3 @@ tests/test-quantize-fns tests/test-quantize-perf tests/test-sampling tests/test-tokenizer-0 - diff --git a/CMakeLists.txt b/CMakeLists.txt index d085bc835..bb63ef98e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -69,7 +69,6 @@ option(LLAMA_BLAS "llama: use BLAS" set(LLAMA_BLAS_VENDOR "Generic" CACHE STRING "llama: BLAS library vendor") option(LLAMA_CUBLAS "llama: use CUDA" OFF) #option(LLAMA_CUDA_CUBLAS "llama: use cuBLAS for prompt processing" OFF) -set(LLAMA_CUDA_MMQ_Y "64" CACHE STRING "llama: y tile size for mmq CUDA kernels") option(LLAMA_CUDA_FORCE_DMMV "llama: use dmmv instead of mmvq CUDA kernels" OFF) set(LLAMA_CUDA_DMMV_X "32" CACHE STRING "llama: x stride for dmmv CUDA kernels") set(LLAMA_CUDA_MMV_Y "1" CACHE STRING "llama: y block size for mmv CUDA kernels") @@ -256,7 +255,6 @@ if (LLAMA_CUBLAS) # if (LLAMA_CUDA_CUBLAS) # add_compile_definitions(GGML_CUDA_CUBLAS) # endif() - add_compile_definitions(GGML_CUDA_MMQ_Y=${LLAMA_CUDA_MMQ_Y}) if (LLAMA_CUDA_FORCE_DMMV) add_compile_definitions(GGML_CUDA_FORCE_DMMV) endif() @@ -298,7 +296,6 @@ if (LLAMA_METAL) find_library(FOUNDATION_LIBRARY Foundation REQUIRED) find_library(METAL_FRAMEWORK Metal REQUIRED) find_library(METALKIT_FRAMEWORK MetalKit REQUIRED) - find_library(METALPERFORMANCE_FRAMEWORK MetalPerformanceShaders REQUIRED) set(GGML_SOURCES_METAL ggml-metal.m ggml-metal.h) @@ -315,7 +312,6 @@ if (LLAMA_METAL) ${FOUNDATION_LIBRARY} ${METAL_FRAMEWORK} ${METALKIT_FRAMEWORK} - ${METALPERFORMANCE_FRAMEWORK} ) endif() @@ -501,9 +497,11 @@ else() endif() # -# Build libraries +# libraries # +# ggml + add_library(ggml OBJECT ggml.c ggml.h @@ -528,10 +526,11 @@ if (BUILD_SHARED_LIBS) install(TARGETS ggml_shared LIBRARY) endif() +# llama + add_library(llama llama.cpp llama.h - llama-util.h ) target_include_directories(llama PUBLIC .) 
@@ -550,6 +549,10 @@ if (BUILD_SHARED_LIBS) install(TARGETS llama LIBRARY) endif() +# +# install +# + include(GNUInstallDirs) install( FILES convert.py @@ -573,11 +576,23 @@ install( WORLD_READ WORLD_EXECUTE DESTINATION ${CMAKE_INSTALL_BINDIR}) +if (LLAMA_METAL) + install( + FILES ggml-metal.metal + PERMISSIONS + OWNER_READ + OWNER_WRITE + GROUP_READ + WORLD_READ + DESTINATION ${CMAKE_INSTALL_BINDIR}) +endif() # # programs, examples and tests # +add_subdirectory(common) + if (LLAMA_BUILD_TESTS AND NOT CMAKE_JS_VERSION) include(CTest) add_subdirectory(tests) diff --git a/Makefile b/Makefile index e0528aeee..d31acc450 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,8 @@ # Define the default target now so that it is always the first target -BUILD_TARGETS = main quantize quantize-stats perplexity embedding vdot train-text-from-scratch simple server embd-input-test +BUILD_TARGETS = main quantize quantize-stats perplexity embedding vdot train-text-from-scratch convert-llama2c-to-ggml simple server embd-input-test gguf llama-bench # Binaries only useful for tests -TEST_TARGETS = tests/test-double-float tests/test-grad0 tests/test-opt tests/test-quantize-fns tests/test-quantize-perf tests/test-sampling tests/test-tokenizer-0 +TEST_TARGETS = tests/test-llama-grammar tests/test-grammar-parser tests/test-double-float tests/test-grad0 tests/test-opt tests/test-quantize-fns tests/test-quantize-perf tests/test-sampling tests/test-tokenizer-0 default: $(BUILD_TARGETS) @@ -45,8 +45,8 @@ OPT = -Ofast else OPT = -O3 endif -CFLAGS = -I. $(OPT) -std=c11 -fPIC -CXXFLAGS = -I. -I./examples $(OPT) -std=c++11 -fPIC +CFLAGS = -I. $(OPT) -std=c11 -fPIC +CXXFLAGS = -I. -I./common $(OPT) -std=c++11 -fPIC LDFLAGS = ifdef LLAMA_DEBUG @@ -142,6 +142,28 @@ ifeq ($(UNAME_M),$(filter $(UNAME_M),x86_64 i686 amd64)) #CXXFLAGS += -mssse3 endif +ifneq ($(filter aarch64%,$(UNAME_M)),) + # Apple M1, M2, etc. + # Raspberry Pi 3, 4, Zero 2 (64-bit) + CFLAGS += -mcpu=native + CXXFLAGS += -mcpu=native +endif + +ifneq ($(filter armv6%,$(UNAME_M)),) + # Raspberry Pi 1, Zero + CFLAGS += -mfpu=neon-fp-armv8 -mfp16-format=ieee -mno-unaligned-access +endif + +ifneq ($(filter armv7%,$(UNAME_M)),) + # Raspberry Pi 2 + CFLAGS += -mfpu=neon-fp-armv8 -mfp16-format=ieee -mno-unaligned-access -funsafe-math-optimizations +endif + +ifneq ($(filter armv8%,$(UNAME_M)),) + # Raspberry Pi 3, 4, Zero 2 (32-bit) + CFLAGS += -mfp16-format=ieee -mno-unaligned-access +endif + ifneq ($(filter ppc64%,$(UNAME_M)),) POWER9_M := $(shell grep "POWER9" /proc/cpuinfo) ifneq (,$(findstring POWER9,$(POWER9_M))) @@ -231,11 +253,6 @@ ifdef LLAMA_CUDA_KQUANTS_ITER else NVCCFLAGS += -DK_QUANTS_PER_ITERATION=2 endif -ifdef LLAMA_CUDA_MMQ_Y - NVCCFLAGS += -DGGML_CUDA_MMQ_Y=$(LLAMA_CUDA_MMQ_Y) -else - NVCCFLAGS += -DGGML_CUDA_MMQ_Y=64 -endif # LLAMA_CUDA_MMQ_Y #ifdef LLAMA_CUDA_CUBLAS # NVCCFLAGS += -DGGML_CUDA_CUBLAS #endif # LLAMA_CUDA_CUBLAS @@ -266,32 +283,10 @@ endif # LLAMA_CLBLAST ifdef LLAMA_METAL CFLAGS += -DGGML_USE_METAL -DGGML_METAL_NDEBUG CXXFLAGS += -DGGML_USE_METAL - LDFLAGS += -framework Foundation -framework Metal -framework MetalKit -framework MetalPerformanceShaders + LDFLAGS += -framework Foundation -framework Metal -framework MetalKit OBJS += ggml-metal.o endif # LLAMA_METAL -ifneq ($(filter aarch64%,$(UNAME_M)),) - # Apple M1, M2, etc. 
- # Raspberry Pi 3, 4, Zero 2 (64-bit) - CFLAGS += -mcpu=native - CXXFLAGS += -mcpu=native -endif - -ifneq ($(filter armv6%,$(UNAME_M)),) - # Raspberry Pi 1, Zero - CFLAGS += -mfpu=neon-fp-armv8 -mfp16-format=ieee -mno-unaligned-access -endif - -ifneq ($(filter armv7%,$(UNAME_M)),) - # Raspberry Pi 2 - CFLAGS += -mfpu=neon-fp-armv8 -mfp16-format=ieee -mno-unaligned-access -funsafe-math-optimizations -endif - -ifneq ($(filter armv8%,$(UNAME_M)),) - # Raspberry Pi 3, 4, Zero 2 (32-bit) - CFLAGS += -mfp16-format=ieee -mno-unaligned-access -endif - ifdef LLAMA_METAL ggml-metal.o: ggml-metal.m ggml-metal.h $(CC) $(CFLAGS) -c $< -o $@ @@ -334,23 +329,23 @@ ggml-alloc.o: ggml-alloc.c ggml.h ggml-alloc.h OBJS += ggml-alloc.o -llama.o: llama.cpp ggml.h ggml-alloc.h ggml-cuda.h ggml-metal.h llama.h llama-util.h +llama.o: llama.cpp ggml.h ggml-alloc.h ggml-cuda.h ggml-metal.h llama.h $(CXX) $(CXXFLAGS) -c $< -o $@ -common.o: examples/common.cpp examples/common.h +common.o: common/common.cpp common/common.h $(CXX) $(CXXFLAGS) -c $< -o $@ -console.o: examples/console.cpp examples/console.h +console.o: common/console.cpp common/console.h $(CXX) $(CXXFLAGS) -c $< -o $@ -grammar-parser.o: examples/grammar-parser.cpp examples/grammar-parser.h +grammar-parser.o: common/grammar-parser.cpp common/grammar-parser.h $(CXX) $(CXXFLAGS) -c $< -o $@ libllama.so: llama.o ggml.o $(OBJS) $(CXX) $(CXXFLAGS) -shared -fPIC -o $@ $^ $(LDFLAGS) clean: - rm -vf *.o *.so *.dll main quantize quantize-stats perplexity embedding benchmark-matmult save-load-state server simple vdot train-text-from-scratch embd-input-test build-info.h $(TEST_TARGETS) + rm -vf *.o *.so *.dll main quantize quantize-stats perplexity embedding benchmark-matmult save-load-state server simple vdot train-text-from-scratch convert-llama2c-to-ggml embd-input-test gguf llama-bench build-info.h $(TEST_TARGETS) # # Examples @@ -380,7 +375,7 @@ embedding: examples/embedding/embedding.cpp build-info.h ggml. save-load-state: examples/save-load-state/save-load-state.cpp build-info.h ggml.o llama.o common.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -server: examples/server/server.cpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp build-info.h ggml.o llama.o common.o $(OBJS) +server: examples/server/server.cpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp build-info.h ggml.o llama.o common.o grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h,$(filter-out %.hpp,$^)) -o $@ $(LDFLAGS) $(LWINSOCK2) $(LIB_PRE)embdinput$(DSO_EXT): examples/embd-input/embd-input.h examples/embd-input/embd-input-lib.cpp build-info.h ggml.o llama.o common.o $(OBJS) @@ -390,7 +385,16 @@ $(LIB_PRE)embdinput$(DSO_EXT): examples/embd-input/embd-input.h examples/embd-in embd-input-test: $(LIB_PRE)embdinput$(DSO_EXT) examples/embd-input/embd-input-test.cpp build-info.h ggml.o llama.o common.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %$(DSO_EXT),$(filter-out %.h,$(filter-out %.hpp,$^))) -o $@ $(LDFLAGS) -L. 
-lembdinput -train-text-from-scratch: examples/train-text-from-scratch/train-text-from-scratch.cpp build-info.h ggml.o llama.o $(OBJS) +gguf: examples/gguf/gguf.cpp build-info.h ggml.o llama.o $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + +train-text-from-scratch: examples/train-text-from-scratch/train-text-from-scratch.cpp build-info.h ggml.o llama.o common.o $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + +convert-llama2c-to-ggml: examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp build-info.h ggml.o llama.o $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + +llama-bench: examples/llama-bench/llama-bench.cpp build-info.h ggml.o llama.o common.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) build-info.h: $(wildcard .git/index) scripts/build-info.sh @@ -414,6 +418,12 @@ benchmark-matmult: examples/benchmark/benchmark-matmult.cpp build-info.h ggml.o vdot: pocs/vdot/vdot.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) +tests/test-llama-grammar: tests/test-llama-grammar.cpp build-info.h ggml.o llama.o common.o $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.txt,$^) -o $@ $(LDFLAGS) + +tests/test-grammar-parser: tests/test-grammar-parser.cpp build-info.h ggml.o llama.o common.o $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.txt,$^) -o $@ $(LDFLAGS) + tests/test-double-float: tests/test-double-float.cpp build-info.h ggml.o llama.o common.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.txt,$^) -o $@ $(LDFLAGS) diff --git a/README.md b/README.md index 2ece294b7..eebb11392 100644 --- a/README.md +++ b/README.md @@ -9,13 +9,21 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ -**Hot topics:** +### Hot topics -- Simple web chat example: https://github.com/ggerganov/llama.cpp/pull/1998 -- k-quants now support super-block size of 64: https://github.com/ggerganov/llama.cpp/pull/2001 -- New roadmap: https://github.com/users/ggerganov/projects/7 -- Azure CI brainstorming: https://github.com/ggerganov/llama.cpp/discussions/1985 -- p1 : LLM-based code completion engine at the edge : https://github.com/ggml-org/p1/discussions/1 +- Added support for Falcon models: https://github.com/ggerganov/llama.cpp/pull/2717 + +- A new file format has been introduced: [GGUF](https://github.com/ggerganov/llama.cpp/pull/2398) + + Last revision compatible with the old format: [dadbed9](https://github.com/ggerganov/llama.cpp/commit/dadbed99e65252d79f81101a392d0d6497b86caa) + + ### Current `master` should be considered in Beta - expect some issues for a few days! + + ### Be prepared to re-convert and / or re-quantize your GGUF models while this notice is up! + + ### Issues with non-GGUF models will be considered with low priority! + +----
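With the GGUF migration flagged above, older `.bin` models need to be re-created in the new format. This is the same two-step flow described in the "Prepare Data & Run" section later in this README; a minimal sketch (paths are illustrative and assume the original weights already sit under `models/7B/`):

```bash
# convert the original weights to a GGUF F16 model
python3 convert.py models/7B/

# re-quantize to 4 bits using the q4_0 method
./quantize ./models/7B/ggml-model-f16.gguf ./models/7B/ggml-model-q4_0.gguf q4_0
```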
Table of Contents

@@ -33,6 +41,7 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++
    • Memory/Disk Requirements
    • Quantization
    • Interactive mode
+   • Constrained output with grammars
    • Instruction mode with Alpaca
    • Using OpenLLaMA
    • Using GPT4All

@@ -59,12 +68,11 @@ The main goal of `llama.cpp` is to run the LLaMA model using 4-bit integer quant
 - Apple silicon first-class citizen - optimized via ARM NEON, Accelerate and Metal frameworks
 - AVX, AVX2 and AVX512 support for x86 architectures
 - Mixed F16 / F32 precision
-- 4-bit, 5-bit and 8-bit integer quantization support
-- Supports OpenBLAS/Apple BLAS/ARM Performance Lib/ATLAS/BLIS/Intel MKL/NVHPC/ACML/SCSL/SGIMATH and [more](https://cmake.org/cmake/help/latest/module/FindBLAS.html#blas-lapack-vendors) in BLAS
-- cuBLAS and CLBlast support
+- 2-bit, 3-bit, 4-bit, 5-bit, 6-bit and 8-bit integer quantization support
+- CUDA, Metal and OpenCL GPU backend support
 
 The original implementation of `llama.cpp` was [hacked in an evening](https://github.com/ggerganov/llama.cpp/issues/33#issuecomment-1465108022).
-Since then, the project has improved significantly thanks to many contributions. This project is for educational purposes and serves
+Since then, the project has improved significantly thanks to many contributions. This project is mainly for educational purposes and serves
 as the main playground for developing new features for the [ggml](https://github.com/ggerganov/ggml) library.
 
 **Supported platforms:**
@@ -78,6 +86,7 @@ as the main playground for developing new features for the [ggml](https://github
 
 - [X] LLaMA 🦙
 - [x] LLaMA 2 🦙🦙
+- [X] Falcon
 - [X] [Alpaca](https://github.com/ggerganov/llama.cpp#instruction-mode-with-alpaca)
 - [X] [GPT4All](https://github.com/ggerganov/llama.cpp#using-gpt4all)
 - [X] [Chinese LLaMA / Alpaca](https://github.com/ymcui/Chinese-LLaMA-Alpaca) and [Chinese LLaMA-2 / Alpaca-2](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2)
@@ -96,8 +105,10 @@ as the main playground for developing new features for the [ggml](https://github
 - Go: [go-skynet/go-llama.cpp](https://github.com/go-skynet/go-llama.cpp)
 - Node.js: [hlhr202/llama-node](https://github.com/hlhr202/llama-node)
 - Ruby: [yoshoku/llama_cpp.rb](https://github.com/yoshoku/llama_cpp.rb)
+- Rust: [mdrokz/rust-llama.cpp](https://github.com/mdrokz/rust-llama.cpp)
 - C#/.NET: [SciSharp/LLamaSharp](https://github.com/SciSharp/LLamaSharp)
 - Scala 3: [donderom/llm4s](https://github.com/donderom/llm4s)
+- Clojure: [phronmophobic/llama.clj](https://github.com/phronmophobic/llama.clj)
 
 **UI:**
 
@@ -106,90 +117,84 @@ as the main playground for developing new features for the [ggml](https://github
 
 ---
 
-Here is a typical run using LLaMA-7B:
+Here is a typical run using LLaMA v2 13B on M2 Ultra:
 
 ```java
-make -j && ./main -m ./models/7B/ggml-model-q4_0.bin -p "Building a website can be done in 10 simple steps:" -n 512
+$ make -j && ./main -m models/llama-13b-v2/ggml-model-q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e
 I llama.cpp build info:
 I UNAME_S: Darwin
 I UNAME_P: arm
 I UNAME_M: arm64
-I CFLAGS: -I. -O3 -DNDEBUG -std=c11 -fPIC -pthread -DGGML_USE_ACCELERATE
-I CXXFLAGS: -I. -I./examples -O3 -DNDEBUG -std=c++11 -fPIC -pthread
+I CFLAGS: -I. -O3 -std=c11 -fPIC -DNDEBUG -Wall -Wextra -Wpedantic -Wcast-qual -Wdouble-promotion -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes -pthread -DGGML_USE_K_QUANTS -DGGML_USE_ACCELERATE
+I CXXFLAGS: -I.
-I./common -O3 -std=c++11 -fPIC -DNDEBUG -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function -Wno-multichar -pthread -DGGML_USE_K_QUANTS I LDFLAGS: -framework Accelerate -I CC: Apple clang version 14.0.0 (clang-1400.0.29.202) -I CXX: Apple clang version 14.0.0 (clang-1400.0.29.202) +I CC: Apple clang version 14.0.3 (clang-1403.0.22.14.1) +I CXX: Apple clang version 14.0.3 (clang-1403.0.22.14.1) make: Nothing to be done for `default'. -main: seed = 1678486056 -llama_model_load: loading model from './models/7B/ggml-model-q4_0.bin' - please wait ... -llama_model_load: n_vocab = 32000 -llama_model_load: n_ctx = 512 -llama_model_load: n_embd = 4096 -llama_model_load: n_mult = 256 -llama_model_load: n_head = 32 -llama_model_load: n_layer = 32 -llama_model_load: n_rot = 128 -llama_model_load: f16 = 2 -llama_model_load: n_ff = 11008 -llama_model_load: ggml ctx size = 4529.34 MB -llama_model_load: memory_size = 512.00 MB, n_mem = 16384 -llama_model_load: .................................... done -llama_model_load: model size = 4017.27 MB / num tensors = 291 +main: build = 1041 (cf658ad) +main: seed = 1692823051 +llama_model_loader: loaded meta data with 16 key-value pairs and 363 tensors from models/llama-13b-v2/ggml-model-q4_0.gguf (version GGUF V1 (latest)) +llama_model_loader: - type f32: 81 tensors +llama_model_loader: - type q4_0: 281 tensors +llama_model_loader: - type q6_K: 1 tensors +llm_load_print_meta: format = GGUF V1 (latest) +llm_load_print_meta: arch = llama +llm_load_print_meta: vocab type = SPM +llm_load_print_meta: n_vocab = 32000 +llm_load_print_meta: n_merges = 0 +llm_load_print_meta: n_ctx_train = 4096 +llm_load_print_meta: n_ctx = 512 +llm_load_print_meta: n_embd = 5120 +llm_load_print_meta: n_head = 40 +llm_load_print_meta: n_head_kv = 40 +llm_load_print_meta: n_layer = 40 +llm_load_print_meta: n_rot = 128 +llm_load_print_meta: n_gqa = 1 +llm_load_print_meta: f_norm_eps = 1.0e-05 +llm_load_print_meta: f_norm_rms_eps = 1.0e-05 +llm_load_print_meta: n_ff = 13824 +llm_load_print_meta: freq_base = 10000.0 +llm_load_print_meta: freq_scale = 1 +llm_load_print_meta: model type = 13B +llm_load_print_meta: model ftype = mostly Q4_0 +llm_load_print_meta: model size = 13.02 B +llm_load_print_meta: general.name = LLaMA v2 +llm_load_print_meta: BOS token = 1 '' +llm_load_print_meta: EOS token = 2 '' +llm_load_print_meta: UNK token = 0 '' +llm_load_print_meta: LF token = 13 '<0x0A>' +llm_load_tensors: ggml ctx size = 0.11 MB +llm_load_tensors: mem required = 7024.01 MB (+ 400.00 MB per state) +................................................................................................... 
+llama_new_context_with_model: kv self size = 400.00 MB +llama_new_context_with_model: compute buffer total size = 75.41 MB -main: prompt: 'Building a website can be done in 10 simple steps:' -main: number of tokens in prompt = 15 - 1 -> '' - 8893 -> 'Build' - 292 -> 'ing' - 263 -> ' a' - 4700 -> ' website' - 508 -> ' can' - 367 -> ' be' - 2309 -> ' done' - 297 -> ' in' - 29871 -> ' ' - 29896 -> '1' - 29900 -> '0' - 2560 -> ' simple' - 6576 -> ' steps' - 29901 -> ':' - -sampling parameters: temp = 0.800000, top_k = 40, top_p = 0.950000 +system_info: n_threads = 16 / 24 | AVX = 0 | AVX2 = 0 | AVX512 = 0 | AVX512_VBMI = 0 | AVX512_VNNI = 0 | FMA = 0 | NEON = 1 | ARM_FMA = 1 | F16C = 0 | FP16_VA = 1 | WASM_SIMD = 0 | BLAS = 1 | SSE3 = 0 | VSX = 0 | +sampling: repeat_last_n = 64, repeat_penalty = 1.100000, presence_penalty = 0.000000, frequency_penalty = 0.000000, top_k = 40, tfs_z = 1.000000, top_p = 0.950000, typical_p = 1.000000, temp = 0.800000, mirostat = 0, mirostat_lr = 0.100000, mirostat_ent = 5.000000 +generate: n_ctx = 512, n_batch = 512, n_predict = 400, n_keep = 0 -Building a website can be done in 10 simple steps: -1) Select a domain name and web hosting plan -2) Complete a sitemap -3) List your products -4) Write product descriptions -5) Create a user account -6) Build the template -7) Start building the website -8) Advertise the website -9) Provide email support -10) Submit the website to search engines -A website is a collection of web pages that are formatted with HTML. HTML is the code that defines what the website looks like and how it behaves. -The HTML code is formatted into a template or a format. Once this is done, it is displayed on the user's browser. -The web pages are stored in a web server. The web server is also called a host. When the website is accessed, it is retrieved from the server and displayed on the user's computer. -A website is known as a website when it is hosted. This means that it is displayed on a host. The host is usually a web server. -A website can be displayed on different browsers. The browsers are basically the software that renders the website on the user's screen. -A website can also be viewed on different devices such as desktops, tablets and smartphones. -Hence, to have a website displayed on a browser, the website must be hosted. -A domain name is an address of a website. It is the name of the website. -The website is known as a website when it is hosted. This means that it is displayed on a host. The host is usually a web server. -A website can be displayed on different browsers. The browsers are basically the software that renders the website on the user’s screen. -A website can also be viewed on different devices such as desktops, tablets and smartphones. Hence, to have a website displayed on a browser, the website must be hosted. -A domain name is an address of a website. It is the name of the website. -A website is an address of a website. It is a collection of web pages that are formatted with HTML. HTML is the code that defines what the website looks like and how it behaves. -The HTML code is formatted into a template or a format. Once this is done, it is displayed on the user’s browser. -A website is known as a website when it is hosted - -main: mem per token = 14434244 bytes -main: load time = 1332.48 ms -main: sample time = 1081.40 ms -main: predict time = 31378.77 ms / 61.41 ms per token -main: total time = 34036.74 ms + Building a website can be done in 10 simple steps: +Step 1: Find the right website platform. 
+Step 2: Choose your domain name and hosting plan.
+Step 3: Design your website layout.
+Step 4: Write your website content and add images.
+Step 5: Install security features to protect your site from hackers or spammers
+Step 6: Test your website on multiple browsers, mobile devices, operating systems etc…
+Step 7: Test it again with people who are not related to you personally – friends or family members will work just fine!
+Step 8: Start marketing and promoting the website via social media channels or paid ads
+Step 9: Analyze how many visitors have come to your site so far, what type of people visit more often than others (e.g., men vs women) etc…
+Step 10: Continue to improve upon all aspects mentioned above by following trends in web design and staying up-to-date on new technologies that can enhance user experience even further!
+How does a Website Work?
+A website works by having pages, which are made of HTML code. This code tells your computer how to display the content on each page you visit – whether it’s an image or text file (like PDFs). In order for someone else’s browser not only be able but also want those same results when accessing any given URL; some additional steps need taken by way of programming scripts that will add functionality such as making links clickable!
+The most common type is called static HTML pages because they remain unchanged over time unless modified manually (either through editing files directly or using an interface such as WordPress). They are usually served up via HTTP protocols – this means anyone can access them without having any special privileges like being part of a group who is allowed into restricted areas online; however, there may still exist some limitations depending upon where one lives geographically speaking.
+How to
+llama_print_timings: load time = 576.45 ms
+llama_print_timings: sample time = 283.10 ms / 400 runs ( 0.71 ms per token, 1412.91 tokens per second)
+llama_print_timings: prompt eval time = 599.83 ms / 19 tokens ( 31.57 ms per token, 31.68 tokens per second)
+llama_print_timings: eval time = 24513.59 ms / 399 runs ( 61.44 ms per token, 16.28 tokens per second)
+llama_print_timings: total time = 25431.49 ms
 ```
 
 And here is another demo of running both LLaMA-7B and [whisper.cpp](https://github.com/ggerganov/whisper.cpp) on a single M1 Pro MacBook:
 
@@ -238,12 +243,17 @@ In order to build llama.cpp you have three different options.
     cmake --build . --config Release
     ```
 
-- Using `Zig`:
+- Using `Zig` (version 0.11 or later):
+
+  Building for specific optimization levels and CPU features can be accomplished using standard build arguments, for example AVX2, FMA and F16C.
+  It is also possible to cross-compile for other operating systems and architectures:
 
   ```bash
-  zig build -Doptimize=ReleaseFast
+  zig build -Doptimize=ReleaseFast -Dtarget=x86_64-windows-gnu -Dcpu=x86_64+avx2+fma+f16c
   ```
 
+  The `zig targets` command will give you valid options to use.
+
- Using `gmake` (FreeBSD):

    1. Install and activate [DRM in FreeBSD](https://wiki.freebsd.org/Graphics)
@@ -284,7 +294,7 @@ When built with Metal support, you can enable GPU inference with the `--gpu-laye
Any value larger than 0 will offload the computation to the GPU.
For example: ```bash -./main -m ./models/7B/ggml-model-q4_0.bin -n 128 -ngl 1 +./main -m ./models/7B/ggml-model-q4_0.gguf -n 128 -ngl 1 ``` ### MPI Build @@ -323,7 +333,7 @@ The above will distribute the computation across 2 processes on the first host a Finally, you're ready to run a computation using `mpirun`: ```bash -mpirun -hostfile hostfile -n 3 ./main -m ./models/7B/ggml-model-q4_0.bin -n 128 +mpirun -hostfile hostfile -n 3 ./main -m ./models/7B/ggml-model-q4_0.gguf -n 128 ``` ### BLAS Build @@ -406,10 +416,9 @@ Building the program with BLAS support may lead to some performance improvements ---> | Option | Legal values | Default | Description | |-------------------------|------------------------|---------|-------------| - | LLAMA_CUDA_MMQ_Y | Positive integer >= 32 | 64 | Tile size in y direction when using the custom CUDA kernels for prompt processing. Higher values can be faster depending on the amount of shared memory available. Power of 2 heavily recommended. | | LLAMA_CUDA_FORCE_DMMV | Boolean | false | Force the use of dequantization + matrix vector multiplication kernels instead of using kernels that do matrix vector multiplication on quantized data. By default the decision is made based on compute capability (MMVQ for 6.1/Pascal/GTX 1000 or higher). Does not affect k-quants. | | LLAMA_CUDA_DMMV_X | Positive integer >= 32 | 32 | Number of values in x direction processed by the CUDA dequantization + matrix vector multiplication kernel per iteration. Increasing this value can improve performance on fast GPUs. Power of 2 heavily recommended. Does not affect k-quants. | - | LLAMA_CUDA_MMV_Y | Positive integer | 1 | Block size in y direction for the CUDA mul mat vec kernels. Increasing this value can improve performance on fast GPUs. Power of 2 recommended. Does not affect k-quants. | + | LLAMA_CUDA_MMV_Y | Positive integer | 1 | Block size in y direction for the CUDA mul mat vec kernels. Increasing this value can improve performance on fast GPUs. Power of 2 recommended. | | LLAMA_CUDA_F16 | Boolean | false | If enabled, use half-precision floating point arithmetic for the CUDA dequantization + mul mat vec kernels and for the q4_1 and q5_1 matrix matrix multiplication kernels. Can improve performance on relatively recent GPUs. | | LLAMA_CUDA_KQUANTS_ITER | 1 or 2 | 2 | Number of values processed per iteration and per CUDA thread for Q2_K and Q6_K quantization formats. Setting this value to 1 can improve performance for slow GPUs. | @@ -507,10 +516,10 @@ python3 convert.py models/7B/ python convert.py models/7B/ --vocabtype bpe # quantize the model to 4-bits (using q4_0 method) -./quantize ./models/7B/ggml-model-f16.bin ./models/7B/ggml-model-q4_0.bin q4_0 +./quantize ./models/7B/ggml-model-f16.gguf ./models/7B/ggml-model-q4_0.gguf q4_0 # run the inference -./main -m ./models/7B/ggml-model-q4_0.bin -n 128 +./main -m ./models/7B/ggml-model-q4_0.gguf -n 128 ``` When running the larger models, make sure you have enough disk space to store all the intermediate files. @@ -530,6 +539,8 @@ As the models are currently fully loaded into memory, you will need adequate dis Several quantization methods are supported. They differ in the resulting model disk size and inference speed. 
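As a concrete example, producing one of the other types listed in the table below reuses the same `quantize` tool from the steps above, only with a different type argument. A minimal sketch, assuming the F16 GGUF model from the conversion step already exists (`q8_0` trades a larger file for lower quality loss):

```bash
./quantize ./models/7B/ggml-model-f16.gguf ./models/7B/ggml-model-q8_0.gguf q8_0
```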
+*(outdated)* + | Model | Measure | F16 | Q4_0 | Q4_1 | Q5_0 | Q5_1 | Q8_0 | |------:|--------------|-------:|-------:|-------:|-------:|-------:|-------:| | 7B | perplexity | 5.9066 | 6.1565 | 6.0912 | 5.9862 | 5.9481 | 5.9070 | @@ -566,7 +577,7 @@ Here is an example of a few-shot interaction, invoked with the command ./examples/chat-13B.sh # custom arguments using a 13B model -./main -m ./models/13B/ggml-model-q4_0.bin -n 256 --repeat_penalty 1.0 --color -i -r "User:" -f prompts/chat-with-bob.txt +./main -m ./models/13B/ggml-model-q4_0.gguf -n 256 --repeat_penalty 1.0 --color -i -r "User:" -f prompts/chat-with-bob.txt ``` Note the use of `--color` to distinguish between user input and generated text. Other parameters are explained in more detail in the [README](examples/main/README.md) for the `main` example program. @@ -592,6 +603,16 @@ PROMPT_TEMPLATE=./prompts/chat-with-bob.txt PROMPT_CACHE_FILE=bob.prompt.bin \ CHAT_SAVE_DIR=./chat/bob ./examples/chat-persistent.sh ``` +### Constrained output with grammars + +`llama.cpp` supports grammars to constrain model output. For example, you can force the model to output JSON only: + +```bash +./main -m ./models/13B/ggml-model-q4_0.gguf -n 256 --grammar-file grammars/json.gbnf -p 'Request: schedule a call at 8pm; Command:' +``` + +The `grammars/` folder contains a handful of sample grammars. To write your own, check out the [GBNF Guide](./grammars/README.md). + ### Instruction mode with Alpaca 1. First, download the `ggml` Alpaca model into the `./models` folder @@ -629,6 +650,8 @@ OpenLLaMA is an openly licensed reproduction of Meta's original LLaMA model. It ### Using [GPT4All](https://github.com/nomic-ai/gpt4all) +*Note: these instructions are likely obsoleted by the GGUF update* + - Obtain the `tokenizer.model` file from LLaMA model and put it to `models` - Obtain the `added_tokens.json` file from Alpaca model and put it to `models` - Obtain the `gpt4all-lora-quantized.bin` file from GPT4All model and put it to `models/gpt4all-7B` @@ -704,7 +727,7 @@ If your issue is with model generation quality, then please at least scan the fo #### How to run 1. Download/extract: https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip?ref=salesforce-research -2. Run `./perplexity -m models/7B/ggml-model-q4_0.bin -f wiki.test.raw` +2. Run `./perplexity -m models/7B/ggml-model-q4_0.gguf -f wiki.test.raw` 3. Output: ``` perplexity : calculating perplexity over 655 chunks @@ -803,13 +826,13 @@ docker run -v /path/to/models:/models ghcr.io/ggerganov/llama.cpp:full --all-in- On completion, you are ready to play! 
```bash -docker run -v /path/to/models:/models ghcr.io/ggerganov/llama.cpp:full --run -m /models/7B/ggml-model-q4_0.bin -p "Building a website can be done in 10 simple steps:" -n 512 +docker run -v /path/to/models:/models ghcr.io/ggerganov/llama.cpp:full --run -m /models/7B/ggml-model-q4_0.gguf -p "Building a website can be done in 10 simple steps:" -n 512 ``` or with a light image: ```bash -docker run -v /path/to/models:/models ghcr.io/ggerganov/llama.cpp:light -m /models/7B/ggml-model-q4_0.bin -p "Building a website can be done in 10 simple steps:" -n 512 +docker run -v /path/to/models:/models ghcr.io/ggerganov/llama.cpp:light -m /models/7B/ggml-model-q4_0.gguf -p "Building a website can be done in 10 simple steps:" -n 512 ``` ### Docker With CUDA @@ -840,8 +863,8 @@ The resulting images, are essentially the same as the non-CUDA images: After building locally, Usage is similar to the non-CUDA examples, but you'll need to add the `--gpus` flag. You will also want to use the `--n-gpu-layers` flag. ```bash -docker run --gpus all -v /path/to/models:/models local/llama.cpp:full-cuda --run -m /models/7B/ggml-model-q4_0.bin -p "Building a website can be done in 10 simple steps:" -n 512 --n-gpu-layers 1 -docker run --gpus all -v /path/to/models:/models local/llama.cpp:light-cuda -m /models/7B/ggml-model-q4_0.bin -p "Building a website can be done in 10 simple steps:" -n 512 --n-gpu-layers 1 +docker run --gpus all -v /path/to/models:/models local/llama.cpp:full-cuda --run -m /models/7B/ggml-model-q4_0.gguf -p "Building a website can be done in 10 simple steps:" -n 512 --n-gpu-layers 1 +docker run --gpus all -v /path/to/models:/models local/llama.cpp:light-cuda -m /models/7B/ggml-model-q4_0.gguf -p "Building a website can be done in 10 simple steps:" -n 512 --n-gpu-layers 1 ``` ### Contributing @@ -871,3 +894,4 @@ docker run --gpus all -v /path/to/models:/models local/llama.cpp:light-cuda -m / - [BLIS](./docs/BLIS.md) - [Performance troubleshooting](./docs/token_generation_performance_tips.md) - [GGML tips & tricks](https://github.com/ggerganov/llama.cpp/wiki/GGML-Tips-&-Tricks) +- [GBNF grammars](./grammars/README.md) diff --git a/build.zig b/build.zig index 2287d2a2c..f2769ba8c 100644 --- a/build.zig +++ b/build.zig @@ -1,68 +1,121 @@ +// Compatible with Zig Version 0.11.0 const std = @import("std"); -const commit_hash = @embedFile(".git/refs/heads/master"); +const ArrayList = std.ArrayList; +const Compile = std.Build.Step.Compile; +const ConfigHeader = std.Build.Step.ConfigHeader; +const Mode = std.builtin.Mode; +const CrossTarget = std.zig.CrossTarget; -// Zig Version: 0.11.0-dev.3986+e05c242cd -pub fn build(b: *std.build.Builder) void { - const target = b.standardTargetOptions(.{}); - const optimize = b.standardOptimizeOption(.{}); +const Maker = struct { + builder: *std.build.Builder, + target: CrossTarget, + optimize: Mode, + config_header: *ConfigHeader, + enable_lto: bool, - const config_header = b.addConfigHeader( - .{ .style = .blank, .include_path = "build-info.h" }, - .{ - .BUILD_NUMBER = 0, - .BUILD_COMMIT = commit_hash[0 .. 
commit_hash.len - 1], // omit newline
-        },
-    );
+    include_dirs: ArrayList([]const u8),
+    cflags: ArrayList([]const u8),
+    cxxflags: ArrayList([]const u8),
+    objs: ArrayList(*Compile),
 
-    const lib = b.addStaticLibrary(.{
-        .name = "llama",
-        .target = target,
-        .optimize = optimize,
-    });
-    lib.linkLibC();
-    lib.linkLibCpp();
-    lib.addIncludePath(".");
-    lib.addIncludePath("./examples");
-    lib.addConfigHeader(config_header);
-    lib.addCSourceFiles(&.{"ggml.c"}, &.{"-std=c11"});
-    lib.addCSourceFiles(&.{"llama.cpp"}, &.{"-std=c++11"});
-    b.installArtifact(lib);
+    fn addInclude(m: *Maker, dir: []const u8) !void {
+        try m.include_dirs.append(dir);
+    }
+    fn addProjectInclude(m: *Maker, path: []const []const u8) !void {
+        try m.addInclude(try m.builder.build_root.join(m.builder.allocator, path));
+    }
+    fn addCFlag(m: *Maker, flag: []const u8) !void {
+        try m.cflags.append(flag);
+    }
+    fn addCxxFlag(m: *Maker, flag: []const u8) !void {
+        try m.cxxflags.append(flag);
+    }
+    fn addFlag(m: *Maker, flag: []const u8) !void {
+        try m.addCFlag(flag);
+        try m.addCxxFlag(flag);
+    }
 
-    const examples = .{
-        "main",
-        "baby-llama",
-        "embedding",
-        "metal",
-        "perplexity",
-        "quantize",
-        "quantize-stats",
-        "save-load-state",
-        "server",
-        "simple",
-        "train-text-from-scratch",
-    };
+    fn init(builder: *std.build.Builder) !Maker {
+        const commit_hash = @embedFile(".git/refs/heads/master");
+        const config_header = builder.addConfigHeader(
+            .{ .style = .blank, .include_path = "build-info.h" },
+            .{
+                .BUILD_NUMBER = 0,
+                .BUILD_COMMIT = commit_hash[0 .. commit_hash.len - 1], // omit newline
+            },
+        );
+        var m = Maker{
+            .builder = builder,
+            .target = builder.standardTargetOptions(.{}),
+            .optimize = builder.standardOptimizeOption(.{}),
+            .config_header = config_header,
+            .enable_lto = false,
+            .include_dirs = ArrayList([]const u8).init(builder.allocator),
+            .cflags = ArrayList([]const u8).init(builder.allocator),
+            .cxxflags = ArrayList([]const u8).init(builder.allocator),
+            .objs = ArrayList(*Compile).init(builder.allocator),
+        };
+        try m.addCFlag("-std=c11");
+        try m.addCxxFlag("-std=c++11");
+        try m.addProjectInclude(&.{});
+        try m.addProjectInclude(&.{"common"});
+        return m;
+    }
 
-    inline for (examples) |example_name| {
-        const exe = b.addExecutable(.{
-            .name = example_name,
-            .target = target,
-            .optimize = optimize,
-        });
-        exe.addIncludePath(".");
-        exe.addIncludePath("./examples");
-        exe.addConfigHeader(config_header);
-        exe.addCSourceFiles(&.{
-            std.fmt.comptimePrint("examples/{s}/{s}.cpp", .{ example_name, example_name }),
-            "examples/common.cpp",
-        }, &.{"-std=c++11"});
-        exe.linkLibrary(lib);
-        b.installArtifact(exe);
+    fn obj(m: *const Maker, name: []const u8, src: []const u8) *Compile {
+        const o = m.builder.addObject(.{ .name = name, .target = m.target, .optimize = m.optimize });
+        if (std.mem.endsWith(u8, src, ".c")) {
+            o.addCSourceFiles(&.{src}, m.cflags.items);
+            o.linkLibC();
+        } else {
+            o.addCSourceFiles(&.{src}, m.cxxflags.items);
+            o.linkLibCpp();
+        }
+        for (m.include_dirs.items) |i| o.addIncludePath(.{ .path = i });
+        o.want_lto = m.enable_lto;
+        return o;
+    }
 
-        const run_cmd = b.addRunArtifact(exe);
-        run_cmd.step.dependOn(b.getInstallStep());
-        if (b.args) |args| run_cmd.addArgs(args);
+    fn exe(m: *const Maker, name: []const u8, src: []const u8, deps: []const *Compile) *Compile {
+        const e = m.builder.addExecutable(.{ .name = name, .target = m.target, .optimize = m.optimize });
+        e.addCSourceFiles(&.{src}, m.cxxflags.items);
+        for (deps) |d| e.addObject(d);
+        for (m.objs.items) |o| e.addObject(o);
+        for (m.include_dirs.items) |i| e.addIncludePath(.{ .path = i });
+        e.linkLibC();
+        e.linkLibCpp();
+        e.addConfigHeader(m.config_header);
+        m.builder.installArtifact(e);
+        e.want_lto = m.enable_lto;
+        return e;
+    }
+};
 
-        const run_step = b.step("run-" ++ example_name, "Run the app");
-        run_step.dependOn(&run_cmd.step);
+pub fn build(b: *std.build.Builder) !void {
+    var make = try Maker.init(b);
+    make.enable_lto = b.option(bool, "lto", "Enable LTO optimization, (default: false)") orelse false;
+
+    if (b.option(bool, "k-quants", "Enable K-quants, (default: true)") orelse true) {
+        try make.addFlag("-DGGML_USE_K_QUANTS");
+        const k_quants = make.obj("k_quants", "k_quants.c");
+        try make.objs.append(k_quants);
+    }
+
+    const ggml = make.obj("ggml", "ggml.c");
+    const ggml_alloc = make.obj("ggml-alloc", "ggml-alloc.c");
+    const llama = make.obj("llama", "llama.cpp");
+    const common = make.obj("common", "common/common.cpp");
+    const console = make.obj("console", "common/console.cpp");
+    const grammar_parser = make.obj("grammar-parser", "common/grammar-parser.cpp");
+
+    _ = make.exe("main", "examples/main/main.cpp", &.{ ggml, ggml_alloc, llama, common, console, grammar_parser });
+    _ = make.exe("quantize", "examples/quantize/quantize.cpp", &.{ ggml, ggml_alloc, llama });
+    _ = make.exe("perplexity", "examples/perplexity/perplexity.cpp", &.{ ggml, ggml_alloc, llama, common });
+    _ = make.exe("embedding", "examples/embedding/embedding.cpp", &.{ ggml, ggml_alloc, llama, common });
+    _ = make.exe("train-text-from-scratch", "examples/train-text-from-scratch/train-text-from-scratch.cpp", &.{ ggml, ggml_alloc, llama });
+
+    const server = make.exe("server", "examples/server/server.cpp", &.{ ggml, ggml_alloc, llama, common, grammar_parser });
+    if (server.target.isWindows()) {
+        server.linkSystemLibrary("ws2_32");
+    }
+}
diff --git a/ci/run.sh b/ci/run.sh
old mode 100644
new mode 100755
index 8dc394964..54ba6d710
--- a/ci/run.sh
+++ b/ci/run.sh
@@ -159,17 +159,17 @@ function gg_run_open_llama_3b_v2 {
 
     python3 ../convert.py ${path_models}
 
-    model_f16="${path_models}/ggml-model-f16.bin"
-    model_q8_0="${path_models}/ggml-model-q8_0.bin"
-    model_q4_0="${path_models}/ggml-model-q4_0.bin"
-    model_q4_1="${path_models}/ggml-model-q4_1.bin"
-    model_q5_0="${path_models}/ggml-model-q5_0.bin"
-    model_q5_1="${path_models}/ggml-model-q5_1.bin"
-    model_q2_k="${path_models}/ggml-model-q2_k.bin"
-    model_q3_k="${path_models}/ggml-model-q3_k.bin"
-    model_q4_k="${path_models}/ggml-model-q4_k.bin"
-    model_q5_k="${path_models}/ggml-model-q5_k.bin"
-    model_q6_k="${path_models}/ggml-model-q6_k.bin"
+    model_f16="${path_models}/ggml-model-f16.gguf"
+    model_q8_0="${path_models}/ggml-model-q8_0.gguf"
+    model_q4_0="${path_models}/ggml-model-q4_0.gguf"
+    model_q4_1="${path_models}/ggml-model-q4_1.gguf"
+    model_q5_0="${path_models}/ggml-model-q5_0.gguf"
+    model_q5_1="${path_models}/ggml-model-q5_1.gguf"
+    model_q2_k="${path_models}/ggml-model-q2_k.gguf"
+    model_q3_k="${path_models}/ggml-model-q3_k.gguf"
+    model_q4_k="${path_models}/ggml-model-q4_k.gguf"
+    model_q5_k="${path_models}/ggml-model-q5_k.gguf"
+    model_q6_k="${path_models}/ggml-model-q6_k.gguf"
 
     wiki_test_60="${path_wiki}/wiki.test-60.raw"
 
@@ -285,17 +285,17 @@ function gg_run_open_llama_7b_v2 {
 
     python3 ../convert.py ${path_models}
 
-    model_f16="${path_models}/ggml-model-f16.bin"
-    model_q8_0="${path_models}/ggml-model-q8_0.bin"
-    model_q4_0="${path_models}/ggml-model-q4_0.bin"
-    model_q4_1="${path_models}/ggml-model-q4_1.bin"
-    model_q5_0="${path_models}/ggml-model-q5_0.bin"
-    model_q5_1="${path_models}/ggml-model-q5_1.bin"
-    model_q2_k="${path_models}/ggml-model-q2_k.bin"
-    model_q3_k="${path_models}/ggml-model-q3_k.bin"
-    model_q4_k="${path_models}/ggml-model-q4_k.bin"
-    model_q5_k="${path_models}/ggml-model-q5_k.bin"
-    model_q6_k="${path_models}/ggml-model-q6_k.bin"
+    model_f16="${path_models}/ggml-model-f16.gguf"
+    model_q8_0="${path_models}/ggml-model-q8_0.gguf"
+    model_q4_0="${path_models}/ggml-model-q4_0.gguf"
+    model_q4_1="${path_models}/ggml-model-q4_1.gguf"
+    model_q5_0="${path_models}/ggml-model-q5_0.gguf"
+    model_q5_1="${path_models}/ggml-model-q5_1.gguf"
+    model_q2_k="${path_models}/ggml-model-q2_k.gguf"
+    model_q3_k="${path_models}/ggml-model-q3_k.gguf"
+    model_q4_k="${path_models}/ggml-model-q4_k.gguf"
+    model_q5_k="${path_models}/ggml-model-q5_k.gguf"
+    model_q6_k="${path_models}/ggml-model-q6_k.gguf"
 
     wiki_test="${path_wiki}/wiki.test.raw"
diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt
new file mode 100644
index 000000000..dead56118
--- /dev/null
+++ b/common/CMakeLists.txt
@@ -0,0 +1,20 @@
+# common
+
+set(TARGET common)
+
+add_library(${TARGET} OBJECT
+    common.h
+    common.cpp
+    console.h
+    console.cpp
+    grammar-parser.h
+    grammar-parser.cpp
+    )
+
+if (BUILD_SHARED_LIBS)
+    set_target_properties(${TARGET} PROPERTIES POSITION_INDEPENDENT_CODE ON)
+endif()
+
+target_include_directories(${TARGET} PUBLIC .)
+target_compile_features(${TARGET} PUBLIC cxx_std_11)
+target_link_libraries(${TARGET} PRIVATE llama)
diff --git a/examples/common.cpp b/common/common.cpp
similarity index 88%
rename from examples/common.cpp
rename to common/common.cpp
index 21f4a0357..53002ba30 100644
--- a/examples/common.cpp
+++ b/common/common.cpp
@@ -170,18 +170,6 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) {
             break;
         }
         params.n_ctx = std::stoi(argv[i]);
-    } else if (arg == "-gqa" || arg == "--gqa") {
-        if (++i >= argc) {
-            invalid_param = true;
-            break;
-        }
-        params.n_gqa = std::stoi(argv[i]);
-    } else if (arg == "-eps" || arg == "--rms-norm-eps") {
-        if (++i >= argc) {
-            invalid_param = true;
-            break;
-        }
-        params.rms_norm_eps = std::stof(argv[i]);
     } else if (arg == "--rope-freq-base") {
         if (++i >= argc) {
             invalid_param = true;
@@ -194,6 +182,12 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) {
             break;
         }
         params.rope_freq_scale = std::stof(argv[i]);
+    } else if (arg == "--rope-scale") {
+        if (++i >= argc) {
+            invalid_param = true;
+            break;
+        }
+        params.rope_freq_scale = 1.0f/std::stof(argv[i]);
     } else if (arg == "--memory-f32") {
         params.memory_f16 = false;
     } else if (arg == "--top-p") {
@@ -268,6 +262,21 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) {
             break;
         }
         params.cfg_negative_prompt = argv[i];
+    } else if (arg == "--cfg-negative-prompt-file") {
+        if (++i >= argc) {
+            invalid_param = true;
+            break;
+        }
+        std::ifstream file(argv[i]);
+        if (!file) {
+            fprintf(stderr, "error: failed to open file '%s'\n", argv[i]);
+            invalid_param = true;
+            break;
+        }
+        std::copy(std::istreambuf_iterator<char>(file), std::istreambuf_iterator<char>(), back_inserter(params.cfg_negative_prompt));
+        if (params.cfg_negative_prompt.back() == '\n') {
+            params.cfg_negative_prompt.pop_back();
+        }
     } else if (arg == "--cfg-scale") {
         if (++i >= argc) {
             invalid_param = true;
@@ -280,7 +289,6 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) {
             break;
         }
         params.n_batch = std::stoi(argv[i]);
-        params.n_batch = std::min(512, params.n_batch);
     } else if (arg == "--keep") {
         if
(++i >= argc) { invalid_param = true; @@ -379,11 +387,11 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { #else fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. It is not possible to set a tensor split.\n"); #endif // GGML_USE_CUBLAS - } else if (arg == "--mul-mat-q" || arg == "-mmq") { + } else if (arg == "--no-mul-mat-q" || arg == "-nommq") { #ifdef GGML_USE_CUBLAS - params.mul_mat_q = true; + params.mul_mat_q = false; #else - fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. It is not possible to use mul_mat_q kernels.\n"); + fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Disabling mul_mat_q kernels has no effect.\n"); #endif // GGML_USE_CUBLAS } else if (arg == "--low-vram" || arg == "-lv") { #ifdef GGML_USE_CUBLAS @@ -409,6 +417,18 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { params.antiprompt.push_back(argv[i]); } else if (arg == "--perplexity") { params.perplexity = true; + } else if (arg == "--ppl-stride") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.ppl_stride = std::stoi(argv[i]); + } else if (arg == "--ppl-output-type") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.ppl_output_type = std::stoi(argv[i]); } else if (arg == "--hellaswag") { params.hellaswag = true; } else if (arg == "--hellaswag-tasks") { @@ -418,7 +438,7 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { } params.hellaswag_tasks = std::stoi(argv[i]); } else if (arg == "--ignore-eos") { - params.logit_bias[llama_token_eos()] = -INFINITY; + params.ignore_eos = true; } else if (arg == "--no-penalize-nl") { params.penalize_nl = false; } else if (arg == "-l" || arg == "--logit-bias") { @@ -537,11 +557,9 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { fprintf(stdout, " --in-suffix STRING string to suffix after user inputs with (default: empty)\n"); fprintf(stdout, " -f FNAME, --file FNAME\n"); fprintf(stdout, " prompt file to start generation.\n"); - fprintf(stdout, " -n N, --n-predict N number of tokens to predict (default: %d, -1 = infinity)\n", params.n_predict); + fprintf(stdout, " -n N, --n-predict N number of tokens to predict (default: %d, -1 = infinity, -2 = until context filled)\n", params.n_predict); fprintf(stdout, " -c N, --ctx-size N size of the prompt context (default: %d)\n", params.n_ctx); fprintf(stdout, " -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); - fprintf(stdout, " -gqa N, --gqa N grouped-query attention factor (TEMP!!! use 8 for LLaMAv2 70B) (default: %d)\n", params.n_gqa); - fprintf(stdout, " -eps N, --rms-norm-eps N rms norm eps (TEMP!!! 
use 1e-5 for LLaMAv2) (default: %.1e)\n", params.rms_norm_eps); fprintf(stdout, " --top-k N top-k sampling (default: %d, 0 = disabled)\n", params.top_k); fprintf(stdout, " --top-p N top-p sampling (default: %.1f, 1.0 = disabled)\n", (double)params.top_p); fprintf(stdout, " --tfs N tail free sampling, parameter z (default: %.1f, 1.0 = disabled)\n", (double)params.tfs_z); @@ -561,11 +579,14 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { fprintf(stdout, " or `--logit-bias 15043-1` to decrease likelihood of token ' Hello'\n"); fprintf(stdout, " --grammar GRAMMAR BNF-like grammar to constrain generations (see samples in grammars/ dir)\n"); fprintf(stdout, " --grammar-file FNAME file to read grammar from\n"); - fprintf(stdout, " --cfg-negative-prompt PROMPT \n"); + fprintf(stdout, " --cfg-negative-prompt PROMPT\n"); fprintf(stdout, " negative prompt to use for guidance. (default: empty)\n"); + fprintf(stdout, " --cfg-negative-prompt-file FNAME\n"); + fprintf(stdout, " negative prompt file to use for guidance. (default: empty)\n"); fprintf(stdout, " --cfg-scale N strength of guidance (default: %f, 1.0 = disable)\n", params.cfg_scale); - fprintf(stdout, " --rope-freq-base N RoPE base frequency (default: %.1f)\n", params.rope_freq_base); - fprintf(stdout, " --rope-freq-scale N RoPE frequency scaling factor (default: %g)\n", params.rope_freq_scale); + fprintf(stdout, " --rope-scale N RoPE context linear scaling factor, inverse of --rope-freq-scale (default: %g)\n", 1.0f/params.rope_freq_scale); + fprintf(stdout, " --rope-freq-base N RoPE base frequency, used by NTK-aware scaling (default: %.1f)\n", params.rope_freq_base); + fprintf(stdout, " --rope-freq-scale N RoPE frequency linear scaling factor, inverse of --rope-scale (default: %g)\n", params.rope_freq_scale); fprintf(stdout, " --ignore-eos ignore end of stream token and continue generating (implies --logit-bias 2-inf)\n"); fprintf(stdout, " --no-penalize-nl do not penalize newline token\n"); fprintf(stdout, " --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); @@ -590,11 +611,11 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { fprintf(stdout, " number of layers to store in VRAM\n"); fprintf(stdout, " -ts SPLIT --tensor-split SPLIT\n"); fprintf(stdout, " how to split tensors across multiple GPUs, comma-separated list of proportions, e.g. 3,1\n"); - fprintf(stdout, " -mg i, --main-gpu i the GPU to use for scratch and small tensors\n" ); - fprintf(stdout, " -lv, --low-vram don't allocate VRAM scratch buffer\n" ); - fprintf(stdout, " -mmq, --mul-mat-q use experimental mul_mat_q CUDA kernels instead of cuBLAS. 
TEMP!!!\n" );
-    fprintf(stdout, "                        Reduces VRAM usage by 700/970/1430 MiB for 7b/13b/33b but prompt processing speed\n" );
-    fprintf(stdout, "                        is still suboptimal, especially q2_K, q3_K, q5_K, and q6_K.\n" );
+    fprintf(stdout, "  -mg i, --main-gpu i   the GPU to use for scratch and small tensors\n");
+    fprintf(stdout, "  -lv, --low-vram       don't allocate VRAM scratch buffer\n");
+    fprintf(stdout, "  -nommq, --no-mul-mat-q\n");
+    fprintf(stdout, "                        use cuBLAS instead of custom mul_mat_q CUDA kernels.\n");
+    fprintf(stdout, "                        Not recommended since this is both slower and uses more VRAM.\n");
 #endif
     fprintf(stdout, "  --mtest               compute maximum memory usage\n");
     fprintf(stdout, "  --export              export the computation graph to 'llama.ggml'\n");
@@ -626,24 +647,15 @@ std::string gpt_random_prompt(std::mt19937 & rng) {
     return "The";
 }
 
-// TODO: not great allocating this every time
-std::vector<llama_token> llama_tokenize(struct llama_context * ctx, const std::string & text, bool add_bos) {
-    // initialize to prompt numer of chars, since n_tokens <= n_prompt_chars
-    std::vector<llama_token> res(text.size() + (int) add_bos);
-    const int n = llama_tokenize(ctx, text.c_str(), res.data(), res.size(), add_bos);
-    assert(n >= 0);
-    res.resize(n);
-
-    return res;
-}
+//
+// Model utils
+//
 
 struct llama_context_params llama_context_params_from_gpt_params(const gpt_params & params) {
     auto lparams = llama_context_default_params();
 
     lparams.n_ctx = params.n_ctx;
     lparams.n_batch = params.n_batch;
-    lparams.n_gqa = params.n_gqa;
-    lparams.rms_norm_eps = params.rms_norm_eps;
     lparams.n_gpu_layers = params.n_gpu_layers;
     lparams.main_gpu = params.main_gpu;
     lparams.tensor_split = params.tensor_split;
@@ -661,7 +673,7 @@ struct llama_context_params llama_context_params_from_gpt_param
     return lparams;
 }
 
-std::tuple<struct llama_model *, struct llama_context *> llama_init_from_gpt_params(const gpt_params & params) {
+std::tuple<struct llama_model *, struct llama_context *> llama_init_from_gpt_params(gpt_params & params) {
     auto lparams = llama_context_params_from_gpt_params(params);
 
     llama_model * model = llama_load_model_from_file(params.model.c_str(), lparams);
@@ -690,5 +702,45 @@ std::tuple llama_init_from_gpt_par
         }
     }
 
+    if (params.ignore_eos) {
+        params.logit_bias[llama_token_eos(lctx)] = -INFINITY;
+    }
+
     return std::make_tuple(model, lctx);
 }
+
+//
+// Vocab utils
+//
+
+std::vector<llama_token> llama_tokenize(
+        struct llama_context * ctx,
+        const std::string & text,
+        bool add_bos) {
+    // upper limit for the number of tokens
+    int n_tokens = text.length() + add_bos;
+    std::vector<llama_token> result(n_tokens);
+    n_tokens = llama_tokenize(ctx, text.c_str(), result.data(), result.size(), add_bos);
+    if (n_tokens < 0) {
+        result.resize(-n_tokens);
+        int check = llama_tokenize(ctx, text.c_str(), result.data(), result.size(), add_bos);
+        GGML_ASSERT(check == -n_tokens);
+    } else {
+        result.resize(n_tokens);
+    }
+    return result;
+}
+
+std::string llama_token_to_str(const struct llama_context * ctx, llama_token token) {
+    std::vector<char> result(8, 0);
+    const int n_tokens = llama_token_to_str(ctx, token, result.data(), result.size());
+    if (n_tokens < 0) {
+        result.resize(-n_tokens);
+        int check = llama_token_to_str(ctx, token, result.data(), result.size());
+        GGML_ASSERT(check == -n_tokens);
+    } else {
+        result.resize(n_tokens);
+    }
+
+    return std::string(result.data(), result.size());
+}
diff --git a/examples/common.h b/common/common.h
similarity index 85%
rename from examples/common.h
rename to common/common.h
index 375bc0a3d..17d271e67 100644
--- a/examples/common.h
+++ b/common/common.h
@@ -22,19 +22,16 @@ struct gpt_params {
     int32_t n_predict = -1; // new tokens
to predict int32_t n_ctx = 512; // context size int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS) - int32_t n_gqa = 1; // grouped-query attention factor (TODO: move to hparams) int32_t n_keep = 0; // number of tokens to keep from initial prompt int32_t n_chunks = -1; // max number of chunks to process (-1 = unlimited) int32_t n_gpu_layers = 0; // number of layers to store in VRAM int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors float tensor_split[LLAMA_MAX_DEVICES] = {0}; // how split tensors should be distributed across GPUs int32_t n_probs = 0; // if greater than 0, output the probabilities of top n_probs tokens. - float rms_norm_eps = LLAMA_DEFAULT_RMS_EPS; // rms norm epsilon float rope_freq_base = 10000.0f; // RoPE base frequency float rope_freq_scale = 1.0f; // RoPE frequency scaling factor // sampling parameters - std::unordered_map logit_bias; // logit bias for specific tokens int32_t top_k = 40; // <= 0 to use vocab size float top_p = 0.95f; // 1.0 = disabled float tfs_z = 1.00f; // 1.0 = disabled @@ -48,12 +45,14 @@ struct gpt_params { float mirostat_tau = 5.00f; // target entropy float mirostat_eta = 0.10f; // learning rate + std::unordered_map logit_bias; // logit bias for specific tokens + // Classifier-Free Guidance // https://arxiv.org/abs/2306.17806 std::string cfg_negative_prompt; // string to help guidance float cfg_scale = 1.f; // How strong is guidance - std::string model = "models/7B/ggml-model.bin"; // model path + std::string model = "models/7B/ggml-model-f16.gguf"; // model path std::string model_alias = "unknown"; // model alias std::string prompt = ""; std::string path_prompt_cache = ""; // path to file for saving/loading prompt eval state @@ -65,11 +64,15 @@ struct gpt_params { std::string lora_adapter = ""; // lora adapter path std::string lora_base = ""; // base model path for the lora adapter + int ppl_stride = 0; // stride for perplexity calculations. If left at 0, the pre-existing approach will be used. 
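(Editor's aside: the logit_bias map declared in this struct is also how the new ignore_eos flag works — llama_init_from_gpt_params() above sets the EOS token's bias to -INFINITY, which zeroes its probability after softmax. A minimal Python sketch of that mechanism, with a plain softmax sampler standing in for the real one; the helper name is illustrative, not part of the patch:

    import numpy as np

    def apply_logit_bias(logits, logit_bias):
        # mirrors gpt_params::logit_bias: add each bias to its token's raw logit;
        # a bias of -inf can never win the softmax, so the token is effectively banned
        for tok, bias in logit_bias.items():
            logits[tok] += bias
        return logits

    logits = apply_logit_bias(np.zeros(5), {2: -np.inf})   # pretend token 2 is EOS
    probs = np.exp(logits - logits[np.isfinite(logits)].max())
    probs /= probs.sum()
    assert probs[2] == 0.0                                 # EOS can never be sampled
)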
+ int ppl_output_type = 0; // = 0 -> ppl output is as usual, = 1 -> ppl output is num_tokens, ppl, one per line + // (which is more convenient to use for plotting) + // bool hellaswag = false; // compute HellaSwag score over random tasks from datafile supplied in prompt size_t hellaswag_tasks = 400; // number of tasks to use when computing the HellaSwag score bool low_vram = false; // if true, reduce VRAM usage at the cost of performance - bool mul_mat_q = false; // if true, use experimental mul_mat_q kernels + bool mul_mat_q = true; // if true, use mul_mat_q kernels instead of cuBLAS bool memory_f16 = true; // use f16 instead of f32 for memory kv bool random_prompt = false; // do not randomize prompt if none provided bool use_color = false; // use color to distinguish generations and inputs @@ -83,6 +86,7 @@ struct gpt_params { bool simple_io = false; // improves compatibility with subprocesses and limited consoles bool input_prefix_bos = false; // prefix BOS to user inputs, preceding input_prefix + bool ignore_eos = false; // ignore generated EOS tokens bool instruct = false; // instruction mode (used for Alpaca models) bool penalize_nl = true; // consider newlines as a repeatable token bool perplexity = false; // compute perplexity over the prompt @@ -100,15 +104,22 @@ void gpt_print_usage(int argc, char ** argv, const gpt_params & params); std::string gpt_random_prompt(std::mt19937 & rng); -// -// Vocab utils -// - -std::vector llama_tokenize(struct llama_context * ctx, const std::string & text, bool add_bos); - // // Model utils // -std::tuple llama_init_from_gpt_params(const gpt_params & params); +std::tuple llama_init_from_gpt_params(gpt_params & params); struct llama_context_params llama_context_params_from_gpt_params(const gpt_params & params); + +// +// Vocab utils +// + +std::vector llama_tokenize( + struct llama_context * ctx, + const std::string & text, + bool add_bos); + +std::string llama_token_to_str( + const struct llama_context * ctx, + llama_token token); diff --git a/examples/console.cpp b/common/console.cpp similarity index 98% rename from examples/console.cpp rename to common/console.cpp index 8966b107f..8efa2a674 100644 --- a/examples/console.cpp +++ b/common/console.cpp @@ -10,6 +10,9 @@ #include #include #include +#ifndef ENABLE_VIRTUAL_TERMINAL_PROCESSING +#define ENABLE_VIRTUAL_TERMINAL_PROCESSING 0x0004 +#endif #else #include #include @@ -68,9 +71,10 @@ namespace console { } } if (hConsole) { - // Enable ANSI colors on Windows 10+ - if (advanced_display && !(dwMode & ENABLE_VIRTUAL_TERMINAL_PROCESSING)) { - SetConsoleMode(hConsole, dwMode | ENABLE_VIRTUAL_TERMINAL_PROCESSING); + // Check conditions combined to reduce nesting + if (advanced_display && !(dwMode & ENABLE_VIRTUAL_TERMINAL_PROCESSING) && + !SetConsoleMode(hConsole, dwMode | ENABLE_VIRTUAL_TERMINAL_PROCESSING)) { + advanced_display = false; } // Set console output codepage to UTF8 SetConsoleOutputCP(CP_UTF8); diff --git a/examples/console.h b/common/console.h similarity index 100% rename from examples/console.h rename to common/console.h diff --git a/examples/grammar-parser.cpp b/common/grammar-parser.cpp similarity index 100% rename from examples/grammar-parser.cpp rename to common/grammar-parser.cpp diff --git a/examples/grammar-parser.h b/common/grammar-parser.h similarity index 100% rename from examples/grammar-parser.h rename to common/grammar-parser.h diff --git a/convert-falcon-hf-to-gguf.py b/convert-falcon-hf-to-gguf.py new file mode 100755 index 000000000..43e208497 --- /dev/null +++ 
b/convert-falcon-hf-to-gguf.py @@ -0,0 +1,278 @@ +#!/usr/bin/env python3 +# HF falcon--> gguf conversion + +import gguf +import os +import sys +import struct +import json +import numpy as np +import torch + +from typing import Any, List +from pathlib import Path +from transformers import AutoTokenizer + +def bytes_to_unicode(): + # ref: https://github.com/openai/gpt-2/blob/master/src/encoder.py + """ + Returns list of utf-8 byte and a corresponding list of unicode strings. + The reversible bpe codes work on unicode strings. + This means you need a large # of unicode characters in your vocab if you want to avoid UNKs. + When you're at something like a 10B token dataset you end up needing around 5K for decent coverage. + This is a significant percentage of your normal, say, 32K bpe vocab. + To avoid that, we want lookup tables between utf-8 bytes and unicode strings. + And avoids mapping to whitespace/control characters the bpe code barfs on. + """ + bs = list(range(ord("!"), ord("~")+1))+list(range(ord("¡"), ord("¬")+1))+list(range(ord("®"), ord("ÿ")+1)) + cs = bs[:] + n = 0 + for b in range(2**8): + if b not in bs: + bs.append(b) + cs.append(2**8+n) + n += 1 + cs = [chr(n) for n in cs] + return dict(zip(bs, cs)) + + +def count_model_parts(dir_model: str) -> int: + num_parts = 0 + for filename in os.listdir(dir_model): + if filename.startswith("pytorch_model-"): + num_parts += 1 + + if num_parts > 0: + print("gguf: found " + str(num_parts) + " model parts") + return num_parts + + +if len(sys.argv) < 3: + print("Usage: convert-h5-to-ggml.py dir-model ftype\n") + print(" ftype == 0 -> float32") + print(" ftype == 1 -> float16") + sys.exit(1) + + +# output in the same directory as the model +dir_model = sys.argv[1] +last_dir = os.path.basename(os.path.normpath(dir_model)) + +# possible tensor data types +# ftype == 0 -> float32 +# ftype == 1 -> float16 + +# map from ftype to string +ftype_str = ["f32", "f16"] + +ftype = 1 +if len(sys.argv) > 2: + ftype = int(sys.argv[2]) + if ftype < 0 or ftype > 1: + print("Invalid ftype: " + str(ftype)) + + sys.exit(1) + +fname_out = sys.argv[1] + "/ggml-model-" + ftype_str[ftype] + ".gguf" + +print("gguf: loading model "+last_dir) + +with open(dir_model + "/config.json", "r", encoding="utf-8") as f: + hparams = json.load(f) + +if hparams["architectures"][0] != "RWForCausalLM": + print("Model architecture not supported: " + hparams["architectures"][0]) + + sys.exit() + +# get number of model parts +num_parts = count_model_parts(dir_model) + +ARCH=gguf.MODEL_ARCH.FALCON +gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) + +print("gguf: get model metadata") + +block_count = hparams["n_layer"] + +gguf_writer.add_name("Falcon") +gguf_writer.add_context_length(2048) # not in config.json +gguf_writer.add_tensor_data_layout("jploski") # qkv tensor transform +gguf_writer.add_embedding_length(hparams["hidden_size"]) +gguf_writer.add_feed_forward_length(4 * hparams["hidden_size"]) +gguf_writer.add_block_count(block_count) +gguf_writer.add_head_count(hparams["n_head"]) +if "n_head_kv" in hparams: + gguf_writer.add_head_count_kv(hparams["n_head_kv"]) +else: + gguf_writer.add_head_count_kv(1) +gguf_writer.add_layer_norm_eps(hparams["layer_norm_epsilon"]) + +# TOKENIZATION + +print("gguf: get tokenizer metadata") + +tokens: List[str] = [] +scores: List[float] = [] +toktypes: List[int] = [] +merges: List[str] = [] + + +if Path(dir_model + "/tokenizer.json").is_file(): + # gpt2 tokenizer + gguf_writer.add_tokenizer_model("gpt2") + + print("gguf: get gpt2 
tokenizer merges")
+
+    with open(dir_model + "/tokenizer.json", "r", encoding="utf-8") as f:
+        tokenizer_json = json.load(f)
+    merges = tokenizer_json["model"]["merges"]
+
+    gguf_writer.add_token_merges(merges)
+
+    print("gguf: get gpt2 tokenizer vocab")
+
+    vocab_size = len(tokenizer_json["model"]["vocab"])
+
+    # ref: https://github.com/cmp-nct/ggllm.cpp/blob/master/falcon_convert.py
+    tokenizer = AutoTokenizer.from_pretrained(dir_model)
+
+    reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()}
+    byte_encoder = bytes_to_unicode()
+    byte_decoder = {v: k for k, v in byte_encoder.items()}
+
+    for i in range(vocab_size):
+        if i in reverse_vocab:
+            try:
+                text = bytearray([byte_decoder[c] for c in reverse_vocab[i]])
+            except KeyError:
+                text = bytearray()
+                for c in reverse_vocab[i]:
+                    if ord(c) < 256:  # single byte character
+                        text.append(byte_decoder[ord(c)])
+                    else:  # multibyte special token character
+                        text.extend(c.encode('utf-8'))
+        else:
+            print(f"Key {i} not in tokenizer vocabulary. Padding with an arbitrary token.")
+            pad_token = f"[PAD{i}]".encode("utf8")
+            text = bytearray(pad_token)
+
+        tokens.append(text)
+        scores.append(0.0)  # dummy
+        toktypes.append(gguf.TokenType.NORMAL)  # dummy
+
+    gguf_writer.add_token_list(tokens)
+    gguf_writer.add_token_scores(scores)
+    gguf_writer.add_token_types(toktypes)
+
+print("gguf: get special token ids")
+# Look for special tokens in config.json
+
+if "bos_token_id" in hparams and hparams["bos_token_id"] != None:
+    gguf_writer.add_bos_token_id(hparams["bos_token_id"])
+
+if "eos_token_id" in hparams and hparams["eos_token_id"] != None:
+    gguf_writer.add_eos_token_id(hparams["eos_token_id"])
+
+if "unk_token_id" in hparams and hparams["unk_token_id"] != None:
+    gguf_writer.add_unk_token_id(hparams["unk_token_id"])
+
+if "sep_token_id" in hparams and hparams["sep_token_id"] != None:
+    gguf_writer.add_sep_token_id(hparams["sep_token_id"])
+
+if "pad_token_id" in hparams and hparams["pad_token_id"] != None:
+    gguf_writer.add_pad_token_id(hparams["pad_token_id"])
+
+
+# TENSORS
+
+tensor_map = gguf.get_tensor_name_map(ARCH,block_count)
+
+# params for qkv transform
+n_head = hparams["n_head"]
+n_head_kv = hparams["n_head_kv"] if "n_head_kv" in hparams else 1
+
+head_dim = hparams["hidden_size"] // n_head
+
+# tensor info
+print("gguf: get tensor metadata")
+
+if num_parts == 0:
+    part_names = ("pytorch_model.bin",)
+else:
+    part_names = (
+        f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1)
+    )
+
+for part_name in part_names:
+    print("gguf: loading model part '" + part_name + "'")
+    model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu")
+
+    for name in model_part.keys():
+        data = model_part[name]
+
+        old_dtype = data.dtype
+
+        # convert any unsupported data types to float32
+        if data.dtype != torch.float16 and data.dtype != torch.float32:
+            data = data.to(torch.float32)
+
+        # QKV tensor transform
+        # The original query_key_value tensor contains n_head_kv "kv groups",
+        # each consisting of n_head/n_head_kv query weights followed by one key
+        # and one value weight (shared by all query heads in the kv group).
+        # This layout makes it a big pain to work with in GGML.
+        # So we rearrange them here, so that we have n_head query weights
+        # followed by n_head_kv key weights followed by n_head_kv value weights,
+        # in contiguous fashion.
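        # (editor's aside, not part of the script: a toy numpy check of the same
        #  regrouping, with made-up sizes n_head=4, n_head_kv=2, head_dim=3 and an
        #  arbitrary trailing width of 5)
        #
        #      import numpy as np
        #      n_head, n_head_kv, head_dim = 4, 2, 3
        #      rows = n_head_kv * (n_head // n_head_kv + 2) * head_dim
        #      qkv = np.arange(rows * 5).reshape(n_head_kv, n_head // n_head_kv + 2, head_dim, 5)
        #      q = qkv[:, :-2 ].reshape(n_head * head_dim, 5)     # all query rows, group order kept
        #      k = qkv[:, [-2]].reshape(n_head_kv * head_dim, 5)  # one key weight per kv group
        #      v = qkv[:, [-1]].reshape(n_head_kv * head_dim, 5)  # one value weight per kv group
        #      out = np.concatenate((q, k, v))                    # contiguous q | k | v layout
        #      assert out.shape == (rows, 5)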
+ # ref: https://github.com/jploski/ggml/blob/falcon40b/examples/falcon/convert-hf-to-ggml.py + + if "query_key_value" in name: + qkv = data.view(n_head_kv, n_head // n_head_kv + 2, head_dim, head_dim * n_head) + q = qkv[:, :-2 ].reshape(n_head * head_dim, head_dim * n_head) + k = qkv[:, [-2]].reshape(n_head_kv * head_dim, head_dim * n_head) + v = qkv[:, [-1]].reshape(n_head_kv * head_dim, head_dim * n_head) + data = torch.cat((q,k,v)).reshape_as(data) + + data = data.squeeze().numpy() + + # map tensor names + if name.endswith(".weight") and name[:-7] in tensor_map: + name = tensor_map[name[:-7]] + ".weight" + elif name.endswith(".bias") and name[:-5] in tensor_map: + name = tensor_map[name[:-5]] + ".bias" + else: + print("Can not map tensor '" + name + "'") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 + if ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(name + ", n_dims = " + str(n_dims) + ", " + str(old_dtype) + " --> " + str(data.dtype)) + + gguf_writer.add_tensor(name, data) + + +print("gguf: write header") +gguf_writer.write_header_to_file() +print("gguf: write metadata") +gguf_writer.write_kv_data_to_file() +print("gguf: write tensors") +gguf_writer.write_tensors_to_file() + +gguf_writer.close() + +print("gguf: model successfully exported to '" + fname_out + "'") +print("") diff --git a/convert-gptneox-hf-to-gguf.py b/convert-gptneox-hf-to-gguf.py new file mode 100755 index 000000000..6eeff5bb1 --- /dev/null +++ b/convert-gptneox-hf-to-gguf.py @@ -0,0 +1,267 @@ +#!/usr/bin/env python3 +# HF gptneox--> gguf conversion + +import gguf +import os +import sys +import struct +import json +import numpy as np +import torch + +from typing import Any, List +from pathlib import Path +from transformers import AutoTokenizer + +# ref: https://github.com/openai/gpt-2/blob/master/src/encoder.py + + +def bytes_to_unicode(): + """ + Returns list of utf-8 byte and a corresponding list of unicode strings. + The reversible bpe codes work on unicode strings. + This means you need a large # of unicode characters in your vocab if you want to avoid UNKs. + When you're at something like a 10B token dataset you end up needing around 5K for decent coverage. + This is a significant percentage of your normal, say, 32K bpe vocab. + To avoid that, we want lookup tables between utf-8 bytes and unicode strings. + And avoids mapping to whitespace/control characters the bpe code barfs on. 
+ """ + bs = list(range(ord("!"), ord("~")+1))+list(range(ord("¡"), ord("¬")+1))+list(range(ord("®"), ord("ÿ")+1)) + cs = bs[:] + n = 0 + for b in range(2**8): + if b not in bs: + bs.append(b) + cs.append(2**8+n) + n += 1 + cs = [chr(n) for n in cs] + return dict(zip(bs, cs)) + + +def count_model_parts(dir_model: str) -> int: + num_parts = 0 + for filename in os.listdir(dir_model): + if filename.startswith("pytorch_model-"): + num_parts += 1 + + if num_parts > 0: + print("gguf: found " + str(num_parts) + " model parts") + return num_parts + + +if len(sys.argv) < 3: + print("Usage: convert-h5-to-ggml.py dir-model ftype\n") + print(" ftype == 0 -> float32") + print(" ftype == 1 -> float16") + sys.exit(1) + + +# output in the same directory as the model +dir_model = sys.argv[1] +last_dir = os.path.basename(os.path.normpath(dir_model)) + +# possible tensor data types +# ftype == 0 -> float32 +# ftype == 1 -> float16 + +# map from ftype to string +ftype_str = ["f32", "f16"] + +ftype = 1 +if len(sys.argv) > 2: + ftype = int(sys.argv[2]) + if ftype < 0 or ftype > 1: + print("Invalid ftype: " + str(ftype)) + + sys.exit(1) + +fname_out = sys.argv[1] + "/ggml-model-" + ftype_str[ftype] + ".gguf" + +print("gguf: loading model "+last_dir) + +with open(dir_model + "/config.json", "r", encoding="utf-8") as f: + hparams = json.load(f) + +if hparams["architectures"][0] != "GPTNeoXForCausalLM": + print("Model architecture not supported: " + hparams["architectures"][0]) + + sys.exit() + +# get number of model parts +num_parts = count_model_parts(dir_model) + +ARCH=gguf.MODEL_ARCH.GPTNEOX +gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) + +print("gguf: get model metadata") + +block_count = hparams["num_hidden_layers"] + +gguf_writer.add_name(last_dir) +gguf_writer.add_context_length(hparams["max_position_embeddings"]) +gguf_writer.add_embedding_length(hparams["hidden_size"]) +gguf_writer.add_block_count(block_count) +gguf_writer.add_feed_forward_length(hparams["intermediate_size"]) +gguf_writer.add_rope_dimension_count(int(hparams["rotary_pct"]*(hparams["hidden_size"]//hparams["num_attention_heads"]))) +gguf_writer.add_head_count(hparams["num_attention_heads"]) +gguf_writer.add_parallel_residual(hparams["use_parallel_residual"] if "use_parallel_residual" in hparams else True) +gguf_writer.add_layer_norm_eps(hparams["layer_norm_eps"]) + +# TOKENIZATION + +print("gguf: get tokenizer metadata") + +tokens: List[str] = [] +merges: List[str] = [] + + +if Path(dir_model + "/tokenizer.json").is_file(): + # gpt2 tokenizer + gguf_writer.add_tokenizer_model("gpt2") + + print("gguf: get gpt2 tokenizer merges") + + with open(dir_model + "/tokenizer.json", "r", encoding="utf-8") as f: + tokenizer_json = json.load(f) + merges = tokenizer_json["model"]["merges"] + + gguf_writer.add_token_merges(merges) + + print("gguf: get gpt2 tokenizer vocab") + + vocab_size = len(tokenizer_json["model"]["vocab"]) + + # ref: https://github.com/cmp-nct/ggllm.cpp/blob/master/falcon_convert.py + tokenizer = AutoTokenizer.from_pretrained(dir_model) + + reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} + byte_encoder = bytes_to_unicode() + byte_decoder = {v: k for k, v in byte_encoder.items()} + + for i in range(vocab_size): + if i in reverse_vocab: + try: + text = bytearray([byte_decoder[c] for c in reverse_vocab[i]]) + except KeyError: + text = bytearray() + for c in reverse_vocab[i]: + if ord(c) < 256: # single byte character + text.append(byte_decoder[ord(c)]) + else: # multibyte special 
token character + text.extend(c.encode('utf-8')) + else: + print(f"Key {i} not in tokenizer vocabulary. Padding with an arbitrary token.") + pad_token = f"[PAD{i}]".encode("utf8") + text = bytearray(pad_token) + + tokens.append(text) + + gguf_writer.add_token_list(tokens) + + if "added_tokens" in tokenizer_json and Path(dir_model + "/tokenizer_config.json").is_file(): + print("gguf: get special token ids") + + with open(dir_model + "/tokenizer_config.json", "r", encoding="utf-8") as f: + tokenizer_config = json.load(f) + + # find special token ids + + if "bos_token" in tokenizer_config: + for key in tokenizer_json["added_tokens"]: + if key["content"] == tokenizer_config["bos_token"]: + gguf_writer.add_bos_token_id(key["id"]) + + if "eos_token" in tokenizer_config: + for key in tokenizer_json["added_tokens"]: + if key["content"] == tokenizer_config["eos_token"]: + gguf_writer.add_eos_token_id(key["id"]) + + if "unk_token" in tokenizer_config: + for key in tokenizer_json["added_tokens"]: + if key["content"] == tokenizer_config["unk_token"]: + gguf_writer.add_unk_token_id(key["id"]) + + if "sep_token" in tokenizer_config: + for key in tokenizer_json["added_tokens"]: + if key["content"] == tokenizer_config["sep_token"]: + gguf_writer.add_sep_token_id(key["id"]) + + if "pad_token" in tokenizer_config: + for key in tokenizer_json["added_tokens"]: + if key["content"] == tokenizer_config["pad_token"]: + gguf_writer.add_pad_token_id(key["id"]) + + +# TENSORS + +tensor_map = gguf.get_tensor_name_map(ARCH,block_count) + +# tensor info +print("gguf: get tensor metadata") + +if num_parts == 0: + part_names = ("pytorch_model.bin",) +else: + part_names = ( + f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) + ) + +for part_name in part_names: + print("gguf: loading model part '" + part_name + "'") + model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") + + for name in model_part.keys(): + data = model_part[name] + + # we don't need these + if name.endswith(".attention.masked_bias") or name.endswith(".attention.bias") or name.endswith(".attention.rotary_emb.inv_freq"): + continue + + old_dtype = data.dtype + + # convert any unsupported data types to float32 + if data.dtype != torch.float16 and data.dtype != torch.float32: + data = data.to(torch.float32) + + data = data.squeeze().numpy() + + # map tensor names + if name.endswith(".weight") and name[:-7] in tensor_map: + name = tensor_map[name[:-7]] + ".weight" + elif name.endswith(".bias") and name[:-5] in tensor_map: + name = tensor_map[name[:-5]] + ".bias" + else: + print("Can not map tensor '" + name + "'") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 + if ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(name + ", n_dims = " + str(n_dims) + ", " + str(old_dtype) + " --> " + str(data.dtype)) + + gguf_writer.add_tensor(name, data) + + +print("gguf: write header") +gguf_writer.write_header_to_file() +print("gguf: write metadata") +gguf_writer.write_kv_data_to_file() +print("gguf: write tensors") +gguf_writer.write_tensors_to_file() + +gguf_writer.close() + +print("gguf: model successfully exported to '" + fname_out + "'") +print("") diff --git a/convert-llama-7b-pth-to-gguf.py b/convert-llama-7b-pth-to-gguf.py new file mode 100755 index 000000000..f103f5f61 --- /dev/null +++ b/convert-llama-7b-pth-to-gguf.py @@ -0,0 +1,308 @@ +#!/usr/bin/env python3 +# 7b pth llama --> gguf conversion +# Only models with a single datafile are supported, like 7B +# HF files required in the model dir: config.json tokenizer_config.json tokenizer.json tokenizer.model + +import gguf +import os +import sys +import struct +import json +import numpy as np +import torch + +from typing import Any, List +from pathlib import Path +from sentencepiece import SentencePieceProcessor + +#NDArray = np.ndarray[Any, Any] +# compatible with python < 3.9 +NDArray: 'TypeAlias' = 'np.ndarray[Any, Any]' + + +def count_model_parts(dir_model: str) -> int: + num_parts = 0 + for filename in os.listdir(dir_model): + if filename.startswith("consolidated."): + num_parts += 1 + + if num_parts > 0: + print("gguf: found " + str(num_parts) + " model parts") + return num_parts + + +if len(sys.argv) < 3: + print("Usage: convert-h5-to-ggml.py dir-model ftype\n") + print(" ftype == 0 -> float32") + print(" ftype == 1 -> float16") + + sys.exit(1) + + +# output in the same directory as the model +dir_model = sys.argv[1] +last_dir = os.path.basename(os.path.normpath(dir_model)) + + +# possible tensor data types +# ftype == 0 -> float32 +# ftype == 1 -> float16 + +# map from ftype to string +ftype_str = ["f32", "f16"] + +ftype = 1 +if len(sys.argv) > 2: + ftype = int(sys.argv[2]) + if ftype < 0 or ftype > 1: + print("Invalid ftype: " + str(ftype)) + + sys.exit(1) + +fname_out = sys.argv[1] + "/ggml-model-" + ftype_str[ftype] + ".gguf" + +print("gguf: loading model "+last_dir) + +with open(dir_model + "/config.json", "r", encoding="utf-8") as f: + hparams = json.load(f) + +if hparams["architectures"][0] != "LlamaForCausalLM": + print("Model architecture not supported: " + hparams["architectures"][0]) + sys.exit() + +# get number of model parts +num_parts = count_model_parts(dir_model) + +if num_parts > 1: + print("gguf: Only models with a single datafile are supported.") + + sys.exit() + +ARCH=gguf.MODEL_ARCH.LLAMA +gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) + + +print("gguf: get model metadata") + +block_count = hparams["num_hidden_layers"] +head_count = hparams["num_attention_heads"] + +if "num_key_value_heads" in hparams: + head_count_kv = hparams["num_key_value_heads"] +else: + head_count_kv = head_count + +if "_name_or_path" in hparams: + hf_repo = hparams["_name_or_path"] +else: + hf_repo = "" + +if "max_sequence_length" in hparams: + ctx_length = hparams["max_sequence_length"] +elif "max_position_embeddings" in hparams: + ctx_length = hparams["max_position_embeddings"] 
+else:
+    print("gguf: cannot find ctx length parameter.")
+
+    sys.exit()
+
+
+gguf_writer.add_name(last_dir)
+gguf_writer.add_source_hf_repo(hf_repo)
+gguf_writer.add_tensor_data_layout("Meta AI original pth")
+gguf_writer.add_context_length(ctx_length)
+gguf_writer.add_embedding_length(hparams["hidden_size"])
+gguf_writer.add_block_count(block_count)
+gguf_writer.add_feed_forward_length(hparams["intermediate_size"])
+gguf_writer.add_rope_dimension_count(hparams["hidden_size"] // hparams["num_attention_heads"])
+gguf_writer.add_head_count(head_count)
+gguf_writer.add_head_count_kv(head_count_kv)
+gguf_writer.add_layer_norm_rms_eps(hparams["rms_norm_eps"])
+
+if "rope_scaling" in hparams and hparams["rope_scaling"] != None and "factor" in hparams["rope_scaling"]:
+    if "type" in hparams["rope_scaling"]:
+        if hparams["rope_scaling"]["type"] == "linear":
+            gguf_writer.add_rope_scale_linear(hparams["rope_scaling"]["factor"])
+
+
+# TOKENIZATION
+
+print("gguf: get tokenizer metadata")
+
+tokens: List[bytes] = []
+scores: List[float] = []
+toktypes: List[int] = []
+
+if Path(dir_model + "/tokenizer.model").is_file():
+    # vocab type sentencepiece
+    print("gguf: get sentencepiece tokenizer vocab and scores")
+
+    tokenizer = SentencePieceProcessor(dir_model + "/tokenizer.model")
+
+    for i in range(tokenizer.vocab_size()):
+        text: bytes
+        score: float
+
+        piece = tokenizer.id_to_piece(i)
+        text = piece.encode("utf-8")
+        score = tokenizer.get_score(i)
+
+        toktype = 1  # default to normal token type
+        if tokenizer.is_unknown(i):
+            toktype = 2
+        if tokenizer.is_control(i):
+            toktype = 3
+
+        # toktype = 4 is user-defined = tokens from added_tokens.json
+
+        if tokenizer.is_unused(i):
+            toktype = 5
+        if tokenizer.is_byte(i):
+            toktype = 6
+
+        tokens.append(text)
+        scores.append(score)
+        toktypes.append(toktype)
+
+    if Path(dir_model + "/added_tokens.json").is_file():
+        with open(dir_model + "/added_tokens.json", "r", encoding="utf-8") as f:
+            addtokens_json = json.load(f)
+
+            print("gguf: get added tokens")
+
+            for key in addtokens_json:
+                tokens.append( key.encode("utf-8") )
+                scores.append(-1000.0)
+                toktypes.append(4)  # user-defined token type
+
+    gguf_writer.add_tokenizer_model("llama")
+    gguf_writer.add_token_list(tokens)
+    gguf_writer.add_token_scores(scores)
+    gguf_writer.add_token_types(toktypes)
+
+
+print("gguf: get special token ids")
+
+if Path(dir_model + "/tokenizer.json").is_file():
+    # Look for special tokens in tokenizer.json if it exists
+
+    with open(dir_model + "/tokenizer.json", "r", encoding="utf-8") as f:
+        tokenizer = json.load(f)
+
+    if "added_tokens" in tokenizer and Path(dir_model + "/tokenizer_config.json").is_file():
+
+        with open(dir_model + "/tokenizer_config.json", "r", encoding="utf-8") as f:
+            tokenizer_config = json.load(f)
+
+        if "bos_token" in tokenizer_config and tokenizer_config["bos_token"] != None:
+            for key in tokenizer["added_tokens"]:
+                if key["content"] == tokenizer_config["bos_token"]["content"]:
+                    gguf_writer.add_bos_token_id(key["id"])
+
+        if "eos_token" in tokenizer_config and tokenizer_config["eos_token"] != None:
+            for key in tokenizer["added_tokens"]:
+                if key["content"] == tokenizer_config["eos_token"]["content"]:
+                    gguf_writer.add_eos_token_id(key["id"])
+
+        if "unk_token" in tokenizer_config and tokenizer_config["unk_token"] != None:
+            for key in tokenizer["added_tokens"]:
+                if key["content"] == tokenizer_config["unk_token"]["content"]:
+                    gguf_writer.add_unk_token_id(key["id"])
+
+        if "sep_token" in tokenizer_config and tokenizer_config["sep_token"]
!= None: + for key in tokenizer["added_tokens"]: + if key["content"] == tokenizer_config["sep_token"]["content"]: + gguf_writer.add_sep_token_id(key["id"]) + + if "pad_token" in tokenizer_config and tokenizer_config["pad_token"] != None: + for key in tokenizer["added_tokens"]: + if key["content"] == tokenizer_config["pad_token"]["content"]: + gguf_writer.add_pad_token_id(key["id"]) +else: + # If no tokenizer.json: Look for special tokens in config.json + + if "bos_token_id" in hparams and hparams["bos_token_id"] != None: + gguf_writer.add_bos_token_id(hparams["bos_token_id"]) + + if "eos_token_id" in hparams and hparams["eos_token_id"] != None: + gguf_writer.add_eos_token_id(hparams["eos_token_id"]) + + if "unk_token_id" in hparams and hparams["unk_token_id"] != None: + gguf_writer.add_unk_token_id(hparams["unk_token_id"]) + + if "sep_token_id" in hparams and hparams["sep_token_id"] != None: + gguf_writer.add_sep_token_id(hparams["sep_token_id"]) + + if "pad_token_id" in hparams and hparams["pad_token_id"] != None: + gguf_writer.add_pad_token_id(hparams["pad_token_id"]) + + +# TENSORS + +tensor_map = gguf.get_tensor_name_map(ARCH,block_count) + +# tensor info +print("gguf: get tensor metadata") + +part_names = (f"consolidated.{n:02}.pth" for n in range(0, num_parts)) + +for part_name in part_names: + print("gguf: loading model part '" + part_name + "'") + model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") + + for name in model_part.keys(): + data = model_part[name] + + # we don't need these + if name == "rope.freqs": + continue + + old_dtype = data.dtype + + # convert any unsupported data types to float32 + if data.dtype != torch.float16 and data.dtype != torch.float32: + data = data.to(torch.float32) + + data = data.squeeze().numpy() + + # map tensor names + if name.endswith(".weight") and name[:-7] in tensor_map: + name = tensor_map[name[:-7]] + ".weight" + elif name.endswith(".bias") and name[:-5] in tensor_map: + name = tensor_map[name[:-5]] + ".bias" + else: + print("Can not map tensor '" + name + "'") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? 
There should be no reason to store float16 as float32
+        if ftype == 1 and data_dtype == np.float16 and n_dims == 1:
+            data = data.astype(np.float32)
+
+        # if f16 desired, convert any float32 2-dim weight tensors to float16
+        if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2:
+            data = data.astype(np.float16)
+
+        print(name + ", n_dims = " + str(n_dims) + ", " + str(old_dtype) + " --> " + str(data.dtype))
+
+        gguf_writer.add_tensor(name, data)
+
+
+print("gguf: write header")
+gguf_writer.write_header_to_file()
+print("gguf: write metadata")
+gguf_writer.write_kv_data_to_file()
+print("gguf: write tensors")
+gguf_writer.write_tensors_to_file()
+
+gguf_writer.close()
+
+
+print("gguf: model successfully exported to '" + fname_out + "'")
+print("")
diff --git a/convert-llama-ggmlv3-to-gguf.py b/convert-llama-ggmlv3-to-gguf.py
new file mode 100755
index 000000000..3bf93627d
--- /dev/null
+++ b/convert-llama-ggmlv3-to-gguf.py
@@ -0,0 +1,345 @@
+#!/usr/bin/env python3
+import sys, struct, math, argparse
+from pathlib import Path
+
+import numpy as np
+
+import gguf
+
+# Note: Does not support GGML_QKK_64
+QK_K = 256
+# Items here are (block size, type size)
+GGML_QUANT_SIZES = {
+    gguf.GGMLQuantizationType.F32  : (1, 4),
+    gguf.GGMLQuantizationType.F16  : (1, 2),
+    gguf.GGMLQuantizationType.Q4_0 : (32, 2 + 16),
+    gguf.GGMLQuantizationType.Q4_1 : (32, 2 + 2 + 16),
+    gguf.GGMLQuantizationType.Q5_0 : (32, 2 + 4 + 16),
+    gguf.GGMLQuantizationType.Q5_1 : (32, 2 + 2 + 4 + 16),
+    gguf.GGMLQuantizationType.Q8_0 : (32, 2 + 32),
+    gguf.GGMLQuantizationType.Q8_1 : (32, 4 + 4 + 32),
+    gguf.GGMLQuantizationType.Q2_K : (256, 2 + 2 + QK_K // 16 + QK_K // 4),
+    gguf.GGMLQuantizationType.Q3_K : (256, 2 + QK_K // 4 + QK_K // 8 + 12),
+    gguf.GGMLQuantizationType.Q4_K : (256, 2 + 2 + QK_K // 2 + 12),
+    gguf.GGMLQuantizationType.Q5_K : (256, 2 + 2 + QK_K // 2 + QK_K // 8 + 12),
+    gguf.GGMLQuantizationType.Q6_K : (256, 2 + QK_K // 2 + QK_K // 4 + QK_K // 16),
+    gguf.GGMLQuantizationType.Q8_K : (256, 4 + QK_K + QK_K // 8),
+}
+
+class Hyperparameters:
+    def __init__(self):
+        self.n_vocab = self.n_embd = self.n_mult = self.n_head = self.n_layer = self.n_rot = self.ftype = 0
+        self.n_ff = 0
+
+    def set_n_ff(self, model):
+        ff_tensor_idx = model.tensor_map.get(b'layers.0.feed_forward.w1.weight')
+        assert ff_tensor_idx is not None, 'Missing layer 0 FF tensor'
+        ff_tensor = model.tensors[ff_tensor_idx]
+        self.n_ff = ff_tensor.dims[1]
+
+    def load(self, data, offset):
+        (
+            self.n_vocab,
+            self.n_embd,
+            self.n_mult,
+            self.n_head,
+            self.n_layer,
+            self.n_rot,
+            self.ftype,
+        ) = struct.unpack('<7I', data[offset:offset + (4 * 7)])
+        return 4 * 7
+
+    def __str__(self):
+        return f'<Hyperparameters: n_vocab={self.n_vocab}, n_embd={self.n_embd}, n_mult={self.n_mult}, n_head={self.n_head}, n_layer={self.n_layer}, n_rot={self.n_rot}, n_ff={self.n_ff}, ftype={self.ftype}>'
+
+class Vocab:
+    def __init__(self):
+        self.items = []
+
+    def load(self, data, offset, n_vocab):
+        orig_offset = offset
+        for _ in range(n_vocab):
+            itemlen = struct.unpack('<I', data[offset:offset + 4])[0]
+            assert itemlen < 4096, 'Absurd vocab item length'
+            offset += 4
+            vocab = bytes(data[offset:offset + itemlen])
+            offset += itemlen
+            vscore = struct.unpack('<f', data[offset:offset + 4])[0]
+            offset += 4
+            self.items.append((vocab, vscore))
+        return offset - orig_offset
+
+class Tensor:
+    def __init__(self):
+        self.name = None
+        self.dims = ()
+        self.dtype = None
+        self.start_offset = 0
+        self.len_bytes = np.int64(0)
+
+    def load(self, data, offset):
+        orig_offset = offset
+        (n_dims, name_len, dtype) = struct.unpack('<3I', data[offset:offset + 12])
+        assert n_dims >= 0 and n_dims <= 4, f'Invalid tensor dimensions {n_dims}'
+        assert name_len < 4096, 'Absurd tensor name length'
+        quant = GGML_QUANT_SIZES.get(dtype)
+        assert quant is not None, 'Unknown tensor type'
+        (blksize, tysize) = quant
+        offset += 12
+        self.dtype = dtype
+        self.dims = struct.unpack(f'<{n_dims}I', data[offset:offset + (4 * n_dims)])
+        offset += 4 * n_dims
+        self.name = bytes(data[offset:offset + name_len])
+        offset += name_len
+        pad = ((offset + 31) & ~31) - offset
+        offset += pad
+        n_elems = np.prod(self.dims)
+        n_bytes = np.int64(np.int64(n_elems) * np.int64(tysize)) // np.int64(blksize)
+        self.start_offset = offset
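        # (editor's aside, not part of the loader: a worked example of the size
        #  arithmetic above — for Q4_0 each 32-element block is stored in
        #  2 + 16 = 18 bytes, so a 4096x4096 tensor occupies
        #  4096 * 4096 // 32 * 18 = 9437184 bytes of tensor data in the file)
        #
        #      assert (4096 * 4096 // 32) * 18 == 9437184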
+        self.len_bytes = n_bytes
+        offset += n_bytes
+        # print(n_dims, name_len, dtype, self.dims, self.name, pad)
+        return offset - orig_offset
+
+class GGMLV3Model:
+    def __init__(self):
+        self.hyperparameters = None
+        self.vocab = None
+        self.tensor_map = {}
+        self.tensors = []
+
+    def validate_header(self, data, offset):
+        if bytes(data[offset:offset + 4]) != b'tjgg' or struct.unpack('<I', data[offset + 4:offset + 8])[0] != 3:
+            raise ValueError('Only GGJTv3 supported')
+        return 8
+
+    def load(self, data, offset):
+        offset += self.validate_header(data, offset)
+        hp = Hyperparameters()
+        offset += hp.load(data, offset)
+        vocab = Vocab()
+        offset += vocab.load(data, offset, hp.n_vocab)
+        tensors = []
+        tensor_map = {}
+        while offset < len(data):
+            tensor = Tensor()
+            offset += tensor.load(data, offset)
+            tensor_map[tensor.name] = len(tensors)
+            tensors.append(tensor)
+        self.hyperparameters = hp
+        self.vocab = vocab
+        self.tensors = tensors
+        self.tensor_map = tensor_map
+        hp.set_n_ff(self)
+        return offset
+
+class GGMLToGGUF:
+    def __init__(self, ggml_model, data, cfg, params_override = None, vocab_override = None):
+        hp = ggml_model.hyperparameters
+        self.model = ggml_model
+        self.data = data
+        self.cfg = cfg
+        self.params_override = params_override
+        self.vocab_override = vocab_override
+        if params_override is not None:
+            n_kv_head = params_override.n_head_kv
+        elif cfg.gqa == 1:
+            n_kv_head = hp.n_head
+        else:
+            gqa = float(cfg.gqa)
+            n_kv_head = None
+            for x in range(1, 256):
+                if float(hp.n_head) / float(x) == gqa:
+                    n_kv_head = x
+            assert n_kv_head is not None, "Couldn't determine n_kv_head from GQA param"
+            print(f'- Guessed n_kv_head = {n_kv_head} based on GQA {cfg.gqa}')
+        self.n_kv_head = n_kv_head
+        self.name_map = gguf.get_tensor_name_map(gguf.MODEL_ARCH.LLAMA, ggml_model.hyperparameters.n_layer)
+
+    def save(self):
+        print('* Preparing to save GGUF file')
+        gguf_writer = gguf.GGUFWriter(self.cfg.output, gguf.MODEL_ARCH_NAMES[gguf.MODEL_ARCH.LLAMA])
+        self.add_params(gguf_writer)
+        self.add_vocab(gguf_writer)
+        self.add_tensors(gguf_writer)
+        print('    gguf: write header')
+        gguf_writer.write_header_to_file()
+        print('    gguf: write metadata')
+        gguf_writer.write_kv_data_to_file()
+        print('    gguf: write tensors')
+        gguf_writer.write_tensors_to_file()
+        gguf_writer.close()
+
+    def add_params(self, gguf_writer):
+        hp = self.model.hyperparameters
+        cfg = self.cfg
+        desc = cfg.desc if cfg.desc is not None else 'converted from legacy GGJTv3 format'
+        try:
+            # Filenames aren't necessarily valid UTF8.
+            name = cfg.name if cfg.name is not None else cfg.input.name
+        except UnicodeDecodeError:
+            name = None
+        print('* Adding model parameters and KV items')
+        if name is not None:
+            gguf_writer.add_name(name)
+        gguf_writer.add_description(desc)
+        if self.params_override is not None:
+            po = self.params_override
+            assert po.n_embd == hp.n_embd, 'Model hyperparams mismatch'
+            assert po.n_layer == hp.n_layer, 'Model hyperparams mismatch'
+            assert po.n_head == hp.n_head, 'Model hyperparams mismatch'
+            gguf_writer.add_context_length(po.n_ctx)
+            gguf_writer.add_embedding_length(po.n_embd)
+            gguf_writer.add_block_count(po.n_layer)
+            gguf_writer.add_feed_forward_length(po.n_ff)
+            gguf_writer.add_rope_dimension_count(po.n_embd // po.n_head)
+            gguf_writer.add_head_count(po.n_head)
+            gguf_writer.add_head_count_kv(po.n_head_kv)
+            gguf_writer.add_layer_norm_rms_eps(po.f_norm_eps)
+            return
+        gguf_writer.add_context_length(cfg.context_length)
+        gguf_writer.add_embedding_length(hp.n_embd)
+        gguf_writer.add_block_count(hp.n_layer)
+        gguf_writer.add_feed_forward_length(hp.n_ff)
+        gguf_writer.add_rope_dimension_count(hp.n_embd // hp.n_head)
+        gguf_writer.add_head_count(hp.n_head)
+        gguf_writer.add_head_count_kv(self.n_kv_head)
+        gguf_writer.add_layer_norm_rms_eps(float(cfg.eps))
+
+    def add_vocab(self, gguf_writer):
+        hp = self.model.hyperparameters
+        gguf_writer.add_tokenizer_model('llama')
+        tokens = []
+        scores = []
+        toktypes = []
+        if self.vocab_override is not None:
+            vo = self.vocab_override
+            print('* Adding vocab item(s)')
+            for (idx, (vbytes, score, ttype)) in enumerate(vo.all_tokens()):
+                tokens.append(vbytes)
+                scores.append(score)
+                toktypes.append(ttype)
+            assert len(tokens) == hp.n_vocab, f'Override vocab has a different number of items than hyperparameters - override = {len(tokens)} but n_vocab={hp.n_vocab}'
+            gguf_writer.add_token_list(tokens)
+            gguf_writer.add_token_scores(scores)
+            if len(toktypes) > 0:
+                gguf_writer.add_token_types(toktypes)
+            return
+        print(f'* Adding {hp.n_vocab} vocab item(s)')
+        assert len(self.model.vocab.items) >= 3, 'Cannot handle unexpectedly short model vocab'
+        for (tokid, (vbytes, vscore)) in enumerate(self.model.vocab.items):
+            tt = 1  # Normal
+            # Special handling for UNK, BOS, EOS tokens.
+            if tokid <= 2:
+                if tokid == 0:
+                    vbytes = b'<unk>'
+                    tt = 2
+                elif tokid == 1:
+                    vbytes = b'<s>'
+                    tt = 3
+                else:
+                    vbytes = b'</s>'
+                    tt = 3
+            elif len(vbytes) == 0:
+                tt = 3  # Control
+            elif tokid >= 3 and tokid <= 258 and len(vbytes) == 1:
+                vbytes = bytes(f'<0x{vbytes[0]:02X}>', encoding = 'UTF-8')
+                tt = 6  # Byte
+            else:
+                vbytes = vbytes.replace(b' ', b'\xe2\x96\x81')
+            toktypes.append(tt)
+            tokens.append(vbytes)
+            scores.append(vscore)
+        gguf_writer.add_token_list(tokens)
+        gguf_writer.add_token_scores(scores)
+        gguf_writer.add_token_types(toktypes)
+        gguf_writer.add_unk_token_id(0)
+        gguf_writer.add_bos_token_id(1)
+        gguf_writer.add_eos_token_id(2)
+
+    def add_tensors(self, gguf_writer):
+        nm = self.name_map
+        data = self.data
+        print(f'* Adding {len(self.model.tensors)} tensor(s)')
+        for tensor in self.model.tensors:
+            name = str(tensor.name, 'UTF-8')
+            if name.endswith('.weight'):
+                name = name[:-7]
+                suffix = '.weight'
+            elif name.endswith('.bias'):
+                name = name[:-5]
+                suffix = '.bias'
+            mapped_name = nm.get(name)
+            assert mapped_name is not None, f'Bad name {name}'
+            mapped_name += suffix
+            tempdims = list(tensor.dims[:])
+            if len(tempdims) > 1:
+                temp = tempdims[1]
+                tempdims[1] = tempdims[0]
+                tempdims[0] = temp
+            # print(f'+ {tensor.name} | {mapped_name} {tensor.dims} :: {tempdims}')
+            gguf_writer.add_tensor(mapped_name, data[tensor.start_offset:tensor.start_offset + tensor.len_bytes], raw_shape = tempdims, raw_dtype = tensor.dtype)
+
+def handle_metadata(cfg, hp):
+    import convert
+    assert cfg.model_metadata_dir.is_dir(), 'Metadata dir is not a directory'
+    hf_config_path   = cfg.model_metadata_dir / "config.json"
+    orig_config_path = cfg.model_metadata_dir / "params.json"
+    # We pass a fake model here. "original" mode will check the shapes of some
+    # tensors if information is missing in the .json file: other than that, the
+    # model data isn't used so this should be safe (at least for now).
+ fakemodel = { + 'tok_embeddings.weight': convert.LazyTensor.__new__(convert.LazyTensor), + 'layers.0.feed_forward.w1.weight': convert.LazyTensor.__new__(convert.LazyTensor), + } + fakemodel['tok_embeddings.weight'].shape = [hp.n_vocab] + fakemodel['layers.0.feed_forward.w1.weight'].shape = [hp.n_ff] + if hf_config_path.exists(): + params = convert.Params.loadHFTransformerJson(fakemodel, hf_config_path) + elif orig_config_path.exists(): + params = convert.Params.loadOriginalParamsJson(fakemodel, orig_config_path) + else: + raise ValueError('Unable to load metadata') + vocab = convert.load_vocab(cfg.vocab_dir if cfg.vocab_dir is not None else cfg.model_metadata_dir, cfg.vocabtype) + convert.check_vocab_size(params, vocab) + return (params, vocab) + +def handle_args(): + parser = argparse.ArgumentParser(description = 'Convert GGMLv3 models to GGUF') + parser.add_argument('--input', '-i', type = Path, help = 'Input GGMLv3 filename') + parser.add_argument('--output', '-o', type = Path, help ='Output GGUF filename') + parser.add_argument('--name', help = 'Set model name') + parser.add_argument('--desc', help = 'Set model description') + parser.add_argument('--gqa', type = int, default = 1, help = 'grouped-query attention factor (use 8 for LLaMA2 70B)') + parser.add_argument('--eps', default = '5.0e-06', help = 'RMS norm eps: Use 1e-6 for LLaMA1 and OpenLLaMA, use 1e-5 for LLaMA2') + parser.add_argument('--context-length', '-c', type=int, default = 2048, help = 'Default max context length: LLaMA1 is typically 2048, LLaMA2 is typically 4096') + parser.add_argument('--model-metadata-dir', '-m', type = Path, help ='Load HuggingFace/.pth vocab and metadata from the specified directory') + parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file - only meaningful with --model-metadata-dir") + parser.add_argument("--vocabtype", choices=["spm", "bpe"], help="vocab format - only meaningful with --model-metadata-dir and/or --vocab-dir (default: spm)", default="spm") + return parser.parse_args() + +def main(): + cfg = handle_args() + print(f'* Using config: {cfg}') + print('\n=== WARNING === Be aware that this conversion script is best-effort. Use a native GGUF model if possible. === WARNING ===\n') + data = np.memmap(cfg.input, mode = 'r') + model = GGMLV3Model() + print('* Scanning GGML input file') + offset = model.load(data, 0) + print(f'* GGML model hyperparameters: {model.hyperparameters}') + vocab_override = None + params_override = None + if cfg.model_metadata_dir is not None: + (params_override, vocab_override) = handle_metadata(cfg, model.hyperparameters) + print('!! Note: When overriding params the --gqa, --eps and --context-length options are ignored.') + print(f'* Overriding params: {params_override}') + print(f'* Overriding vocab: {vocab_override}') + else: + print('\n=== WARNING === Special tokens may not be converted correctly. Use --model-metadata-dir if possible === WARNING ===\n') + converter = GGMLToGGUF(model, data, cfg, params_override = params_override, vocab_override = vocab_override) + converter.save() + print(f'* Successful completion. 
Output saved to: {cfg.output}') + +if __name__ == '__main__': + main() diff --git a/convert-llama-hf-to-gguf.py b/convert-llama-hf-to-gguf.py new file mode 100755 index 000000000..08fde238b --- /dev/null +++ b/convert-llama-hf-to-gguf.py @@ -0,0 +1,328 @@ +#!/usr/bin/env python3 +# HF llama --> gguf conversion + +import gguf +import os +import sys +import struct +import json +import numpy as np +import torch + +from typing import Any, List, Optional +from pathlib import Path +from sentencepiece import SentencePieceProcessor + +#NDArray = np.ndarray[Any, Any] +# compatible with python < 3.9 +NDArray: 'TypeAlias' = 'np.ndarray[Any, Any]' + +# reverse HF permute back to original pth layout +# https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/convert_llama_weights_to_hf.py + + +def reverse_hf_permute(weights: NDArray, n_head: int, n_kv_head: Optional[int] = None) -> NDArray: + if n_kv_head is not None and n_head != n_kv_head: + n_head //= n_kv_head + + return (weights.reshape(n_head, 2, weights.shape[0] // n_head // 2, *weights.shape[1:]) + .swapaxes(1, 2) + .reshape(weights.shape)) + + +def count_model_parts(dir_model: str) -> int: + num_parts = 0 + + for filename in os.listdir(dir_model): + if filename.startswith("pytorch_model-"): + num_parts += 1 + + if num_parts > 0: + print("gguf: found " + str(num_parts) + " model parts") + + return num_parts + + +if len(sys.argv) < 3: + print("Usage: convert-h5-to-ggml.py dir-model ftype\n") + print(" ftype == 0 -> float32") + print(" ftype == 1 -> float16") + + sys.exit(1) + + +# output in the same directory as the model +dir_model = sys.argv[1] +last_dir = os.path.basename(os.path.normpath(dir_model)) + + +# possible tensor data types +# ftype == 0 -> float32 +# ftype == 1 -> float16 + + +# map from ftype to string +ftype_str = ["f32", "f16"] + +ftype = 1 +if len(sys.argv) > 2: + ftype = int(sys.argv[2]) + if ftype < 0 or ftype > 1: + print("Invalid ftype: " + str(ftype)) + + sys.exit(1) + +fname_out = sys.argv[1] + "/ggml-model-" + ftype_str[ftype] + ".gguf" + +print("gguf: loading model "+last_dir) + +with open(dir_model + "/config.json", "r", encoding="utf-8") as f: + hparams = json.load(f) + +if hparams["architectures"][0] != "LlamaForCausalLM": + print("Model architecture not supported: " + hparams["architectures"][0]) + + sys.exit() + +# get number of model parts +num_parts = count_model_parts(dir_model) + +ARCH=gguf.MODEL_ARCH.LLAMA +gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) + +print("gguf: get model metadata") + +block_count = hparams["num_hidden_layers"] +head_count = hparams["num_attention_heads"] + +if "num_key_value_heads" in hparams: + head_count_kv = hparams["num_key_value_heads"] +else: + head_count_kv = head_count + +if "_name_or_path" in hparams: + hf_repo = hparams["_name_or_path"] +else: + hf_repo = "" + +if "max_sequence_length" in hparams: + ctx_length = hparams["max_sequence_length"] +elif "max_position_embeddings" in hparams: + ctx_length = hparams["max_position_embeddings"] +else: + print("gguf: can not find ctx length parameter.") + + sys.exit() + + +gguf_writer.add_name(last_dir) +gguf_writer.add_source_hf_repo(hf_repo) +gguf_writer.add_tensor_data_layout("Meta AI original pth") +gguf_writer.add_context_length(ctx_length) +gguf_writer.add_embedding_length(hparams["hidden_size"]) +gguf_writer.add_block_count(block_count) +gguf_writer.add_feed_forward_length(hparams["intermediate_size"]) +gguf_writer.add_rope_dimension_count(hparams["hidden_size"] // 
hparams["num_attention_heads"]) +gguf_writer.add_head_count(head_count) +gguf_writer.add_head_count_kv(head_count_kv) +gguf_writer.add_layer_norm_rms_eps(hparams["rms_norm_eps"]) + +if "rope_scaling" in hparams and hparams["rope_scaling"] != None and "factor" in hparams["rope_scaling"]: + if "type" in hparams["rope_scaling"]: + if hparams["rope_scaling"]["type"] == "linear": + gguf_writer.add_rope_scale_linear(hparams["rope_scaling"]["factor"]) + + +# TOKENIZATION + +print("gguf: get tokenizer metadata") + +tokens: List[bytes] = [] +scores: List[float] = [] +toktypes: List[int] = [] + +if Path(dir_model + "/tokenizer.model").is_file(): + # vocab type sentencepiece + print("gguf: get sentencepiece tokenizer vocab, scores and token types") + + tokenizer = SentencePieceProcessor(dir_model + "/tokenizer.model") + + for i in range(tokenizer.vocab_size()): + text: bytes + score: float + + piece = tokenizer.id_to_piece(i) + text = piece.encode("utf-8") + score = tokenizer.get_score(i) + + toktype = 1 # defualt to normal token type + if tokenizer.is_unknown(i): + toktype = 2 + if tokenizer.is_control(i): + toktype = 3 + + # toktype = 4 is user-defined = tokens from added_tokens.json + + if tokenizer.is_unused(i): + toktype = 5 + if tokenizer.is_byte(i): + toktype = 6 + + tokens.append(text) + scores.append(score) + toktypes.append(toktype) + + if Path(dir_model + "/added_tokens.json").is_file(): + with open(dir_model + "/added_tokens.json", "r", encoding="utf-8") as f: + addtokens_json = json.load(f) + + print("gguf: get added tokens") + + for key in addtokens_json: + tokens.append( key.encode("utf-8") ) + scores.append(-1000.0) + toktypes.append(4) # user-defined token type + + + gguf_writer.add_tokenizer_model("llama") + gguf_writer.add_token_list(tokens) + gguf_writer.add_token_scores(scores) + gguf_writer.add_token_types(toktypes) + + +print("gguf: get special token ids") + +if Path(dir_model + "/tokenizer.json").is_file(): + # Look for special tokens in tokenizer.json if it exists + + with open(dir_model + "/tokenizer.json", "r", encoding="utf-8") as f: + tokenizer = json.load(f) + + if "added_tokens" in tokenizer and Path(dir_model + "/tokenizer_config.json").is_file(): + + with open(dir_model + "/tokenizer_config.json", "r", encoding="utf-8") as f: + tokenizer_config = json.load(f) + + if "bos_token" in tokenizer_config and tokenizer_config["bos_token"] != None: + for key in tokenizer["added_tokens"]: + if key["content"] == tokenizer_config["bos_token"]["content"]: + gguf_writer.add_bos_token_id(key["id"]) + + if "eos_token" in tokenizer_config and tokenizer_config["eos_token"] != None: + for key in tokenizer["added_tokens"]: + if key["content"] == tokenizer_config["eos_token"]["content"]: + gguf_writer.add_eos_token_id(key["id"]) + + if "unk_token" in tokenizer_config and tokenizer_config["unk_token"] != None: + for key in tokenizer["added_tokens"]: + if key["content"] == tokenizer_config["unk_token"]["content"]: + gguf_writer.add_unk_token_id(key["id"]) + + if "sep_token" in tokenizer_config and tokenizer_config["sep_token"] != None: + for key in tokenizer["added_tokens"]: + if key["content"] == tokenizer_config["sep_token"]["content"]: + gguf_writer.add_sep_token_id(key["id"]) + + if "pad_token" in tokenizer_config and tokenizer_config["pad_token"] != None: + for key in tokenizer["added_tokens"]: + if key["content"] == tokenizer_config["pad_token"]["content"]: + gguf_writer.add_pad_token_id(key["id"]) +else: + # If no tokenizer.json: Look for special tokens in config.json + + if 
"bos_token_id" in hparams and hparams["bos_token_id"] != None: + gguf_writer.add_bos_token_id(hparams["bos_token_id"]) + + if "eos_token_id" in hparams and hparams["eos_token_id"] != None: + gguf_writer.add_eos_token_id(hparams["eos_token_id"]) + + if "unk_token_id" in hparams and hparams["unk_token_id"] != None: + gguf_writer.add_unk_token_id(hparams["unk_token_id"]) + + if "sep_token_id" in hparams and hparams["sep_token_id"] != None: + gguf_writer.add_sep_token_id(hparams["sep_token_id"]) + + if "pad_token_id" in hparams and hparams["pad_token_id"] != None: + gguf_writer.add_pad_token_id(hparams["pad_token_id"]) + + +# TENSORS + +tensor_map = gguf.get_tensor_name_map(ARCH,block_count) + +# tensor info +print("gguf: get tensor metadata") + +if num_parts == 0: + part_names = ("pytorch_model.bin",) +else: + part_names = ( + f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) + ) + +for part_name in part_names: + print("gguf: loading model part '" + part_name + "'") + model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") + + for name in model_part.keys(): + data = model_part[name] + + # we don't need these + if name.endswith(".rotary_emb.inv_freq"): + continue + + old_dtype = data.dtype + + # convert any unsupported data types to float32 + if data.dtype != torch.float16 and data.dtype != torch.float32: + data = data.to(torch.float32) + + data = data.squeeze().numpy() + + # reverse permute these + if name.endswith(".q_proj.weight"): + data = reverse_hf_permute(data, head_count) + if name.endswith(".k_proj.weight"): + data = reverse_hf_permute(data, head_count, head_count_kv) + + # map tensor names + if name.endswith(".weight") and name[:-7] in tensor_map: + name = tensor_map[name[:-7]] + ".weight" + elif name.endswith(".bias") and name[:-5] in tensor_map: + name = tensor_map[name[:-5]] + ".bias" + else: + print("Can not map tensor '" + name + "'") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 + if ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(name + ", n_dims = " + str(n_dims) + ", " + str(old_dtype) + " --> " + str(data.dtype)) + + gguf_writer.add_tensor(name, data) + + +print("gguf: write header") +gguf_writer.write_header_to_file() +print("gguf: write metadata") +gguf_writer.write_kv_data_to_file() +print("gguf: write tensors") +gguf_writer.write_tensors_to_file() + +gguf_writer.close() + + +print("gguf: model successfully exported to '" + fname_out + "'") +print("") diff --git a/convert-lora-to-ggml.py b/convert-lora-to-ggml.py index b4999ff5a..a94a7d0af 100755 --- a/convert-lora-to-ggml.py +++ b/convert-lora-to-ggml.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import json import os import re @@ -6,23 +6,22 @@ import struct import sys from typing import Any, Dict, Sequence, TextIO +import numpy as np import torch -from convert import DATA_TYPE_TO_FTYPE, NUMPY_TYPE_TO_DATA_TYPE, DataType +NUMPY_TYPE_TO_FTYPE: Dict[str, int] = {"float32": 0, "float16": 1} + HF_SUBLAYER_TO_GGML = { - "self_attn.q_proj": "attention.wq", - "self_attn.k_proj": "attention.wk", - "self_attn.v_proj": "attention.wv", - "self_attn.o_proj": "attention.wo", - "mlp.gate_proj": "feed_forward.w1", - "mlp.down_proj": "feed_forward.w2", - "mlp.up_proj": "feed_forward.w3", - "input_layernorm": "attention_norm", + "self_attn.q_proj": "attn_q", + "self_attn.k_proj": "attn_k", + "self_attn.v_proj": "attn_v", + "self_attn.o_proj": "attn_output", + "mlp.gate_proj": "ffn_gate", + "mlp.down_proj": "ffn_down", + "mlp.up_proj": "ffn_up", + "input_layernorm": "attn_norm", "post_attention_layernorm": "ffn_norm", - # "norm": "norm", - # "embed_tokens": "tok_embeddings", - # "lm_head": "output", } @@ -39,7 +38,7 @@ def translate_tensor_name(t: str) -> str: sys.exit(1) output_string = ( - f"layers.{nn}.{HF_SUBLAYER_TO_GGML[sub_layer]}.weight.lora{lora_type}" + f"blk.{nn}.{HF_SUBLAYER_TO_GGML[sub_layer]}.weight.lora{lora_type}" ) return output_string else: @@ -54,12 +53,14 @@ def write_file_header(fout: TextIO, params: Dict[str, Any]) -> None: # https://opendelta.readthedocs.io/en/latest/modules/deltas.html says that `lora_alpha` is an int # but some models ship a float value instead # let's convert to int, but fail if lossless conversion is not possible - assert int(params["lora_alpha"]) == params["lora_alpha"], "cannot convert float to int losslessly" + assert ( + int(params["lora_alpha"]) == params["lora_alpha"] + ), "cannot convert float to int losslessly" fout.write(struct.pack("i", int(params["lora_alpha"]))) def write_tensor_header( - self, name: str, shape: Sequence[int], data_type: DataType + self, name: str, shape: Sequence[int], data_type: np.dtype ) -> None: sname = name.encode("utf-8") fout.write( @@ -67,7 +68,7 @@ def write_tensor_header( "iii", len(shape), len(sname), - DATA_TYPE_TO_FTYPE[NUMPY_TYPE_TO_DATA_TYPE[data_type]], + NUMPY_TYPE_TO_FTYPE[data_type.name], ) ) fout.write(struct.pack("i" * len(shape), *shape[::-1])) diff --git a/convert-pth-to-ggml.py b/convert-pth-to-ggml.py deleted file mode 100644 index dd15393c3..000000000 --- a/convert-pth-to-ggml.py +++ /dev/null @@ -1,13 +0,0 @@ -# Compatibility stub - -import argparse - -import convert - -parser = argparse.ArgumentParser( - 
description="""[DEPRECATED - use `convert.py` instead] - Convert a LLaMA model checkpoint to a ggml compatible file""") -parser.add_argument('dir_model', help='directory containing the model checkpoint') -parser.add_argument('ftype', help='file type (0: float32, 1: float16)', type=int, choices=[0, 1], default=1) -args = parser.parse_args() -convert.main(['--outtype', 'f16' if args.ftype == 1 else 'f32', '--', args.dir_model]) diff --git a/convert.py b/convert.py old mode 100644 new mode 100755 index f3bf17980..8d34d5f29 --- a/convert.py +++ b/convert.py @@ -1,4 +1,6 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 + +import gguf import argparse import concurrent.futures import copy @@ -16,13 +18,12 @@ import signal import struct import sys import zipfile +import numpy as np + from abc import ABCMeta, abstractmethod from dataclasses import dataclass from pathlib import Path -from typing import (IO, TYPE_CHECKING, Any, Callable, Dict, Iterable, List, - Literal, Optional, Sequence, Tuple, TypeVar, Union) - -import numpy as np +from typing import (IO, TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Literal, Optional, Sequence, Tuple, TypeVar, Union) from sentencepiece import SentencePieceProcessor # type: ignore if TYPE_CHECKING: @@ -33,57 +34,47 @@ if hasattr(faulthandler, 'register') and hasattr(signal, 'SIGUSR1'): NDArray: 'TypeAlias' = 'np.ndarray[Any, Any]' +ARCH=gguf.MODEL_ARCH.LLAMA +NAMES=gguf.MODEL_TENSOR_NAMES[ARCH] + +# +# data types +# @dataclass(frozen=True) class UnquantizedDataType: name: str - -DT_F16 = UnquantizedDataType('F16') -DT_F32 = UnquantizedDataType('F32') -DT_I32 = UnquantizedDataType('I32') +DT_F16 = UnquantizedDataType('F16') +DT_F32 = UnquantizedDataType('F32') +DT_I32 = UnquantizedDataType('I32') DT_BF16 = UnquantizedDataType('BF16') - -@dataclass(frozen=True) -class QuantizedDataType: - groupsize: int - have_addends: bool - have_g_idx: bool - - -DT_Q4_0 = QuantizedDataType(groupsize=32, have_addends=False, have_g_idx=False) -DT_Q4_1 = QuantizedDataType(groupsize=32, have_addends=True, have_g_idx=False) - -DataType = Union[UnquantizedDataType, QuantizedDataType] - -DATA_TYPE_TO_FTYPE: Dict[DataType, int] = { - DT_F32: 0, - DT_F16: 1, - DT_Q4_0: 2, - DT_Q4_1: 3, -} - -FTYPE_TO_DATA_TYPE: Dict[int, DataType] = \ - {ftype: dtype for (dtype, ftype) in DATA_TYPE_TO_FTYPE.items()} +DataType = Union[UnquantizedDataType] DATA_TYPE_TO_NUMPY: Dict[DataType, 'np.dtype[Any]'] = { DT_BF16: np.dtype(np.uint16), - DT_F16: np.dtype(np.float16), - DT_F32: np.dtype(np.float32), - DT_I32: np.dtype(np.int32), + DT_F16: np.dtype(np.float16), + DT_F32: np.dtype(np.float32), + DT_I32: np.dtype(np.int32), } NUMPY_TYPE_TO_DATA_TYPE: Dict['np.dtype[Any]', DataType] = \ {dtype: data_type for (data_type, dtype) in DATA_TYPE_TO_NUMPY.items()} +SAFETENSORS_DATA_TYPES: Dict[str, DataType] = { + 'BF16': DT_BF16, + 'F16': DT_F16, + 'F32': DT_F32, + 'I32': DT_I32, +} -class GGMLFileType(enum.Enum): - AllF32 = 0 +# TODO: match this with `llama_ftype` +# TODO: rename to LLAMAFileType +# TODO: move to `gguf.py` +class GGMLFileType(enum.IntEnum): + AllF32 = 0 MostlyF16 = 1 # except 1d tensors - MostlyQ4_0 = 2 # except 1d tensors - MostlyQ4_1 = 3 # except 1d tensors - PerLayerIsQ4_1 = 4 # but tok_embeddings.weight and output.weight are F16 def type_for_tensor(self, name: str, tensor: 'LazyTensor') -> DataType: if len(tensor.shape) == 1: @@ -93,60 +84,36 @@ class GGMLFileType(enum.Enum): return DT_F32 elif self == GGMLFileType.MostlyF16: return DT_F16 - elif self == GGMLFileType.MostlyQ4_0: - 
return DT_Q4_0 - elif self == GGMLFileType.MostlyQ4_1: - return DT_Q4_1 - elif self == GGMLFileType.PerLayerIsQ4_1: - if name in ('output.weight', 'tok_embeddings.weight'): - return DT_F16 - else: - return DT_Q4_1 else: raise ValueError(self) -def make_tensors_list() -> List[str]: - ret = [ - 'tok_embeddings.weight', - 'norm.weight', - 'output.weight', - ] - for i in range(80): # maximum number of layer - ret += [ - f'layers.{i}.attention.wq.weight', - f'layers.{i}.attention.wk.weight', - f'layers.{i}.attention.wv.weight', - f'layers.{i}.attention.wo.weight', - f'layers.{i}.attention_norm.weight', - f'layers.{i}.feed_forward.w1.weight', - f'layers.{i}.feed_forward.w2.weight', - f'layers.{i}.feed_forward.w3.weight', - f'layers.{i}.ffn_norm.weight', - ] - return ret - - -TENSORS_LIST = make_tensors_list() -TENSORS_SET = set(TENSORS_LIST) - - -def find_n_mult(n_ff: int, n_embd: int) -> int: - # hardcoded magic range - for n_mult in range(8192, 1, -1): - calc_ff = (((8*n_embd) // 3 + n_mult - 1) // n_mult)*n_mult - if calc_ff == n_ff: - return n_mult - raise Exception(f"failed to find n_mult for (n_ff={n_ff}, n_embd={n_embd}).") +# +# hparams loading +# @dataclass class Params: - n_vocab: int - n_embd: int - n_mult: int - n_head: int - n_layer: int - n_kv_head: Optional[int] # This parameter is only used for Llama 2 + n_vocab: int + n_embd: int + n_mult: int + n_layer: int + n_ctx: int + n_ff: int + n_head: int + n_head_kv: int + f_norm_eps: float + + ftype: Optional[GGMLFileType] = None + + @staticmethod + def find_n_mult(n_ff: int, n_embd: int) -> int: + # hardcoded magic range + for n_mult in range(8192, 1, -1): + calc_ff = (((8*n_embd) // 3 + n_mult - 1) // n_mult)*n_mult + if calc_ff == n_ff: + return n_mult + raise Exception(f"failed to find n_mult for (n_ff={n_ff}, n_embd={n_embd}).") @staticmethod def guessed(model: 'LazyModel') -> 'Params': @@ -165,37 +132,57 @@ class Params: raise Exception("failed to guess 'n_layer'. 
This model is unknown or unsupported.\n" "Suggestion: provide 'config.json' of the model in the same directory containing model files.") - n_head=n_embd // 128 # guessed + n_head = n_embd // 128 # guessed + n_mult = 256 # guessed + + # TODO: verify this + n_ff = int(2 * (4 * n_embd) / 3) + n_ff = n_mult * ((n_ff + n_mult - 1) // n_mult) return Params( - n_vocab = n_vocab, - n_embd = n_embd, - n_mult = 256, - n_head = n_head, - n_layer = n_layer, - n_kv_head = None, + n_vocab = n_vocab, + n_embd = n_embd, + n_mult = n_mult, + n_layer = n_layer, + n_ctx = -1, + n_ff = n_ff, + n_head = n_head, + n_head_kv = n_head, + f_norm_eps = 1e-5, ) @staticmethod def loadHFTransformerJson(model: 'LazyModel', config_path: 'Path') -> 'Params': config = json.load(open(config_path)) - n_vocab = config["vocab_size"]; - n_embd = config["hidden_size"]; - n_head = config["num_attention_heads"]; - n_layer = config["num_hidden_layers"]; - n_ff = config["intermediate_size"]; - n_kv_head = config.get("num_key_value_heads") + n_vocab = config["vocab_size"] + n_embd = config["hidden_size"] + n_layer = config["num_hidden_layers"] + n_ff = config["intermediate_size"] + n_head = config["num_attention_heads"] + n_head_kv = config["num_key_value_heads"] if "num_key_value_heads" in config else n_head + f_norm_eps = config["rms_norm_eps"] - n_mult = find_n_mult(n_ff, n_embd); + n_mult = Params.find_n_mult(n_ff, n_embd) + + if "max_sequence_length" in config: + n_ctx = config["max_sequence_length"] + elif "max_position_embeddings" in config: + n_ctx = config["max_position_embeddings"] + else: + raise Exception("failed to guess 'n_ctx'. This model is unknown or unsupported.\n" + "Suggestion: provide 'config.json' of the model in the same directory containing model files.") return Params( - n_vocab = n_vocab, - n_embd = n_embd, - n_mult = n_mult, - n_head = n_head, - n_layer = n_layer, - n_kv_head = n_kv_head, + n_vocab = n_vocab, + n_embd = n_embd, + n_mult = n_mult, + n_layer = n_layer, + n_ctx = n_ctx, + n_ff = n_ff, + n_head = n_head, + n_head_kv = n_head_kv, + f_norm_eps = f_norm_eps, ) # LLaMA v2 70B params.json @@ -204,22 +191,32 @@ class Params: def loadOriginalParamsJson(model: 'LazyModel', config_path: 'Path') -> 'Params': config = json.load(open(config_path)) - n_vocab = config["vocab_size"]; - n_embd = config["dim"]; - n_head = config["n_heads"]; - n_layer = config["n_layers"]; - n_mult = config["multiple_of"]; + n_vocab = config["vocab_size"] + n_embd = config["dim"] + n_layer = config["n_layers"] + n_mult = config["multiple_of"] + n_ctx = 2048 if config["norm_eps"] == 1e-06 else 4096 # hack to determine LLaMA v1 vs v2 + n_ff = -1 + n_head = config["n_heads"] + n_head_kv = config["n_kv_heads"] if "n_kv_heads" in config else n_head + f_norm_eps = config["norm_eps"] if n_vocab == -1: n_vocab = model["tok_embeddings.weight"].shape[0] + if n_ff == -1: + n_ff = model["layers.0.feed_forward.w1.weight"].shape[0] + return Params( - n_vocab = n_vocab, - n_embd = n_embd, - n_mult = n_mult, - n_head = n_head, - n_layer = n_layer, - n_kv_head = None, + n_vocab = n_vocab, + n_embd = n_embd, + n_mult = n_mult, + n_layer = n_layer, + n_ctx = n_ctx, + n_ff = n_ff, + n_head = n_head, + n_head_kv = n_head_kv, + f_norm_eps = f_norm_eps, ) @staticmethod @@ -234,30 +231,73 @@ class Params: else: params = Params.guessed(model_plus.model) - print(f'params: n_vocab:{params.n_vocab} n_embd:{params.n_embd} n_mult:{params.n_mult} n_head:{params.n_head} n_layer:{params.n_layer}') return params -class SentencePieceVocab: - def __init__(self, 
fname_tokenizer: Path, fname_added_tokens: Optional[Path], vocabtype: Optional[str]) -> None: - self.vocabtype = vocabtype - if self.vocabtype == "bpe": - self.sentencepiece_tokenizer = json.loads(open(str(fname_tokenizer)).read()) - else: - self.sentencepiece_tokenizer = SentencePieceProcessor(str(fname_tokenizer)) +# +# vocab +# + +class BpeVocab: + def __init__(self, fname_tokenizer: Path, fname_added_tokens: Optional[Path]) -> None: + self.bpe_tokenizer = json.loads(open(str(fname_tokenizer), encoding="utf-8").read()) added_tokens: Dict[str, int] if fname_added_tokens is not None: - added_tokens = json.load(open(fname_added_tokens)) + added_tokens = json.load(open(fname_added_tokens, encoding="utf-8")) else: added_tokens = {} - if self.vocabtype == "bpe": - vocab_size: int = len(self.sentencepiece_tokenizer) - else: - vocab_size: int = self.sentencepiece_tokenizer.vocab_size() - expected_ids = list(range(vocab_size, vocab_size + len(added_tokens))) - actual_ids = sorted(added_tokens.values()) + + vocab_size: int = len(self.bpe_tokenizer) + expected_ids = list(range(vocab_size, vocab_size + len(added_tokens))) + actual_ids = sorted(added_tokens.values()) if expected_ids != actual_ids: raise Exception(f"Expected added token IDs to be sequential and start at {len(added_tokens)}; got {actual_ids}") + + items = sorted(added_tokens.items(), key=lambda text_idx: text_idx[1]) + self.added_tokens_list = [text for (text, idx) in items] + self.vocab_size_base: int = vocab_size + self.vocab_size: int = self.vocab_size_base + len(self.added_tokens_list) + self.fname_tokenizer = fname_tokenizer + self.fname_added_tokens = fname_added_tokens + + def bpe_tokens(self) -> Iterable[Tuple[bytes, float, gguf.TokenType]]: + tokenizer = self.bpe_tokenizer + from transformers.models.gpt2 import tokenization_gpt2 + byte_encoder = tokenization_gpt2.bytes_to_unicode() + byte_decoder = {v: k for k, v in byte_encoder.items()} + for i, item in enumerate(tokenizer): + text: bytes = item.encode("utf-8") + score: float = -i + yield text, score, gguf.TokenType.USER_DEFINED + + def added_tokens(self) -> Iterable[Tuple[bytes, float, gguf.TokenType]]: + for text in self.added_tokens_list: + score = -1000.0 + yield text.encode("utf-8"), score, gguf.TokenType.USER_DEFINED + + def all_tokens(self) -> Iterable[Tuple[bytes, float, gguf.TokenType]]: + yield from self.bpe_tokens() + yield from self.added_tokens() + + def __repr__(self) -> str: + return f"BpeVocab with {self.vocab_size_base} base tokens and {len(self.added_tokens_list)} added tokens>" + + +class SentencePieceVocab: + def __init__(self, fname_tokenizer: Path, fname_added_tokens: Optional[Path]) -> None: + self.sentencepiece_tokenizer = SentencePieceProcessor(str(fname_tokenizer)) + added_tokens: Dict[str, int] + if fname_added_tokens is not None: + added_tokens = json.load(open(fname_added_tokens, encoding="utf-8")) + else: + added_tokens = {} + + vocab_size: int = self.sentencepiece_tokenizer.vocab_size() + expected_ids = list(range(vocab_size, vocab_size + len(added_tokens))) + actual_ids = sorted(added_tokens.values()) + if expected_ids != actual_ids: + raise Exception(f"Expected added token IDs to be sequential and start at {len(added_tokens)}; got {actual_ids}") + items = sorted(added_tokens.items(), key=lambda text_idx: text_idx[1]) self.added_tokens_list = [text for (text, idx) in items] self.vocab_size_base: int = vocab_size @@ -265,117 +305,66 @@ class SentencePieceVocab: self.fname_tokenizer = fname_tokenizer self.fname_added_tokens = fname_added_tokens 
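Both vocab loaders above enforce the same contract for `added_tokens.json`: the added-token IDs must form one contiguous block that starts immediately after the base vocabulary, and the added tokens are then appended in ID order behind the base tokens. A standalone sketch of that check (the token names and base vocab size here are invented for illustration):

```python
# Sketch of the added-token contract enforced by BpeVocab / SentencePieceVocab.
# The concrete tokens and base vocab size below are made up.
added_tokens = {"<pad>": 32000, "<extra_0>": 32001}
vocab_size_base = 32000  # size of the base tokenizer vocabulary

expected_ids = list(range(vocab_size_base, vocab_size_base + len(added_tokens)))
actual_ids = sorted(added_tokens.values())
assert expected_ids == actual_ids, "added token IDs must directly follow the base vocab"

# Added tokens are emitted in ID order, after the base vocabulary:
added_tokens_list = [text for text, _idx in sorted(added_tokens.items(), key=lambda kv: kv[1])]
print(added_tokens_list)  # ['<pad>', '<extra_0>']
```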
- def sentencepiece_tokens(self) -> Iterable[Tuple[bytes, float]]: + def sentencepiece_tokens(self) -> Iterable[Tuple[bytes, float, gguf.TokenType]]: tokenizer = self.sentencepiece_tokenizer - if self.vocabtype == "bpe": - from transformers.models.gpt2 import tokenization_gpt2 - byte_encoder = tokenization_gpt2.bytes_to_unicode() - byte_decoder = {v: k for k, v in byte_encoder.items()} - for i, item in enumerate(tokenizer): - text: bytes - text = b''.join([x.to_bytes(1, byteorder='big') for x in [byte_decoder[y] for y in item]]) - score: float = -i - yield text, score - else: - for i in range(tokenizer.vocab_size()): - text: bytes - if tokenizer.is_unknown(i): - text = " \u2047 ".encode("utf-8") - elif tokenizer.is_control(i): - text = b"" - elif tokenizer.is_byte(i): - piece = tokenizer.id_to_piece(i) - if len(piece) != 6: - raise Exception(f"Invalid token: {piece}") - byte_value = int(piece[3:-1], 16) - text = struct.pack("B", byte_value) - else: - text = tokenizer.id_to_piece(i).replace("\u2581", " ").encode("utf-8") - score: float = tokenizer.get_score(i) - yield text, score + for i in range(tokenizer.vocab_size()): + piece = tokenizer.id_to_piece(i) + text: bytes = piece.encode("utf-8") + score: float = tokenizer.get_score(i) - def added_tokens(self) -> Iterable[Tuple[bytes, float]]: + toktype = gguf.TokenType.NORMAL + if tokenizer.is_unknown(i): + toktype = gguf.TokenType.UNKNOWN + if tokenizer.is_control(i): + toktype = gguf.TokenType.CONTROL + + # NOTE: I think added_tokens are user defined. + # ref: https://github.com/google/sentencepiece/blob/master/src/sentencepiece_model.proto + # if tokenizer.is_user_defined(i): toktype = gguf.TokenType.USER_DEFINED + + if tokenizer.is_unused(i): + toktype = gguf.TokenType.UNUSED + if tokenizer.is_byte(i): + toktype = gguf.TokenType.BYTE + + yield text, score, toktype + + def added_tokens(self) -> Iterable[Tuple[bytes, float, gguf.TokenType]]: for text in self.added_tokens_list: score = -1000.0 - yield text.encode("utf-8"), score + yield text.encode("utf-8"), score, gguf.TokenType.USER_DEFINED - def all_tokens(self) -> Iterable[Tuple[bytes, float]]: + def all_tokens(self) -> Iterable[Tuple[bytes, float, gguf.TokenType]]: yield from self.sentencepiece_tokens() yield from self.added_tokens() def __repr__(self) -> str: return f"" - -class GGMLVocab: - def __init__(self, tokens: List[Tuple[bytes, float]]): - self.tokens = tokens - self.vocab_size = len(tokens) - - def all_tokens(self) -> Iterable[Tuple[bytes, float]]: - return self.tokens - - def __repr__(self) -> str: - return f"" +Vocab = Union[BpeVocab, SentencePieceVocab] -Vocab = Union[SentencePieceVocab, GGMLVocab] +# +# data loading +# TODO: reuse (probably move to gguf.py?) 
+# - -def permute(weights: NDArray, n_head: int, n_kv_head: Optional[int] = None) -> NDArray: - if n_kv_head is not None and n_head != n_kv_head: - n_head //= n_kv_head +def permute(weights: NDArray, n_head: int, n_head_kv: int) -> NDArray: + #print( "permute debug " + str(weights.shape[0]) + " x " + str(weights.shape[1]) + " nhead " + str(n_head) + " nheadkv " + str(n_kv_head) ) + if n_head_kv is not None and n_head != n_head_kv: + n_head //= n_head_kv return (weights.reshape(n_head, 2, weights.shape[0] // n_head // 2, *weights.shape[1:]) .swapaxes(1, 2) .reshape(weights.shape)) -def dequantize_q4(qvalues_pack32: NDArray, scales: NDArray, addends: Optional[NDArray], g_idx: Optional[NDArray]) -> NDArray: - # First reinterpret each row from a list of int32s containing 8 values each - # to a list of uint8s containing 2 values each. - qvalues_pack8 = qvalues_pack32.view(np.uint8) - - # Then split out the two values per int8 (which requires an actual - # conversion because numpy doesn't natively support int4s). - qvalues = np.zeros([qvalues_pack8.shape[0], qvalues_pack8.shape[1] * 2], dtype=np.uint8) - qvalues[:, 0::2] = qvalues_pack8 & 0xf - qvalues[:, 1::2] = qvalues_pack8 >> 4 - - assert addends is None or addends.shape == scales.shape - assert qvalues.shape[0] == scales.shape[0] - assert qvalues.shape[1] % scales.shape[1] == 0 - if g_idx is None: - repeat_count = qvalues.shape[1] // scales.shape[1] - scales = scales[:, :, np.newaxis] - if addends is not None: - addends = addends[:, :, np.newaxis] - # Reshape so that the below computation broadcasts over scales and addends: - qvalues.shape = (qvalues.shape[0], scales.shape[1], int(repeat_count)) - else: - # In this case the scale and addend is selected for each column by g_idx: - assert addends is not None - scales = scales[:, g_idx] - addends = addends[:, g_idx] - if addends is None: - # Q4_0 - qvalues = qvalues.view(np.int8) - qvalues -= 8 - # And do the actual 'value = scale * qvalue + addend' computation. - values = scales * qvalues - if addends is not None: - values += addends - if g_idx is None: - values.shape = (values.shape[0], values.shape[1] * values.shape[2]) - return values - - class Tensor(metaclass=ABCMeta): data_type: DataType @abstractmethod def astype(self, data_type: DataType) -> 'Tensor': ... @abstractmethod - def permute(self, n_head: int, n_kv_head: Optional[int] = None) -> 'Tensor': ... + def permute(self, n_head: int, n_head_kv: int) -> 'Tensor': ... @abstractmethod def permute_part(self, n_part: int, n_head: int) -> 'UnquantizedTensor': ... 
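The new `permute()` above is what maps the Hugging Face checkpoint's interleaved RoPE row order back to the layout the GGUF writer expects. A toy numpy check of the reshape/swapaxes trick (dimensions invented; no GQA here, i.e. `n_head == n_head_kv`, so the head count is left undivided):

```python
import numpy as np

# Toy dimensions: 2 heads, head_dim 4, so 8 output rows of width 3.
n_head = 2
w = np.arange(8 * 3).reshape(8, 3)

permuted = (w.reshape(n_head, 2, w.shape[0] // n_head // 2, *w.shape[1:])
             .swapaxes(1, 2)
             .reshape(w.shape))

# Within each head the 4 rows are reordered 0,2,1,3 (pairs are de-interleaved):
print(permuted[:, 0])  # [ 0  6  3  9 12 18 15 21] -> original rows 0,2,1,3,4,6,5,7
```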
@abstractmethod @@ -413,8 +402,8 @@ class UnquantizedTensor(Tensor): r = self.ndarray.shape[0] // 3 return UnquantizedTensor(self.ndarray[r * n_part : r * n_part + r, ...]) - def permute(self, n_head: int, n_kv_head: Optional[int] = None) -> 'UnquantizedTensor': - return UnquantizedTensor(permute(self.ndarray, n_head, n_kv_head)) + def permute(self, n_head: int, n_head_kv: int) -> 'UnquantizedTensor': + return UnquantizedTensor(permute(self.ndarray, n_head, n_head_kv)) def load_unquantized(lazy_tensor: 'LazyTensor', expected_dtype: Any = None, convert: bool = False) -> NDArray: @@ -433,183 +422,25 @@ def load_unquantized(lazy_tensor: 'LazyTensor', expected_dtype: Any = None, conv return tensor.ndarray -class GGMLQuantizedTensor(Tensor): - data_type: QuantizedDataType - - def __init__(self, ndarray: NDArray, shape: List[int], data_type: DataType) -> None: - rows, columns = shape - assert data_type in (DT_Q4_1, DT_Q4_0) # for now - assert isinstance(data_type, QuantizedDataType) # redundant, but mypy complains without this - assert columns % data_type.groupsize == 0 - words_in_block = 6 if data_type == DT_Q4_1 else 5 - self.ndarray = ndarray.view(dtype=np.uint32).reshape((rows, columns // data_type.groupsize, words_in_block)) - self.shape = shape[:] - self.data_type = data_type - - def astype(self, data_type: DataType) -> Tensor: - if data_type == self.data_type: - return self - scales = self.ndarray[:, :, 0].view(np.float32) - if self.data_type.have_addends: - addends = self.ndarray[:, :, 1].view(np.float32) - else: - addends = None - qweights = self.ndarray[:, :, -4:].reshape([self.shape[0], self.shape[1] // 8]) - - dq = dequantize_q4(qweights, scales, addends, g_idx=None) - return UnquantizedTensor(dq).astype(data_type) - - def to_ggml(self) -> 'GGMLQuantizedTensor': - return self - - def permute(self, n_head: int, n_kv_head: Optional[int] = None) -> 'GGMLQuantizedTensor': - return GGMLQuantizedTensor(permute(self.ndarray, n_head, n_kv_head), self.shape, self.data_type) - - def permute_part(self, n_part: int, n_head: int) -> 'UnquantizedTensor': - r = self.ndarray.shape[0] // 3 - return UnquantizedTensor(permute(self.ndarray[r * n_part : r * n_part + r, ...], n_head)) - - def part(self, n_part: int) -> 'UnquantizedTensor': - r = self.ndarray.shape[0] // 3 - return UnquantizedTensor(self.ndarray[r * n_part : r * n_part + r, ...]) - -GGMLCompatibleTensor = Union[UnquantizedTensor, GGMLQuantizedTensor] +GGMLCompatibleTensor = Union[UnquantizedTensor] class DeferredPermutedTensor(Tensor): - def __init__(self, base: Tensor, n_head: int, n_kv_head: Optional[int] = None) -> None: + def __init__(self, base: Tensor, n_head: int, n_head_kv: int) -> None: self.base = base self.n_head = n_head - self.n_kv_head = n_kv_head self.data_type = self.base.data_type def astype(self, data_type: DataType) -> Tensor: - return self.base.astype(data_type).permute(self.n_head, self.n_kv_head) + return self.base.astype(data_type).permute(self.n_head, self.n_head_kv) def to_ggml(self) -> GGMLCompatibleTensor: - return self.base.to_ggml().permute(self.n_head, self.n_kv_head) + return self.base.to_ggml().permute(self.n_head, self.n_head_kv) - def permute(self, n_head: int, n_kv_head: Optional[int] = None) -> Tensor: + def permute(self, n_head: int, n_head_kv: int) -> Tensor: raise Exception("shouldn't permute twice") -class GPTQForLLaMaQuantizedTensor(Tensor): - def __init__(self, model: 'LazyModel', namebase: str) -> None: - qweight = load_unquantized(model[f"{namebase}.qweight"], np.int32) - scales = 
load_unquantized(model[f"{namebase}.scales"], np.float32, convert=True) - - bias = model.get(f"{namebase}.bias") - if bias is not None: - # Q4_1 does not support bias; good thing the bias is always all zeros. - assert not np.any(load_unquantized(bias)) - - if f"{namebase}.zeros" in model: - zeros = load_unquantized(model[f"{namebase}.zeros"], np.float32) - else: - qzeros = load_unquantized(model[f"{namebase}.qzeros"], np.int32) - assert qzeros.dtype == np.int32 - zeros = dequantize_q4(qzeros, scales, scales, g_idx=None) - assert zeros.dtype == np.float32 - - assert zeros.shape == scales.shape - - # Output is transposed compared to the input, and addends have their sign flipped. - # Scales and zeros similarly must be transposed but only for newer - # versions of GPTQ-for-LLaMa; the older versions can be identified by - # having shape (n_embd, 1). - qweight = qweight.T - if scales.shape[1] != 1: - scales = scales.T - zeros = zeros.T - - # Output also has signs flipped for the addends. - self.qweight = qweight - self.scales = scales - self.addends = -zeros - - self.g_idx: Optional[NDArray] - if f"{namebase}.g_idx" in model: - self.g_idx = load_unquantized(model[f"{namebase}.g_idx"], np.int32) - assert self.g_idx.shape == (qweight.shape[1] * 8,) - else: - self.g_idx = None - - self.shape = [self.qweight.shape[0], self.qweight.shape[1] * 8] - self.data_type = QuantizedDataType(groupsize=self.groupsize(), have_addends=True, - have_g_idx=(self.g_idx is not None)) - - def inspect(self, row: int, col: int) -> None: - '''For debugging.''' - qweight = (self.qweight[row, col // 8] >> (4 * (col & 7))) & 0xf - if self.g_idx is not None: - group = self.g_idx[col] - else: - group = int(col // self.groupsize()) - scale = self.scales[row, group] - addend = self.addends[row, group] - with np.printoptions(precision=None, suppress=True): - print(f'scale:{scale} addend:{addend} qweight:{qweight}') - print('possible values:', np.arange(16) * scale + addend) - print('actual value:', qweight * scale + addend) - - def astype(self, data_type: DataType) -> Tensor: - if isinstance(data_type, QuantizedDataType): - assert self.g_idx is None and data_type.have_addends is True and data_type.have_g_idx is False - return self.regroup(data_type.groupsize) - - dequantized = dequantize_q4(np.ascontiguousarray(self.qweight), self.scales, self.addends, self.g_idx) - return UnquantizedTensor(dequantized).astype(data_type) - - def groupsize(self) -> int: - assert self.addends.shape == self.scales.shape - assert self.shape[1] % self.scales.shape[1] == 0 - return self.shape[1] // self.scales.shape[1] - - def regroup(self, new_groupsize: int = 32) -> 'GPTQForLLaMaQuantizedTensor': - # Old versions of GPTQ-for-LLaMa shared scales and addends between all the - # columns in a row. Newer versions share them between every set of N - # columns in a row, where N is the `groupsize` parameter, usually 128. The - # output format shares them between every set of 32 columns. To handle - # this, duplicate scales and addends for every smaller group. - # (In the above, 'row' and 'column' are in the sense of the output.) 
- assert self.g_idx is None - old_groupsize = self.groupsize() - assert old_groupsize >= new_groupsize and old_groupsize % new_groupsize == 0, old_groupsize - ret = copy.copy(self) - ret.addends = self.addends.repeat(old_groupsize // new_groupsize, axis=1) - ret.scales = self.scales.repeat(old_groupsize // new_groupsize, axis=1) - ret.data_type = QuantizedDataType(groupsize=new_groupsize, have_addends=True, have_g_idx=False) - return ret - - def permute(self, n_head: int, n_kv_head: Optional[int] = None) -> Tensor: - return DeferredPermutedTensor(self, n_head, n_kv_head) - - def to_ggml(self) -> GGMLQuantizedTensor: - # The output format looks like this: - # For each row: - # For each group of 32 columns: - # - addend (float32, 4 bytes) - # - scale (float32, 4 bytes) - # - weights (int4 * 32, 16 bytes) - - if self.groupsize() != 32: - raise Exception("should have been regrouped before converting to ggml") - - # Since the output format is mixed between integers and floats, we have - # to hackily view the floats as int32s just so numpy will let us - # concatenate them. - addends_view = self.addends.view(dtype=np.int32)[:, :, np.newaxis] - scales_view = self.scales.view(dtype=np.int32)[:, :, np.newaxis] - - # Split into groups of 4 columns (i.e. 32 columns of quantized data): - grouped = self.qweight.reshape([self.qweight.shape[0], self.qweight.shape[1] // 4, 4]) - - # And concatenate: - grouped = np.concatenate([scales_view, addends_view, grouped], axis=2, casting='no') - - return GGMLQuantizedTensor(grouped, self.shape, DT_Q4_1) - - @dataclass class LazyTensor: _load: Callable[[], Tensor] @@ -632,17 +463,6 @@ class LazyTensor: def validate_conversion_to(self, data_type: DataType) -> None: if data_type == self.data_type: return - if isinstance(data_type, QuantizedDataType): - if not isinstance(self.data_type, QuantizedDataType): - raise Exception(f"Can't turn an unquantized tensor into a quantized type ({data_type})") - if self.data_type.have_g_idx: - sys.stderr.write( - "Error: Input uses the newer GPTQ-for-LLaMa format (using g_idx), " - "which is not yet natively supported by GGML. 
" - "For now you can still convert this model by passing `--outtype f16` to dequantize, " - "but that will result in a much larger output file for no quality benefit.\n") - sys.exit(1) - assert not data_type.have_g_idx and self.data_type.have_addends and data_type.have_addends LazyModel = Dict[str, LazyTensor] @@ -713,10 +533,10 @@ def merge_multifile_models(models_plus: List[ModelPlus]) -> ModelPlus: return ModelPlus(model, paths, format, vocab) -def permute_lazy(lazy_tensor: LazyTensor, n_head: int, n_kv_head: Optional[int] = None) -> LazyTensor: +def permute_lazy(lazy_tensor: LazyTensor, n_head: int, n_head_kv: int) -> LazyTensor: def load() -> Tensor: - return lazy_tensor.load().permute(n_head, n_kv_head) - return LazyTensor(load, lazy_tensor.shape, lazy_tensor.data_type, f'permute({n_head}, {n_kv_head}) ' + lazy_tensor.description) + return lazy_tensor.load().permute(n_head, n_head_kv) + return LazyTensor(load, lazy_tensor.shape, lazy_tensor.data_type, f'permute({n_head}, {n_head_kv}) ' + lazy_tensor.description) def permute_part_lazy(lazy_tensor: LazyTensor, n_part: int, n_head: int) -> LazyTensor: def load() -> Tensor: @@ -732,66 +552,6 @@ def part_lazy(lazy_tensor: LazyTensor, n_part: int) -> LazyTensor: s[0] = s[0] // 3 return LazyTensor(load, s, lazy_tensor.data_type, 'part ' + lazy_tensor.description) -def convert_transformers_to_orig(model: LazyModel, params: Params) -> LazyModel: - out: LazyModel = {} - out["tok_embeddings.weight"] = model["model.embed_tokens.weight"] - out["norm.weight"] = model["model.norm.weight"] - out["output.weight"] = model["lm_head.weight"] - - for i in itertools.count(): - if f"model.layers.{i}.self_attn.q_proj.weight" in model: - out[f"layers.{i}.attention.wq.weight"] = permute_lazy(model[f"model.layers.{i}.self_attn.q_proj.weight"], params.n_head) - out[f"layers.{i}.attention.wk.weight"] = permute_lazy(model[f"model.layers.{i}.self_attn.k_proj.weight"], params.n_head, params.n_kv_head) - out[f"layers.{i}.attention.wv.weight"] = model[f"model.layers.{i}.self_attn.v_proj.weight"] - elif f"model.layers.{i}.self_attn.W_pack.weight" in model: - out[f"layers.{i}.attention.wq.weight"] = permute_part_lazy(model[f"model.layers.{i}.self_attn.W_pack.weight"], 0, params.n_head) - out[f"layers.{i}.attention.wk.weight"] = permute_part_lazy(model[f"model.layers.{i}.self_attn.W_pack.weight"], 1, params.n_head) - out[f"layers.{i}.attention.wv.weight"] = part_lazy(model[f"model.layers.{i}.self_attn.W_pack.weight"], 2) - else: - break - - out[f"layers.{i}.attention.wo.weight"] = model[f"model.layers.{i}.self_attn.o_proj.weight"] - - out[f"layers.{i}.feed_forward.w1.weight"] = model[f"model.layers.{i}.mlp.gate_proj.weight"] - out[f"layers.{i}.feed_forward.w2.weight"] = model[f"model.layers.{i}.mlp.down_proj.weight"] - out[f"layers.{i}.feed_forward.w3.weight"] = model[f"model.layers.{i}.mlp.up_proj.weight"] - - out[f"layers.{i}.attention_norm.weight"] = model[f"model.layers.{i}.input_layernorm.weight"] - out[f"layers.{i}.ffn_norm.weight"] = model[f"model.layers.{i}.post_attention_layernorm.weight"] - return out - - -def handle_quantization(model: LazyModel) -> LazyModel: - '''Convert a model with entries for 'foo.qweight', 'foo.scales', etc. - (which resolve to UnquantizedTensors with the raw data) to one with entries - for 'foo.weight' (which resolve to QuantizedTensors). 
- ''' - def convert(name: str) -> Tuple[str, LazyTensor]: - if name.endswith(".qweight"): - namebase = name.rsplit('.', 1)[0] - orig_name = namebase + ".weight" - - lazy_tensor = model[name] - assert len(lazy_tensor.shape) == 2 - real_shape = [lazy_tensor.shape[1], lazy_tensor.shape[0] * 8] - - # Calculate type. This replicates the logic in - # GPTQForLLaMaQuantizedTensor (which is executed when the modelis - # actually loaded). - lazy_scales = model[f"{namebase}.scales"] - scales_width = 1 if lazy_scales.shape[1] == 1 else lazy_scales.shape[0] - assert real_shape[1] % scales_width == 0 - groupsize = real_shape[1] // scales_width - have_g_idx = f"{namebase}.g_idx" in model - data_type = QuantizedDataType(groupsize=groupsize, have_addends=True, have_g_idx=have_g_idx) - - def load() -> Tensor: - return GPTQForLLaMaQuantizedTensor(model, namebase) - - return (orig_name, LazyTensor(load, real_shape, data_type, '[quantized]')) - else: - return (name, model[name]) - return dict(convert(name) for name in model) # Functionality that simulates `torch.load` but where individual tensors are # only loaded into memory on demand, not all at once. @@ -885,14 +645,6 @@ def lazy_load_torch_file(outer_fp: IO[bytes], path: Path) -> ModelPlus: return ModelPlus(model=as_dict, paths=[path], format='torch', vocab=None) -SAFETENSORS_DATA_TYPES: Dict[str, DataType] = { - 'BF16': DT_BF16, - 'F16': DT_F16, - 'F32': DT_F32, - 'I32': DT_I32, -} - - def lazy_load_safetensors_file(fp: IO[bytes], path: Path) -> ModelPlus: header_size, = struct.unpack(' bytes: return ret -def lazy_load_ggml_file(fp: io.BufferedReader, path: Path) -> ModelPlus: - magic = must_read(fp, 4)[::-1] - if magic in (b'ggmf', b'ggjt'): - version, = struct.unpack("i", must_read(fp, 4)) - assert version == 1 - else: - assert magic == b'ggml' - version = None - n_vocab, n_embd, n_mult, n_head, n_layer, rot, file_type = struct.unpack('<7i', must_read(fp, 28)) - - tokens: List[Tuple[bytes, float]] = [] - for i in range(n_vocab): - if i == 32000: - # HACK: GPT4All messed with the format without changing the magic - # number. Specifically, they changed the vocab section to contain - # `n_vocab - 1` tokens instead of `n_vocab` (i.e. omitting the - # extra pad token). Try to detect if we're reading a file like - # this. - orig_pos = fp.tell() - fp.seek(20, io.SEEK_CUR) - is_gpt4all = fp.read(21) == b'tok_embeddings.weight' - fp.seek(orig_pos) - if is_gpt4all: - break - - length, = struct.unpack("i", must_read(fp, 4)) - text = must_read(fp, length) - if magic != b'ggml': - score, = struct.unpack("f", must_read(fp, 4)) - tokens.append((text, score)) - vocab = GGMLVocab(tokens) if magic != b'ggml' else None - - model: LazyModel = {} - # Use mmap for the actual data to avoid race conditions with the file offset. 
- off = fp.raw.tell() - mapped = memoryview(mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ)) - fp.raw.seek(off) # needed on Windows - - def read_tensor() -> None: # this is a function so that variables captured in `load` don't change - shape_len, name_len, ftype = struct.unpack("iii", must_read(fp, 12)) - assert 0 <= shape_len <= 3 - shape: List[int] = list(struct.unpack(f"{shape_len}i", must_read(fp, 4 * shape_len))) - shape = shape[::-1] - name = must_read(fp, name_len).decode('utf-8') - data_type = FTYPE_TO_DATA_TYPE[ftype] - - if magic == b'ggjt': - fp.seek((fp.tell() + 31) & -32) - - if data_type == DT_Q4_1: - # See GPTQForLLaMaQuantizedTensor.ggml_ndarray() - size = 24 * (shape[1] // 32) * shape[0] - elif data_type == DT_Q4_0: - size = 20 * (shape[1] // 32) * shape[0] - else: - numpy_dtype = DATA_TYPE_TO_NUMPY[data_type] - elm_count = math.prod(shape) - size = elm_count * numpy_dtype.itemsize - offset = fp.tell() - buf = mapped[offset:offset+size] - fp.seek(size, io.SEEK_CUR) - - def load() -> Tensor: - if isinstance(data_type, QuantizedDataType): - ndarray = np.frombuffer(buf, dtype=np.uint32) - return GGMLQuantizedTensor(ndarray, shape, data_type) - else: - return UnquantizedTensor(np.frombuffer(buf, dtype=numpy_dtype).reshape(shape)) - description = f'ggml offset={offset} type={data_type} path={path}' - model[name] = LazyTensor(load, shape, data_type, description) - - while fp.read(1) != b'': - fp.seek(-1, io.SEEK_CUR) - read_tensor() - - return ModelPlus(model=model, paths=[path], format='ggml', vocab=vocab) - - @functools.lru_cache(maxsize=None) def lazy_load_file(path: Path) -> ModelPlus: fp = open(path, 'rb') @@ -1010,9 +684,6 @@ def lazy_load_file(path: Path) -> ModelPlus: if first8[:2] == b'PK': # A zip file, i.e. PyTorch format return lazy_load_torch_file(fp, path) - elif first8[2:4] == b'gg': - # GGML format - return lazy_load_ggml_file(fp, path) elif struct.unpack(' ModelPlus: In = TypeVar('In') Out = TypeVar('Out') - def bounded_parallel_map(func: Callable[[In], Out], iterable: Iterable[In], concurrency: int) -> Iterable[Out]: '''Parallel map, but with backpressure. 
If the caller doesn't call `next` fast enough, this will stop calling `func` at some point rather than @@ -1043,8 +713,7 @@ def bounded_parallel_map(func: Callable[[In], Out], iterable: Iterable[In], conc def check_vocab_size(params: Params, vocab: Vocab) -> None: if params.n_vocab != vocab.vocab_size: - # GGMLVocab comes from the same file as the model so shouldn't mismatch: - assert isinstance(vocab, SentencePieceVocab) + assert isinstance(vocab, BpeVocab) or isinstance(vocab, SentencePieceVocab) if params.n_vocab == vocab.vocab_size_base: print("Ignoring added_tokens.json since model matches vocab size without it.") vocab.added_tokens_list = [] @@ -1061,98 +730,161 @@ def check_vocab_size(params: Params, vocab: Vocab) -> None: class OutputFile: def __init__(self, fname_out: Path) -> None: - self.fout = open(fname_out, "wb") + self.gguf = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) - def write_file_header(self, params: Params, file_type: GGMLFileType) -> None: - self.fout.write(b"ggjt"[::-1]) # magic - values = [ - 1, # file version - params.n_vocab, - params.n_embd, - params.n_mult, - params.n_head, - params.n_layer, - params.n_embd // params.n_head, # rot (obsolete) - file_type.value, - ] - self.fout.write(struct.pack("i" * len(values), *values)) + def add_meta_arch(self, params: Params) -> None: + ver = None + if (params.n_ctx == 4096): + ver = "v2" - def write_tensor_header(self, name: str, shape: Sequence[int], data_type: DataType) -> None: - sname = name.encode('utf-8') - self.fout.write(struct.pack("iii", len(shape), len(sname), DATA_TYPE_TO_FTYPE[data_type])) - self.fout.write(struct.pack("i" * len(shape), *shape[::-1])) - self.fout.write(sname) - self.fout.seek((self.fout.tell() + 31) & -32) + self.gguf.add_name ("LLaMA" if ver == None else "LLaMA " + ver) + self.gguf.add_context_length (params.n_ctx) + self.gguf.add_embedding_length (params.n_embd) + self.gguf.add_block_count (params.n_layer) + self.gguf.add_feed_forward_length (params.n_ff) + self.gguf.add_rope_dimension_count(params.n_embd // params.n_head) + self.gguf.add_head_count (params.n_head) + self.gguf.add_head_count_kv (params.n_head_kv) + self.gguf.add_layer_norm_rms_eps (params.f_norm_eps) - def write_vocab(self, vocab: Vocab) -> None: - for text, score in vocab.all_tokens(): - self.fout.write(struct.pack("i", len(text))) - self.fout.write(text) - self.fout.write(struct.pack("f", score)) + if params.ftype: + self.gguf.add_file_type(params.ftype) + + def add_meta_vocab(self, vocab: Vocab) -> None: + tokens = [] + scores = [] + toktypes = [] + # NOTE: `all_tokens` returns the base vocabulary and added tokens + # TODO: add special tokens?
+ for text, score, toktype in vocab.all_tokens(): + tokens.append(text) + scores.append(score) + toktypes.append(toktype) + + self.gguf.add_tokenizer_model("llama") + self.gguf.add_token_list(tokens) + self.gguf.add_token_scores(scores) + self.gguf.add_token_types(toktypes) + + def add_tensor_info(self, name: str, tensor: LazyTensor) -> None: + n_elements = 1 + for dim in tensor.shape: + n_elements *= dim + data_type = DATA_TYPE_TO_NUMPY[tensor.data_type] + data_nbytes = n_elements * data_type.itemsize + self.gguf.add_tensor_info(name, tensor.shape, data_type, data_nbytes) + + def write_meta(self) -> None: + self.gguf.write_header_to_file() + self.gguf.write_kv_data_to_file() + + def write_tensor_info(self) -> None: + self.gguf.write_ti_data_to_file() + + def close(self) -> None: + self.gguf.close() @staticmethod - def write_vocab_only(fname_out: Path, vocab: Vocab) -> None: - of = OutputFile(fname_out) - params = Params(n_vocab=vocab.vocab_size, n_embd=0, n_mult=0, n_head=1, n_layer=0) - of = OutputFile(fname_out) - of.write_file_header(params, file_type=GGMLFileType.AllF32) - of.write_vocab(vocab) - of.fout.close() - - @staticmethod - def write_all(fname_out: Path, params: Params, file_type: GGMLFileType, model: LazyModel, vocab: Vocab) -> None: + def write_vocab_only(fname_out: Path, params: Params, vocab: Vocab) -> None: check_vocab_size(params, vocab) + of = OutputFile(fname_out) - of.write_file_header(params, file_type) - print("Writing vocab...") - of.write_vocab(vocab) + + # meta data + of.add_meta_arch(params) + of.add_meta_vocab(vocab) + of.write_meta() + + of.close() + + @staticmethod + def write_all(fname_out: Path, params: Params, model: LazyModel, vocab: Vocab) -> None: + check_vocab_size(params, vocab) + + of = OutputFile(fname_out) + + # meta data + of.add_meta_arch(params) + of.add_meta_vocab(vocab) + + # tensor info + for name, lazy_tensor in model.items(): + of.add_tensor_info(name, lazy_tensor) + + of.write_meta() + of.write_tensor_info() def do_item(item: Tuple[str, LazyTensor]) -> NDArray: name, lazy_tensor = item return lazy_tensor.load().to_ggml().ndarray + # tensor data ndarrays = bounded_parallel_map(do_item, model.items(), concurrency=8) for i, ((name, lazy_tensor), ndarray) in enumerate(zip(model.items(), ndarrays)): size = ' x '.join(f"{dim:6d}" for dim in lazy_tensor.shape) padi = len(str(len(model))) print(f"[{i+1:{padi}d}/{len(model)}] Writing tensor {name:38s} | size {size:16} | type {lazy_tensor.data_type}") - of.write_tensor_header(name, lazy_tensor.shape, lazy_tensor.data_type) - ndarray.tofile(of.fout) - of.fout.close() + of.gguf.write_tensor_data(ndarray) + of.close() def pick_output_type(model: LazyModel, output_type_str: Optional[str]) -> GGMLFileType: - wq_type = model["layers.0.attention.wq.weight"].data_type - if output_type_str == "f32" or (output_type_str is None and wq_type in (DT_F32, DT_BF16)): + wq_type = model[NAMES[gguf.MODEL_TENSOR.ATTN_Q].format(bid=0)+".weight"].data_type + + if output_type_str == "f32" or (output_type_str is None and wq_type == DT_F32): return GGMLFileType.AllF32 - if output_type_str == "f16" or (output_type_str is None and wq_type == DT_F16): + if output_type_str == "f16" or (output_type_str is None and wq_type in (DT_F16, DT_BF16)): return GGMLFileType.MostlyF16 - if output_type_str == "q4_1" or (output_type_str is None and isinstance(wq_type, QuantizedDataType) and - wq_type.have_addends): - if isinstance(model["output.weight"].data_type, QuantizedDataType): - return GGMLFileType.MostlyQ4_1 - else: - return 
GGMLFileType.PerLayerIsQ4_1 - if output_type_str == "q4_0" or (output_type_str is None and isinstance(wq_type, QuantizedDataType)): - return GGMLFileType.MostlyQ4_0 + name_to_type = {name: lazy_tensor.data_type for (name, lazy_tensor) in model.items()} + raise Exception(f"Unexpected combination of types: {name_to_type}") - -def do_necessary_conversions(model: LazyModel, params: Params) -> LazyModel: - model = handle_quantization(model) - - if "lm_head.weight" in model: - model = convert_transformers_to_orig(model, params) - model = filter_and_sort_tensors(model) - - return model - - def convert_to_output_type(model: LazyModel, output_type: GGMLFileType) -> LazyModel: return {name: tensor.astype(output_type.type_for_tensor(name, tensor)) for (name, tensor) in model.items()} +def convert_model_names(model: LazyModel, params: Params) -> LazyModel: + tmap = gguf.get_tensor_name_map(ARCH, params.n_layer) + + tmp = model + + # HF models permute or pack some of the tensors, so we need to undo that + for i in itertools.count(): + if f"model.layers.{i}.self_attn.q_proj.weight" in model: + print(f"Permuting layer {i}") + tmp[f"model.layers.{i}.self_attn.q_proj.weight"] = permute_lazy(model[f"model.layers.{i}.self_attn.q_proj.weight"], params.n_head, params.n_head) + tmp[f"model.layers.{i}.self_attn.k_proj.weight"] = permute_lazy(model[f"model.layers.{i}.self_attn.k_proj.weight"], params.n_head, params.n_head_kv) + #tmp[f"model.layers.{i}.self_attn.v_proj.weight"] = model[f"model.layers.{i}.self_attn.v_proj.weight"] + elif f"model.layers.{i}.self_attn.W_pack.weight" in model: + print(f"Unpacking and permuting layer {i}") + tmp[f"model.layers.{i}.self_attn.q_proj.weight"] = permute_part_lazy(model[f"model.layers.{i}.self_attn.W_pack.weight"], 0, params.n_head, params.n_head) + tmp[f"model.layers.{i}.self_attn.k_proj.weight"] = permute_part_lazy(model[f"model.layers.{i}.self_attn.W_pack.weight"], 1, params.n_head, params.n_head_kv) + tmp[f"model.layers.{i}.self_attn.v_proj.weight"] = part_lazy (model[f"model.layers.{i}.self_attn.W_pack.weight"], 2) + else: + break + + out: LazyModel = {} + for name, lazy_tensor in model.items(): + name_new = name + + if name in tmap: + name_new = tmap[name] + elif name.endswith(".weight") and name[:-7] in tmap: + name_new = tmap[name[:-7]] + ".weight" + elif name.endswith(".bias") and name[:-5] in tmap: + name_new = tmap[name[:-5]] + ".bias" + else: + raise Exception(f"Unexpected tensor name: {name}") + + if gguf.should_skip_tensor_TMP(ARCH, params.n_layer, name_new): + print(f"skipping tensor {name_new}") + continue + else: + print(f"{name:48s} -> {name_new:40s} | {lazy_tensor.data_type} | {lazy_tensor.shape}") + out[name_new] = lazy_tensor + + return out def nth_multifile_path(path: Path, n: int) -> Optional[Path]: '''Given any path belonging to a multi-file model (e.g. foo.bin.1), return @@ -1203,11 +935,6 @@ def load_some_model(path: Path) -> ModelPlus: # Try the PyTorch patterns too, with lower priority globs = ["consolidated.00.pth", "pytorch_model-00001-of-*.bin", "*.pt", "pytorch_model.bin"] files = [file for glob in globs for file in path.glob(glob)] - if not files: - # Try GGML too, but with lower priority, since if both a non-GGML - # model and a GGML model exist in the same directory, we assume the - # latter was converted from the former.
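The renaming loop in `convert_model_names()` above only has to handle three shapes of key: an exact match in the tensor-name map, or a match after stripping a `.weight`/`.bias` suffix. A self-contained sketch of that rule, with an invented two-entry map standing in for `gguf.get_tensor_name_map(ARCH, n_layer)`:

```python
# Invented mini name map; the real one comes from gguf.get_tensor_name_map().
tmap = {
    "model.embed_tokens": "token_embd",
    "model.layers.0.self_attn.q_proj": "blk.0.attn_q",
}

def rename(name: str) -> str:
    if name in tmap:
        return tmap[name]
    if name.endswith(".weight") and name[:-7] in tmap:
        return tmap[name[:-7]] + ".weight"
    if name.endswith(".bias") and name[:-5] in tmap:
        return tmap[name[:-5]] + ".bias"
    raise Exception(f"Unexpected tensor name: {name}")

print(rename("model.layers.0.self_attn.q_proj.weight"))  # blk.0.attn_q.weight
```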
- files = list(path.glob("ggml-model*.bin*")) if not files: raise Exception(f"Can't find model in directory {path}") if len(files) > 1: @@ -1224,19 +951,14 @@ def load_some_model(path: Path) -> ModelPlus: return model_plus -def filter_and_sort_tensors(model: LazyModel) -> LazyModel: - return {name: model[name] for name in TENSORS_LIST if name in model} - - -def load_vocab(path: Path, vocabtype: Optional[str]) -> SentencePieceVocab: - print(f"vocabtype: {vocabtype}") +def load_vocab(path: Path, vocabtype: Optional[str]) -> Union[BpeVocab, SentencePieceVocab]: # Be extra-friendly and accept either a file or a directory. Also, if it's # a directory, it might be the model directory, and tokenizer.model might # be in the parent of that. if path.is_dir(): vocab_file = "tokenizer.model" if vocabtype == 'bpe': - vocab_file = "vocab.json" + vocab_file = "vocab.json" path2 = path / vocab_file # Use `.parent` instead of /.. to handle the symlink case better. path3 = path.parent / vocab_file @@ -1246,23 +968,26 @@ def load_vocab(path: Path, vocabtype: Optional[str]) -> SentencePieceVocab: path = path3 else: raise FileNotFoundError( - f"Could not find tokenizer.model in {path} or its parent; " + f"Could not find {vocab_file} in {path} or its parent; " "if it's in another directory, pass the directory as --vocab-dir") + + print(f"Loading vocab file '{path}', type '{vocabtype}'") + added_tokens_path = path.parent / "added_tokens.json" - print(f"Loading vocab file {path}") - return SentencePieceVocab(path, added_tokens_path if added_tokens_path.exists() else None, - vocabtype) + if vocabtype == "bpe": + return BpeVocab(path, added_tokens_path if added_tokens_path.exists() else None) + elif vocabtype == "spm": + return SentencePieceVocab(path, added_tokens_path if added_tokens_path.exists() else None) + else: + raise ValueError(f"Unsupported vocabulary type {vocabtype}") def default_outfile(model_paths: List[Path], file_type: GGMLFileType) -> Path: namestr = { - GGMLFileType.AllF32: "f32", + GGMLFileType.AllF32: "f32", GGMLFileType.MostlyF16: "f16", - GGMLFileType.MostlyQ4_0: "q4_0", - GGMLFileType.MostlyQ4_1: "q4_1", - GGMLFileType.PerLayerIsQ4_1: "q4_1", }[file_type] - ret = model_paths[0].parent / f"ggml-model-{namestr}.bin" + ret = model_paths[0].parent / f"ggml-model-{namestr}.gguf" if ret in model_paths: sys.stderr.write( f"Error: Default output path ({ret}) would overwrite the input. 
" @@ -1281,44 +1006,68 @@ def do_dump_model(model_plus: ModelPlus) -> None: def main(args_in: Optional[List[str]] = None) -> None: parser = argparse.ArgumentParser(description="Convert a LLaMa model to a GGML compatible file") - parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") - parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") - parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") - parser.add_argument("--outtype", choices=["f32", "f16", "q4_1", "q4_0"], help="output format (default: based on input)") - parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") - parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") - parser.add_argument("model", type=Path, - help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") - parser.add_argument("--vocabtype", default='spm', choices=["spm", "bpe"], help="vocab format (default: spm)") + parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") + parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") + parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") + parser.add_argument("--outtype", choices=["f32", "f16"], help="output format (default: based on input)") + parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") + parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") + parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") + parser.add_argument("--vocabtype", choices=["spm", "bpe"], help="vocab format (default: spm)", default="spm") + parser.add_argument("--ctx", type=int, help="model training context (default: based on input)") args = parser.parse_args(args_in) - vocab: Vocab if args.dump_single: model_plus = lazy_load_file(args.model) do_dump_model(model_plus) - elif args.vocab_only: + + model_plus = load_some_model(args.model) + + params = Params.load(model_plus) + if params.n_ctx == -1: + if args.ctx is None: + raise Exception("The model doesn't have a context size, and you didn't specify one with --ctx\n" + "Please specify one with --ctx:\n" + " - LLaMA v1: --ctx 2048\n" + " - LLaMA v2: --ctx 4096\n") + params.n_ctx = args.ctx + + if args.outtype: + params.ftype = { + "f32": GGMLFileType.AllF32, + "f16": GGMLFileType.MostlyF16, + }[args.outtype] + + print(f"params = {params}") + + vocab: Vocab + if args.vocab_only: vocab = load_vocab(args.vocab_dir or args.model, args.vocabtype) assert args.outfile, "need --outfile if using --vocab-only" outfile = args.outfile - OutputFile.write_vocab_only(outfile, vocab) + OutputFile.write_vocab_only(outfile, params, vocab) print(f"Wrote {outfile}") else: - model_plus = load_some_model(args.model) if args.dump: do_dump_model(model_plus) return + if model_plus.vocab is not None and args.vocab_dir is None: vocab = model_plus.vocab else: vocab_dir = args.vocab_dir if args.vocab_dir else model_plus.paths[0].parent vocab = load_vocab(vocab_dir, args.vocabtype) - params = Params.load(model_plus) - model = model_plus.model - model = do_necessary_conversions(model, params) - output_type = pick_output_type(model, 
args.outtype) - model = convert_to_output_type(model, output_type) - outfile = args.outfile or default_outfile(model_plus.paths, output_type) - OutputFile.write_all(outfile, params, output_type, model, vocab) + + model = model_plus.model + model = convert_model_names(model, params) + ftype = pick_output_type(model, args.outtype) + model = convert_to_output_type(model, ftype) + outfile = args.outfile or default_outfile(model_plus.paths, ftype) + + params.ftype = ftype + print(f"Writing {outfile}, format {ftype}") + + OutputFile.write_all(outfile, params, model, vocab) print(f"Wrote {outfile}") diff --git a/docs/token_generation_performance_tips.md b/docs/token_generation_performance_tips.md index 69ba6173c..c9acff7d4 100644 --- a/docs/token_generation_performance_tips.md +++ b/docs/token_generation_performance_tips.md @@ -3,7 +3,7 @@ ## Verifying that the model is running on the GPU with cuBLAS Make sure you compiled llama with the correct env variables according to [this guide](../README.md#cublas), so that llama accepts the `-ngl N` (or `--n-gpu-layers N`) flag. When running llama, you may configure `N` to be very large, and llama will offload the maximum possible number of layers to the GPU, even if it's less than the number you configured. For example: ```shell -./main -m "path/to/model.bin" -ngl 200000 -p "Please sir, may I have some " +./main -m "path/to/model.gguf" -ngl 200000 -p "Please sir, may I have some " ``` When running llama, before it starts the inference work, it will output diagnostic information that shows whether cuBLAS is offloading work to the GPU. Look for these lines: @@ -25,9 +25,9 @@ GPU: A6000 (48GB VRAM) CPU: 7 physical cores RAM: 32GB -Model: `TheBloke_Wizard-Vicuna-30B-Uncensored-GGML/Wizard-Vicuna-30B-Uncensored.ggmlv3.q4_0.bin` (30B parameters, 4bit quantization, GGML) +Model: `TheBloke_Wizard-Vicuna-30B-Uncensored-GGML/Wizard-Vicuna-30B-Uncensored.q4_0.gguf` (30B parameters, 4bit quantization, GGML) -Run command: `./main -m "path/to/model.bin" -p "-p "An extremely detailed description of the 10 best ethnic dishes will follow, with recipes: " -n 1000 [additional benchmark flags]` +Run command: `./main -m "path/to/model.gguf" -p "An extremely detailed description of the 10 best ethnic dishes will follow, with recipes: " -n 1000 [additional benchmark flags]` Result: diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index a7b26776a..d2176c910 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -6,27 +6,6 @@ find_package(Threads REQUIRED) # ... -# common - -set(TARGET common) - -add_library(${TARGET} OBJECT - common.h - common.cpp - console.h - console.cpp - grammar-parser.h - grammar-parser.cpp - ) - -if (BUILD_SHARED_LIBS) - set_target_properties(${TARGET} PROPERTIES POSITION_INDEPENDENT_CODE ON) -endif() - -target_include_directories(${TARGET} PUBLIC .) 
-target_compile_features(${TARGET} PUBLIC cxx_std_11) -target_link_libraries(${TARGET} PRIVATE llama) - # examples include_directories(${CMAKE_CURRENT_SOURCE_DIR}) @@ -42,8 +21,10 @@ else() add_subdirectory(benchmark) add_subdirectory(baby-llama) add_subdirectory(train-text-from-scratch) + add_subdirectory(convert-llama2c-to-ggml) add_subdirectory(simple) add_subdirectory(embd-input) + add_subdirectory(llama-bench) if (LLAMA_METAL) add_subdirectory(metal) endif() diff --git a/examples/convert-llama2c-to-ggml/CMakeLists.txt b/examples/convert-llama2c-to-ggml/CMakeLists.txt new file mode 100644 index 000000000..e262d44f9 --- /dev/null +++ b/examples/convert-llama2c-to-ggml/CMakeLists.txt @@ -0,0 +1,5 @@ +set(TARGET convert-llama2c-to-ggml) +add_executable(${TARGET} convert-llama2c-to-ggml.cpp) +install(TARGETS ${TARGET} RUNTIME) +target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) +target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/convert-llama2c-to-ggml/README.md b/examples/convert-llama2c-to-ggml/README.md new file mode 100644 index 000000000..fd561fcbc --- /dev/null +++ b/examples/convert-llama2c-to-ggml/README.md @@ -0,0 +1,30 @@ +## Convert llama2.c model to ggml + +This example reads weights from project [llama2.c](https://github.com/karpathy/llama2.c) and saves them in ggml compatible format. The vocab that is available in `models/ggml-vocab.bin` is used by default. + +To convert the model, first download the models from the [llama2.c](https://github.com/karpathy/llama2.c) repository: + +`$ make -j` + +After successful compilation, the following usage options are available: +``` +usage: ./convert-llama2c-to-ggml [options] + +options: + -h, --help show this help message and exit + --copy-vocab-from-model FNAME model path from which to copy vocab (default 'tokenizer.bin') + --llama2c-model FNAME [REQUIRED] model path from which to load Karpathy's llama2.c model + --llama2c-output-model FNAME model path to save the converted llama2.c model (default 'ak_llama_model.bin') +``` + +An example command using a model from [karpathy/tinyllamas](https://huggingface.co/karpathy/tinyllamas) is as follows: + +`$ ./convert-llama2c-to-ggml --copy-vocab-from-model ../llama2.c/tokenizer.bin --llama2c-model stories42M.bin --llama2c-output-model stories42M.ggmlv3.bin` + +For now the generated model is in the legacy GGJTv3 format, so you need to convert it to gguf manually: + +`$ python ./convert-llama-ggmlv3-to-gguf.py --eps 1e-5 --input stories42M.ggmlv3.bin --output stories42M.gguf.bin` + +Now you can use the model with a command like: + +`$ ./main -m stories42M.gguf.bin -p "One day, Lily met a Shoggoth" -n 500 -c 256` diff --git a/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp new file mode 100644 index 000000000..f8a58dc7a --- /dev/null +++ b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp @@ -0,0 +1,863 @@ +#include "ggml.h" +#include "llama.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#if defined(_MSC_VER) +#pragma warning(disable: 4244 4267) // possible loss of data +#endif + +#define LLAMA_FILE_MAGIC_GGJT 0x67676a74u // 'ggjt' +#define LLAMA_FILE_VERSION_GGJT_V3 3 + +//////////////////////////////////////// llama2.c model structs and functions to load models, alloc memory etc. 
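The `Config` struct that follows mirrors the seven int32 fields at the start of a llama2.c checkpoint. A hedged Python sketch of reading that header (the file name is just an example, and little-endian byte order is assumed, as produced on typical x86/ARM machines):

```python
import struct

# Read the 7-field llama2.c header that the C Config struct below mirrors.
with open("stories42M.bin", "rb") as f:
    (dim, hidden_dim, n_layers, n_heads,
     n_kv_heads, vocab_size, seq_len) = struct.unpack("<7i", f.read(7 * 4))

print(f"dim={dim} hidden={hidden_dim} layers={n_layers} "
      f"heads={n_heads}/{n_kv_heads} vocab={vocab_size} ctx={seq_len}")
```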
+typedef struct { + int dim; // transformer dimension + int hidden_dim; // for ffn layers + int n_layers; // number of layers + int n_heads; // number of query heads + int n_kv_heads; // number of key/value heads (can be < query heads because of multiquery) + int vocab_size; // vocabulary size, usually 256 (byte-level) + int seq_len; // max sequence length +} Config; + +typedef struct { + // token embedding table + float* token_embedding_table; // (vocab_size, dim) + // weights for rmsnorms + float* rms_att_weight; // (layer, dim) rmsnorm weights + float* rms_ffn_weight; // (layer, dim) + // weights for matmuls + float* wq; // (layer, dim, dim) + float* wk; // (layer, dim, dim) + float* wv; // (layer, dim, dim) + float* wo; // (layer, dim, dim) + // weights for ffn + float* w1; // (layer, hidden_dim, dim) + float* w2; // (layer, dim, hidden_dim) + float* w3; // (layer, hidden_dim, dim) + // final rmsnorm + float* rms_final_weight; // (dim,) + // freq_cis for RoPE relatively positional embeddings + // float* freq_cis_real; // (seq_len, dim/2) + // float* freq_cis_imag; // (seq_len, dim/2) + // (optional) classifier weights for the logits, on the last layer + float* wcls; +} TransformerWeights; + +void malloc_weights(TransformerWeights* w, Config* p, bool shared_weights) { + // we calloc instead of malloc to keep valgrind happy + w->token_embedding_table = new float[p->vocab_size * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] = [%d] float space for w->token_embedding_table\n",__func__,p->vocab_size , p->dim, p->vocab_size * p->dim); + + w->rms_att_weight = new float[p->n_layers * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] = [%d] float space for w->rms_att_weight\n",__func__,p->n_layers, p->dim, p->n_layers * p->dim); + + w->rms_ffn_weight = new float[p->n_layers * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] = [%d] float space for w->rms_ffn_weight\n",__func__,p->n_layers , p->dim, p->n_layers * p->dim); + + w->wq = new float[p->n_layers * p->dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->wq\n",__func__,p->n_layers, p->dim, p->dim, p->n_layers * p->dim * p->dim); + + w->wk = new float[p->n_layers * p->dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->wk\n",__func__,p->n_layers, p->dim, p->dim, p->n_layers * p->dim * p->dim); + + w->wv = new float[p->n_layers * p->dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->wv\n",__func__, p->n_layers, p->dim, p->dim, p->n_layers * p->dim * p->dim); + + w->wo = new float[p->n_layers * p->dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->wo\n",__func__,p->n_layers, p->dim, p->dim, p->n_layers * p->dim * p->dim); + + w->w1 = new float[p->n_layers * p->hidden_dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->w1\n",__func__,p->n_layers, p->hidden_dim, p->dim, p->n_layers * p->hidden_dim * p->dim); + + w->w2 = new float[p->n_layers * p->hidden_dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->w2\n",__func__,p->n_layers, p->dim, p->hidden_dim, p->n_layers * p->hidden_dim * p->dim); + + w->w3 = new float[p->n_layers * p->hidden_dim * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] x [%d] = [%d] float space for w->w3\n",__func__,p->n_layers, p->hidden_dim, p->dim, p->n_layers * p->hidden_dim * p->dim); + + w->rms_final_weight = new float[p->dim](); + printf("[%s:AK] Allocating [%d] float 
space for w->rms_final_weight\n",__func__,p->dim); + + if (shared_weights) { + w->wcls = NULL; + } else { + w->wcls = new float[p->vocab_size * p->dim](); + printf("[%s:AK] Allocating [%d] x [%d] = [%d] float space for w->wcls\n",__func__,p->vocab_size , p->dim, p->vocab_size * p->dim); + } +} + +int checkpoint_init_weights(TransformerWeights *w, Config* p, FILE* f, bool shared_weights) { + if (fread(w->token_embedding_table, sizeof(float), p->vocab_size * p->dim, f) != static_cast<size_t>(p->vocab_size * p->dim)) return 1; + if (fread(w->rms_att_weight, sizeof(float), p->n_layers * p->dim, f) != static_cast<size_t>(p->n_layers * p->dim)) return 1; + if (fread(w->wq, sizeof(float), p->n_layers * p->dim * p->dim, f) != static_cast<size_t>(p->n_layers * p->dim * p->dim)) return 1; + if (fread(w->wk, sizeof(float), p->n_layers * p->dim * p->dim, f) != static_cast<size_t>(p->n_layers * p->dim * p->dim)) return 1; + if (fread(w->wv, sizeof(float), p->n_layers * p->dim * p->dim, f) != static_cast<size_t>(p->n_layers * p->dim * p->dim)) return 1; + if (fread(w->wo, sizeof(float), p->n_layers * p->dim * p->dim, f) != static_cast<size_t>(p->n_layers * p->dim * p->dim)) return 1; + if (fread(w->rms_ffn_weight, sizeof(float), p->n_layers * p->dim, f) != static_cast<size_t>(p->n_layers * p->dim)) return 1; + if (fread(w->w1, sizeof(float), p->n_layers * p->dim * p->hidden_dim, f) != static_cast<size_t>(p->n_layers * p->dim * p->hidden_dim)) return 1; + if (fread(w->w2, sizeof(float), p->n_layers * p->hidden_dim * p->dim, f) != static_cast<size_t>(p->n_layers * p->hidden_dim * p->dim)) return 1; + if (fread(w->w3, sizeof(float), p->n_layers * p->dim * p->hidden_dim, f) != static_cast<size_t>(p->n_layers * p->dim * p->hidden_dim)) return 1; + if (fread(w->rms_final_weight, sizeof(float), p->dim, f) != static_cast<size_t>(p->dim)) return 1; + + // Skip freq_cis_real & freq_cis_imag + int head_size = p->dim / p->n_heads; + fseek(f, p->seq_len * head_size * sizeof(float), SEEK_CUR); + + if (!shared_weights && fread(w->wcls, sizeof(float), p->vocab_size * p->dim, f) != static_cast<size_t>(p->vocab_size * p->dim)) return 1; + + // Check we didn't forget to read anything + auto curr = ftell(f); + fseek(f, 0, SEEK_END); + auto end = ftell(f); + if (curr != end) { + printf("Error: failed to read the checkpoint file to the end (curr = %ld, end = %ld)\n", curr, end); + return 1; + } + + return 0; +} + +void free_weights(TransformerWeights* w) { + delete[] w->token_embedding_table; + delete[] w->rms_att_weight; + delete[] w->rms_ffn_weight; + delete[] w->wq; + delete[] w->wk; + delete[] w->wv; + delete[] w->wo; + delete[] w->w1; + delete[] w->w2; + delete[] w->w3; + delete[] w->rms_final_weight; + if (w->wcls) delete[] w->wcls; +} + +void print_sample_weights(TransformerWeights *w){ + printf("----- Quick print of first of the weight values of all the variables\n"); + printf("%f\n", w->token_embedding_table[0]); + printf("%f\n", w->rms_att_weight[0]); + printf("%f\n", w->rms_ffn_weight[0]); + + printf("%f\n", w->wq[0]); + printf("%f\n", w->wk[0]); + printf("%f\n", w->wv[0]); + printf("%f\n", w->wo[0]); + printf("%f\n", w->w1[0]); + printf("%f\n", w->w2[0]); + printf("%f\n", w->w3[0]); + printf("%f\n", w->rms_final_weight[0]); + if (w->wcls) printf("%f\n", w->wcls[0]); +} +//////////////////////////////////////////////////////////////////////////////////////////////////////////// + +//////////////////////////////////////// ggml structs and functions required to load models, configs and save the model. 
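`checkpoint_init_weights` above pins down the on-disk order of the float blobs in a llama2.c checkpoint. The same layout read back from Python with numpy, as a cross-check (toy dimensions, shared classifier weights, and an example file name; all of these are invented):

```python
import numpy as np

def read_block(f, *shape):
    # Each blob is raw float32, row-major, in the order the freads above use.
    return np.fromfile(f, dtype=np.float32, count=int(np.prod(shape))).reshape(shape)

# Toy dimensions; real values come from the Config header.
dim, hidden_dim, n_layers, vocab_size = 8, 16, 2, 32

with open("stories42M.bin", "rb") as f:
    f.seek(7 * 4)  # skip the 7-int Config header
    tok_emb = read_block(f, vocab_size, dim)
    rms_att = read_block(f, n_layers, dim)
    wq = read_block(f, n_layers, dim, dim)
    wk = read_block(f, n_layers, dim, dim)
    wv = read_block(f, n_layers, dim, dim)
    wo = read_block(f, n_layers, dim, dim)
    rms_ffn = read_block(f, n_layers, dim)
    w1 = read_block(f, n_layers, hidden_dim, dim)
    w2 = read_block(f, n_layers, dim, hidden_dim)
    w3 = read_block(f, n_layers, hidden_dim, dim)
    rms_final = read_block(f, dim)
    # freq_cis_real/imag are skipped next; wcls follows only if not shared.
```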
+
+struct llama_vocab {
+    using id    = int32_t;
+    using token = std::string;
+    using ttype = llama_token_type;
+
+    struct token_data {
+        token text;
+        float score;
+        ttype type;
+    };
+
+    std::unordered_map<token, id> token_to_id;
+    std::vector<token_data> id_to_token;
+};
+
+struct my_llama_hparams {
+    uint32_t n_vocab = 32000;
+    uint32_t n_ctx   = 512;   // this is provided as user input?
+    uint32_t n_embd  = 4096;
+    uint32_t n_mult  = 4;
+    uint32_t n_head  = 32;
+    uint32_t n_layer = 32;
+    uint32_t n_rot   = 64;
+    bool operator!=(const my_llama_hparams& other) const {
+        return memcmp(this, &other, sizeof(my_llama_hparams));
+    }
+};
+
+struct my_llama_layer {
+    // normalization
+    struct ggml_tensor * attention_norm;
+
+    // attention
+    struct ggml_tensor * wq;
+    struct ggml_tensor * wk;
+    struct ggml_tensor * wv;
+    struct ggml_tensor * wo;
+
+    // normalization
+    struct ggml_tensor * ffn_norm;
+
+    // ff
+    struct ggml_tensor * w1;
+    struct ggml_tensor * w2;
+    struct ggml_tensor * w3;
+};
+
+struct my_llama_model {
+    struct ggml_context * ctx = NULL;
+
+    my_llama_hparams hparams;
+
+    struct ggml_tensor * tok_embeddings;
+
+    struct ggml_tensor * norm;
+    struct ggml_tensor * output;
+
+    std::vector<my_llama_layer> layers;
+
+    uint32_t train_its = 0;
+    uint32_t train_samples = 0;
+    uint32_t train_tokens = 0;
+};
+
+struct train_params {
+    const char * fn_vocab_model;
+    const char * fn_llama2c_model;
+    const char * fn_llama2c_output_model;
+    const char * fn_train_data;
+    const char * fn_checkpoint_in;
+    const char * fn_checkpoint_out;
+    const char * fn_model_out;
+
+    uint32_t seed;
+
+    int n_ctx;
+    int n_embd;
+    int n_mult;
+    int n_head;
+    int n_layer;
+    int n_rotmax;
+
+    int n_threads;
+    int n_batch;
+    int n_examples;
+    int n_predict;
+
+    int print_info_interval;
+    int print_details_interval;
+
+    bool samples_start_after_nl;
+    bool use_adam;
+    bool use_flash;
+    bool use_scratch;
+
+    // only adam
+    int   warmup;
+    int   cos_decay_steps;
+    float cos_decay_restart;
+    float cos_decay_alpha;
+
+    int   lbfgs_n_iter;
+    int   adam_n_iter;
+    float adam_alpha;
+    float adam_decay;
+
+    int mem_model_gb;
+    int mem_compute_gb;
+    int mem_compute0_gb;
+    int mem_compute1_gb;
+};
+
+uint32_t get_n_ff(const struct my_llama_hparams* hparams) {
+    // rounds 2/3 * (4 * n_embd) up to the next multiple of n_mult,
+    // e.g. n_embd = 4096, n_mult = 256: ((2*16384/3 + 255)/256)*256 = 11008, the LLaMA-7B value
+    const uint32_t n_ff = ((2*(4*hparams->n_embd)/3 + hparams->n_mult - 1)/hparams->n_mult)*hparams->n_mult;
+    return n_ff;
+}
+
+void print_params(struct my_llama_hparams * params) {
+    printf("%s: n_vocab: %d\n", __func__, params->n_vocab);
+    printf("%s: n_ctx:   %d\n", __func__, params->n_ctx);
+    printf("%s: n_embd:  %d\n", __func__, params->n_embd);
+    printf("%s: n_mult:  %d\n", __func__, params->n_mult);
+    printf("%s: n_head:  %d\n", __func__, params->n_head);
+    printf("%s: n_ff:    %d\n", __func__, get_n_ff(params));
+    printf("%s: n_layer: %d\n", __func__, params->n_layer);
+    printf("%s: n_rot:   %d\n", __func__, params->n_rot);
+}
+
+void init_model(struct my_llama_model * model) {
+    const auto & hparams = model->hparams;
+
+    const uint32_t n_embd  = hparams.n_embd;
+    const uint32_t n_layer = hparams.n_layer;
+    const uint32_t n_vocab = hparams.n_vocab;
+
+    const uint32_t n_ff = get_n_ff(&hparams);
+    struct ggml_context * ctx = model->ctx;
+
+    model->train_its = 0;
+    model->train_samples = 0;
+    model->train_tokens = 0;
+
+    model->tok_embeddings = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_vocab);
+    printf("[%s:GG] Allocating [%d] x [%d] = [%d] float space for model->tok_embeddings\n",__func__,n_embd , n_vocab, n_embd * n_vocab);
+
+    model->norm = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, n_embd);
+    printf("[%s:GG] Allocating [%d] float space for model->norm\n",__func__,n_embd);
+
+    model->output = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_vocab);
+    printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for model->output\n",__func__,n_embd, n_vocab, n_embd * n_vocab);
+
+    // printing the per-layer allocations here so we don't print in the for loop.
+    printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wq for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer);
+    printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wk for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer);
+    printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wv for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer);
+    printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wo for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer);
+
+    printf("[%s:GG] Allocating [%d] float space for layer.ffn_norm for [%d] layers\n",__func__,n_embd, n_layer);
+
+    printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w1 for [%d] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer);
+    printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w2 for [%d] layers\n",__func__, n_embd, n_ff, n_ff * n_embd, n_layer);
+    printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w3 for [%d] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer);
+
+    ggml_set_name(model->tok_embeddings, "tok_embeddings.weight");
+    ggml_set_name(model->norm,           "norm.weight");
+    ggml_set_name(model->output,         "output.weight");
+
+    model->layers.resize(n_layer);
+    for (uint32_t i = 0; i < n_layer; ++i) {
+        auto & layer = model->layers[i];
+
+        std::string layers_i = "layers." + std::to_string(i);
+
+        layer.attention_norm = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, n_embd);
+
+        layer.wq = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_embd);
+        layer.wk = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_embd);
+        layer.wv = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_embd);
+        layer.wo = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_embd);
+
+        layer.ffn_norm = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, n_embd);
+
+        layer.w1 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff);
+        layer.w2 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_ff, n_embd);
+        layer.w3 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff);
+
+        ggml_set_name(layer.attention_norm, (layers_i + ".attention_norm.weight").c_str());
+
+        ggml_set_name(layer.wq, (layers_i + ".attention.wq.weight").c_str());
+        ggml_set_name(layer.wk, (layers_i + ".attention.wk.weight").c_str());
+        ggml_set_name(layer.wv, (layers_i + ".attention.wv.weight").c_str());
+        ggml_set_name(layer.wo, (layers_i + ".attention.wo.weight").c_str());
+
+        ggml_set_name(layer.ffn_norm, (layers_i + ".ffn_norm.weight").c_str());
+
+        ggml_format_name(layer.w1, "%s.feed_forward.w1.weight", layers_i.c_str());
+        ggml_format_name(layer.w2, "%s.feed_forward.w2.weight", layers_i.c_str());
+        ggml_format_name(layer.w3, "%s.feed_forward.w3.weight", layers_i.c_str());
+    }
+}
+
+float get_f32_2d(struct ggml_tensor * tensor, int64_t i0, int64_t i1) {
+    float * ptr = (float *) ((char *) tensor->data + i0*tensor->nb[0] + i1*tensor->nb[1]);
+    return *ptr;
+}
+
+int32_t get_i32_2d(struct ggml_tensor * tensor, int64_t i0, int64_t i1) {
+    int32_t * ptr = (int32_t *) ((char *) tensor->data + i0*tensor->nb[0] + i1*tensor->nb[1]);
+    return *ptr;
+}
+
+void print_row(struct ggml_tensor * probs, int i) {
+    for (int k = 0; k < probs->ne[0]; ++k) {
+        float p = get_f32_2d(probs, k, i);
+        printf(" %f", p);
+    }
+    printf("\n");
+}
+
+void print_matrix(struct ggml_tensor * probs) {
+    assert(probs->n_dims == 2);
+    for (int i = 0; i < probs->ne[1]; ++i) {
+        for (int k = 0; k < probs->ne[0]; ++k) {
+            float p = get_f32_2d(probs, k, i);
+            printf(" %.2f", p);
+        }
+        printf("\n");
+    }
+}
+
+#ifdef __GNUC__
+#ifdef __MINGW32__
+__attribute__((format(gnu_printf, 1, 2)))
+#else
+__attribute__((format(printf, 1, 2)))
+#endif
+#endif
+static std::string format(const char * fmt, ...) {
+    va_list ap, ap2;
+    va_start(ap, fmt);
+    va_copy(ap2, ap);
+    int size = vsnprintf(NULL, 0, fmt, ap);
+    GGML_ASSERT(size >= 0 && size < INT_MAX);
+    std::vector<char> buf(size + 1);
+    int size2 = vsnprintf(buf.data(), size + 1, fmt, ap2);
+    GGML_ASSERT(size2 == size);
+    va_end(ap2);
+    va_end(ap);
+    return std::string(buf.data(), size);
+}
+
+struct llama_file {
+    // use FILE * so we don't have to re-open the file to mmap
+    FILE * fp;
+    size_t size;
+
+    llama_file(const char * fname, const char * mode) {
+        fp = std::fopen(fname, mode);
+        if (fp == NULL) {
+            size = 0;
+        } else {
+            seek(0, SEEK_END);
+            size = tell();
+            seek(0, SEEK_SET);
+        }
+    }
+
+    size_t tell() const {
+#ifdef _WIN32
+        __int64 ret = _ftelli64(fp);
+#else
+        long ret = std::ftell(fp);
+#endif
+        GGML_ASSERT(ret != -1); // this really shouldn't fail
+        return (size_t) ret;
+    }
+
+    void seek(size_t offset, int whence) {
+#ifdef _WIN32
+        int ret = _fseeki64(fp, (__int64) offset, whence);
+#else
+        int ret = std::fseek(fp, (long) offset, whence);
+#endif
+        GGML_ASSERT(ret == 0); // same
+    }
+
+    void read_raw(void * ptr, size_t size) {
+        if (size == 0) {
+            return;
+        }
+        errno = 0;
+        std::size_t ret = std::fread(ptr, size, 1, fp);
+        if (ferror(fp)) {
+            throw std::runtime_error(format("read error: %s", strerror(errno)));
+        }
+        if (ret != 1) {
+            throw std::runtime_error(std::string("unexpectedly reached end of file"));
+        }
+    }
+
+    std::uint32_t read_u32() {
+        std::uint32_t ret;
+        read_raw(&ret, sizeof(ret));
+        return ret;
+    }
+    std::float_t read_f32() {
+        std::float_t ret;
+        read_raw(&ret, sizeof(ret));
+        return ret;
+    }
+
+    std::string read_string(std::uint32_t len) {
+        std::vector<char> chars(len);
+        read_raw(chars.data(), len);
+        return std::string(chars.data(), len);
+    }
+
+    void write_raw(const void * ptr, size_t size) {
+        if (size == 0) {
+            return;
+        }
+        errno = 0;
+        size_t ret = std::fwrite(ptr, size, 1, fp);
+        if (ret != 1) {
+            throw std::runtime_error(format("write error: %s", strerror(errno)));
+        }
+    }
+
+    void write_u32(std::uint32_t val) {
+        write_raw(&val, sizeof(val));
+    }
+
+    ~llama_file() {
+        if (fp) {
+            std::fclose(fp);
+        }
+    }
+};
+
+void write_tensor(struct llama_file * file, struct ggml_tensor * tensor) {
+    if (tensor == NULL) {
+        file->write_u32(0);
+        file->write_u32(0);
+        file->write_u32(GGML_TYPE_F32);
+        file->seek((0-file->tell()) & 31, SEEK_CUR);
+        return;
+    }
+    const char * name = ggml_get_name(tensor);
+    uint32_t name_len = strlen(name);
+    uint32_t nd = tensor->n_dims;
+    uint32_t ne[4] = { (uint32_t)tensor->ne[0],
+                       (uint32_t)tensor->ne[1],
+                       (uint32_t)tensor->ne[2],
+                       (uint32_t)tensor->ne[3] };
+    file->write_u32(nd);
+    file->write_u32(name_len);
+    file->write_u32(tensor->type);
+    file->write_raw(ne, sizeof(ne[0]) * nd);
+    file->write_raw(name, name_len);
+    file->seek((0-file->tell()) & 31, SEEK_CUR);
+    file->write_raw(tensor->data, ggml_nbytes(tensor));
+}
+
+bool is_ggml_file(const char *filename) {
+    llama_file file(filename, "rb");
+    if (file.size < 4) {
+        return false;
+    }
+    uint32_t magic = file.read_u32();
+    return magic == GGUF_MAGIC;
+}
+
+void load_vocab(const char *filename, Config *config, struct llama_vocab *vocab) {
+#pragma message("TODO: implement reading vocabulary using gguf")
+//    // heuristic to infer whether vocab is from ggml or from llama2.c vocabulary
+//    if (is_ggml_file(filename)) {
+//
+//        struct llama_context_params llama_params = llama_context_default_params();
+//        llama_params.vocab_only = true;
+//
+//        struct llama_model * lmodel = llama_load_model_from_file(filename, llama_params);
+//        struct llama_context * lctx = llama_new_context_with_model(lmodel, llama_params);
+//
+//        const int n_vocab = llama_n_vocab(lctx);
+//        vocab->id_to_token.resize(n_vocab);
+//        for (int i=0; i<n_vocab; i++) {
+//            vocab->id_to_token[i].text = llama_token_get_text(lctx, i);
+//            vocab->id_to_token[i].score = llama_token_get_score(lctx, i);
+//            vocab->id_to_token[i].type = llama_token_get_type(lctx, i);
+//            vocab->token_to_id.emplace(vocab->id_to_token[i].text, i);
+//        }
+//        llama_free(lctx);
+//        llama_free_model(lmodel);
+//    } else
+    { // assume llama2.c vocabulary
+        printf("Assuming llama2.c vocabulary since %s is not a ggml file\n", filename);
+        llama_file file(filename, "rb");
+        const int n_vocab = config->vocab_size;
+        /* uint32_t max_token_length = */ file.read_u32(); // unused
+        vocab->id_to_token.resize(n_vocab);
+        for (int i=0; i<n_vocab; ++i) {
+            float_t score = file.read_f32();
+            uint32_t len = file.read_u32();
+            std::string text = file.read_string(len);
+            // Special-case handling of <0xXX> single byte tokens.
+            char byte_val;
+            if (sscanf(text.c_str(), "<0x%02hhX>", &byte_val) == 1) {
+                char cstr[2] = { byte_val, 0 };
+                text = cstr;
+            }
+            vocab->id_to_token[i].text = text;
+            vocab->id_to_token[i].score = score;
+            vocab->id_to_token[i].type = LLAMA_TOKEN_TYPE_UNDEFINED;
+            vocab->token_to_id.emplace(text, i);
+        }
+    }
+}
+
+void stuff_karpathy_weights_into_gg(struct ggml_tensor * gg_weights, float * karpathy_weights){
+    int ct;
+    switch (gg_weights->n_dims){
+        case 1:
+            ct = 0;
+            for (int i0 = 0; i0 < gg_weights->ne[0]; i0++){
+                float * ptr = (float *) ((char *) gg_weights->data + i0*gg_weights->nb[0]);
+                *ptr = karpathy_weights[ct];
+                ct++;
+            }
+            break;
+        case 2:
+            ct = 0;
+            for (int i1 = 0; i1 < gg_weights->ne[1]; i1++) {
+                for (int i0 = 0; i0 < gg_weights->ne[0]; i0++) {
+                    float * ptr = (float *) ((char *) gg_weights->data + i0*gg_weights->nb[0] + i1*gg_weights->nb[1]);
+                    *ptr = karpathy_weights[ct];
+                    ct++;
+                }
+            }
+            break;
+        case 3:
+            ct = 0;
+            for (int i2 = 0; i2 < gg_weights->ne[2]; i2++) {
+                for (int i1 = 0; i1 < gg_weights->ne[1]; i1++) {
+                    for (int i0 = 0; i0 < gg_weights->ne[0]; i0++) {
+                        float * ptr = (float *) ((char *) gg_weights->data + i0*gg_weights->nb[0] + i1*gg_weights->nb[1] + i2*gg_weights->nb[2]);
+                        *ptr = karpathy_weights[ct];
+                        ct++;
+                    }
+                }
+            }
+            break;
+    }
+}
+
+void save_as_llama_model(struct llama_vocab * vocab, struct my_llama_model * model, TransformerWeights* w, const char * filename) {
+    struct llama_file file(filename, "wb");
+    if (file.fp == NULL) {
+        return;
+    }
+
+#pragma message("TODO: implement file saving using gguf")
+    // write_magic
+    file.write_u32(LLAMA_FILE_MAGIC_GGJT);      // magic
+    file.write_u32(LLAMA_FILE_VERSION_GGJT_V3); // version
+    // write_hparams
+    file.write_u32(model->hparams.n_vocab);
+    file.write_u32(model->hparams.n_embd);
+    file.write_u32(model->hparams.n_mult);
+    file.write_u32(model->hparams.n_head);
+    file.write_u32(model->hparams.n_layer);
+    file.write_u32(model->hparams.n_rot);
+    file.write_u32(LLAMA_FTYPE_ALL_F32);
+
+    // write_vocab - for now we are just writing the existing BPE voc. assuming karpathy's vocabulary is the same. idk.
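    // [illustrative note, not part of the original patch] each vocab entry the
    // loop below emits is a self-delimiting record: a u32 byte length, the raw
    // token text, then a 4-byte float score. This is the old ggjt ordering;
    // note that load_vocab above reads llama2.c's score-first ordering instead.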
+ uint32_t n_vocab = model->hparams.n_vocab; + for (uint32_t i = 0; i < n_vocab; i++) { + const auto & token_data = vocab->id_to_token.at(i); + file.write_u32((uint32_t) token_data.text.size()); + file.write_raw(token_data.text.data(), token_data.text.size()); + file.write_raw(&token_data.score, sizeof(token_data.score)); + } + + // stuff AK weights into GG weights one by one. + // w->token_embedding_table -> model->tok_embeddings + // float* -> struct ggml_tensor + stuff_karpathy_weights_into_gg(model->tok_embeddings, w->token_embedding_table); + stuff_karpathy_weights_into_gg(model->output, w->wcls ? w->wcls : w->token_embedding_table); + + stuff_karpathy_weights_into_gg(model->norm, w->rms_final_weight); + //print_row(model->norm, 0); + + // for rms-att-weight + int row_length = model->hparams.n_embd; + const auto & hparams = model->hparams; + //int n_ff = model->hparams.n_embd; + int n_ff = get_n_ff(&hparams); + + for (uint32_t i = 0; i < model->hparams.n_layer; ++i){ + auto & layer = model->layers[i]; + // 1d + stuff_karpathy_weights_into_gg(layer.attention_norm, &w->rms_att_weight[i*row_length]); + stuff_karpathy_weights_into_gg(layer.ffn_norm , &w->rms_ffn_weight[i*row_length]); + + // from 3d matrix layer x dim x dim to 2d matrix dim x dim + stuff_karpathy_weights_into_gg(layer.wq , &w->wq[i*row_length*row_length]); + stuff_karpathy_weights_into_gg(layer.wk , &w->wk[i*row_length*row_length]); + stuff_karpathy_weights_into_gg(layer.wv , &w->wv[i*row_length*row_length]); + stuff_karpathy_weights_into_gg(layer.wo , &w->wo[i*row_length*row_length]); + + stuff_karpathy_weights_into_gg(layer.w1 , &w->w1[i*row_length*n_ff]); + stuff_karpathy_weights_into_gg(layer.w2 , &w->w2[i*n_ff*row_length]); + stuff_karpathy_weights_into_gg(layer.w3 , &w->w3[i*row_length*n_ff]); + } + // write tensors + write_tensor(&file, model->tok_embeddings); + write_tensor(&file, model->norm); + write_tensor(&file, model->output); // ? 
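    // [illustrative note, not part of the original patch] write_tensor serializes
    // each tensor as: u32 n_dims, u32 name length, u32 ggml type, the n_dims
    // dimension sizes, the name bytes, zero padding up to the next 32-byte
    // boundary via seek((0 - tell()) & 31, SEEK_CUR), and finally the raw data.
    // The 32-byte alignment is what lets ggjt-style loaders mmap weights directly.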
+    for (uint32_t i = 0; i < model->hparams.n_layer; ++i) {
+        auto & layer = model->layers[i];
+
+        write_tensor(&file, layer.attention_norm);
+        write_tensor(&file, layer.wq);
+        write_tensor(&file, layer.wk);
+        write_tensor(&file, layer.wv);
+        write_tensor(&file, layer.wo);
+        write_tensor(&file, layer.ffn_norm);
+        write_tensor(&file, layer.w1);
+        write_tensor(&file, layer.w2);
+        write_tensor(&file, layer.w3);
+    }
+}
+
+struct train_params get_default_train_params() {
+    struct train_params params;
+    params.fn_vocab_model          = "tokenizer.bin";
+    params.fn_llama2c_output_model = "ak_llama_model.bin";
+    params.fn_train_data           = "shakespeare.txt";
+    params.fn_checkpoint_in        = "checkpoint.bin";
+    params.fn_checkpoint_out       = "checkpoint.bin";
+    params.fn_model_out            = "ggml-checkpoint-f32.bin";
+
+    params.seed = -1;
+
+    params.n_ctx    = 128;
+    params.n_embd   = 256;
+    params.n_mult   = 256;
+    params.n_head   = 8;
+    params.n_layer  = 16;
+    params.n_rotmax = 64;
+
+    params.n_threads  = 6;
+    params.n_batch    = 8;
+    params.n_examples = 8;
+    params.n_predict  = 1024;
+
+    params.print_info_interval    = 1;
+    params.print_details_interval = 2;
+
+    params.samples_start_after_nl = false;
+    params.use_adam               = true;
+    params.use_flash              = true;
+    params.use_scratch            = true;
+
+    // only adam
+    params.warmup            = 100;
+    params.cos_decay_steps   = 1000;
+    params.cos_decay_restart = 1.1f;
+    params.cos_decay_alpha   = 0.0f;
+
+    params.lbfgs_n_iter = 16;
+    params.adam_n_iter  = 16;
+    params.adam_alpha   = 1e-3f;
+    params.adam_decay   = 1e-3f;
+
+    params.mem_model_gb    = 2;
+    params.mem_compute_gb  = 24;
+    params.mem_compute0_gb = 8;
+    params.mem_compute1_gb = 2;
+
+    return params;
+}
+
+void print_usage(int /*argc*/, char ** argv, const struct train_params * params) {
+    fprintf(stderr, "usage: %s [options]\n", argv[0]);
+    fprintf(stderr, "\n");
+    fprintf(stderr, "options:\n");
+    fprintf(stderr, "  -h, --help                     show this help message and exit\n");
+    fprintf(stderr, "  --copy-vocab-from-model FNAME  llama2.c vocabulary or ggmlv3 model path from which to copy vocab (default '%s')\n", params->fn_vocab_model);
+    fprintf(stderr, "  --llama2c-model FNAME          [REQUIRED] model path from which to load Karpathy's llama2.c model\n");
+    fprintf(stderr, "  --llama2c-output-model FNAME   model path to save the converted llama2.c model (default '%s')\n", params->fn_llama2c_output_model);
+    fprintf(stderr, "\n");
+}
+
+bool params_parse(int argc, char ** argv, struct train_params * params) {
+    bool invalid_param = false;
+    bool reqd_param_found = false;
+    std::string arg;
+    struct train_params default_params = get_default_train_params();
+    const std::string arg_prefix = "--";
+
+    for (int i = 1; i < argc; i++) {
+        arg = argv[i];
+        if (arg.compare(0, arg_prefix.size(), arg_prefix) == 0) {
+            std::replace(arg.begin(), arg.end(), '_', '-');
+        }
+
+        if (arg == "--copy-vocab-from-model") {
+            if (++i >= argc) {
+                invalid_param = true;
+                break;
+            }
+            params->fn_vocab_model = argv[i];
+        } else if (arg == "--llama2c-model") {
+            if (++i >= argc) {
+                invalid_param = true;
+                break;
+            }
+            reqd_param_found = true;
+            params->fn_llama2c_model = argv[i];
+        } else if (arg == "--llama2c-output-model") {
+            if (++i >= argc) {
+                invalid_param = true;
+                break;
+            }
+            params->fn_llama2c_output_model = argv[i];
+        } else if (arg == "-h" || arg == "--help") {
+            print_usage(argc, argv, &default_params);
+            exit(0);
+        } else {
+            fprintf(stderr, "error: unknown argument: %s\n", arg.c_str());
+            print_usage(argc, argv, &default_params);
+            exit(1);
+        }
+    }
+    if (invalid_param) {
+        fprintf(stderr, "error: invalid parameter for argument: %s\n", arg.c_str());
+        print_usage(argc, argv, &default_params);
+        exit(1);
+    }
+    if (!reqd_param_found){
+        fprintf(stderr, "error: please specify a llama2.c .bin file to be converted with argument --llama2c-model\n");
+        print_usage(argc, argv, &default_params);
+        exit(1);
+    }
+
+    return true;
+}
+
+int main(int argc, char ** argv) {
+    struct train_params params = get_default_train_params();
+    if (!params_parse(argc, argv, &params)) {
+        return 1;
+    }
+    Config config;
+    TransformerWeights weights;
+    {
+        FILE *file = fopen(params.fn_llama2c_model, "rb");
+        if (!file) { printf("Unable to open the checkpoint file %s!\n", params.fn_llama2c_model); return 1; }
+        // read in the config header
+        if(fread(&config, sizeof(Config), 1, file) != 1) { return 1; }
+        // a negative vocab_size signals that the classifier weights are not shared with the embedding table
+        auto shared_weights = config.vocab_size > 0;
+        config.vocab_size = abs(config.vocab_size);
+
+        // read in the Transformer weights
+        malloc_weights(&weights, &config, shared_weights);
+        if(checkpoint_init_weights(&weights, &config, file, shared_weights)) { return 1; }
+        fclose(file);
+    }
+
+    struct llama_vocab vocab;
+    load_vocab(params.fn_vocab_model, &config, &vocab);
+
+    struct my_llama_model model;
+    model.hparams.n_vocab = config.vocab_size; //llama_n_vocab(lctx);
+    model.hparams.n_ctx   = params.n_ctx;
+    model.hparams.n_embd  = config.dim; //params.n_embd;
+    model.hparams.n_mult  = 32;//params.n_mult;
+    model.hparams.n_head  = config.n_heads; //params.n_head;
+    model.hparams.n_layer = config.n_layers; //params.n_layer;
+    model.hparams.n_rot   = std::min((uint32_t)params.n_rotmax, model.hparams.n_embd / model.hparams.n_head);
+    print_params(&model.hparams);
+    struct ggml_init_params lcparams;
+    lcparams.mem_size   = 1024ll*1024ll*1024ll*((size_t) params.mem_model_gb);
+    lcparams.mem_buffer = NULL;
+    lcparams.no_alloc   = false;
+
+    model.ctx = ggml_init(lcparams);
+
+    init_model(&model);
+    save_as_llama_model(&vocab, &model, &weights, params.fn_llama2c_output_model);
+
+    printf("Saving llama2.c model file %s in ggml format at %s\n", params.fn_llama2c_model, params.fn_llama2c_output_model);
+
+    ggml_free(model.ctx);
+    free_weights(&weights);
+    return 0;
+}
diff --git a/examples/embd-input/embd-input-lib.cpp b/examples/embd-input/embd-input-lib.cpp
index 2185b9b0e..8a6ad882e 100644
--- a/examples/embd-input/embd-input-lib.cpp
+++ b/examples/embd-input/embd-input-lib.cpp
@@ -167,7 +167,7 @@ llama_token sampling_id(struct MyModel* mymodel) {
     llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false };
 
     // TODO: Apply penalties
-    // float nl_logit = logits[llama_token_nl()];
+    // float nl_logit = logits[llama_token_nl(ctx)];
     // auto last_n_repeat = std::min(std::min((int)last_n_tokens.size(), repeat_last_n), n_ctx);
     // llama_sample_repetition_penalty(ctx, &candidates_p,
     //     last_n_tokens.data() + last_n_tokens.size() - last_n_repeat,
@@ -176,7 +176,7 @@ llama_token sampling_id(struct MyModel* mymodel) {
     //     last_n_tokens.data() + last_n_tokens.size() - last_n_repeat,
     //     last_n_repeat, alpha_frequency, alpha_presence);
     // if (!penalize_nl) {
-    //     logits[llama_token_nl()] = nl_logit;
+    //     logits[llama_token_nl(ctx)] = nl_logit;
     // }
 
     if (temp <= 0) {
@@ -211,7 +211,7 @@ const char * sampling(struct MyModel * mymodel) {
     llama_context * ctx = mymodel->ctx;
     int id = sampling_id(mymodel);
     static std::string ret;
-    if (id == llama_token_eos()) {
+    if (id == llama_token_eos(ctx)) {
         ret = "</s>";
     } else {
         ret = llama_token_to_str(ctx, id);
diff --git a/examples/embd-input/embd_input.py b/examples/embd-input/embd_input.py
old mode 100644
new mode 100755
index be2896614..f146acdc1
--- a/examples/embd-input/embd_input.py
+++ b/examples/embd-input/embd_input.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
 import ctypes
 from ctypes import cdll, c_char_p, c_void_p, POINTER, c_float, c_int
 import numpy as np
diff --git a/examples/embd-input/llava.py b/examples/embd-input/llava.py
old mode 100644
new mode 100755
index bcbdd2bed..06fad55f4
--- a/examples/embd-input/llava.py
+++ b/examples/embd-input/llava.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
 import sys
 import os
 sys.path.insert(0, os.path.dirname(__file__))
diff --git a/examples/embd-input/minigpt4.py b/examples/embd-input/minigpt4.py
old mode 100644
new mode 100755
index 15c9b77c0..7b13e4a5c
--- a/examples/embd-input/minigpt4.py
+++ b/examples/embd-input/minigpt4.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
 import sys
 import os
 sys.path.insert(0, os.path.dirname(__file__))
diff --git a/examples/embd-input/panda_gpt.py b/examples/embd-input/panda_gpt.py
old mode 100644
new mode 100755
index 0cfac5f32..891ad7cc9
--- a/examples/embd-input/panda_gpt.py
+++ b/examples/embd-input/panda_gpt.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python3
 import sys
 import os
 sys.path.insert(0, os.path.dirname(__file__))
diff --git a/examples/embedding/embedding.cpp b/examples/embedding/embedding.cpp
index 5192d6df5..38395c75b 100644
--- a/examples/embedding/embedding.cpp
+++ b/examples/embedding/embedding.cpp
@@ -67,28 +67,35 @@ int main(int argc, char ** argv) {
         fprintf(stderr, "%s: prompt: '%s'\n", __func__, params.prompt.c_str());
         fprintf(stderr, "%s: number of tokens in prompt = %zu\n", __func__, embd_inp.size());
         for (int i = 0; i < (int) embd_inp.size(); i++) {
-            fprintf(stderr, "%6d -> '%s'\n", embd_inp[i], llama_token_to_str(ctx, embd_inp[i]));
+            fprintf(stderr, "%6d -> '%s'\n", embd_inp[i], llama_token_to_str(ctx, embd_inp[i]).c_str());
         }
         fprintf(stderr, "\n");
     }
 
-    if (params.embedding){
-        if (embd_inp.size() > 0) {
-            if (llama_eval(ctx, embd_inp.data(), embd_inp.size(), n_past, params.n_threads)) {
-                fprintf(stderr, "%s : failed to eval\n", __func__);
-                return 1;
-            }
-        }
-
-        const int n_embd = llama_n_embd(ctx);
-        const auto embeddings = llama_get_embeddings(ctx);
-
-        for (int i = 0; i < n_embd; i++) {
-            printf("%f ", embeddings[i]);
-        }
-        printf("\n");
+    if (embd_inp.size() > (size_t)params.n_ctx) {
+        fprintf(stderr, "%s: error: prompt is longer than the context window (%zu tokens, n_ctx = %d)\n",
+                __func__, embd_inp.size(), params.n_ctx);
+        return 1;
     }
 
+    while (!embd_inp.empty()) {
+        int n_tokens = std::min(params.n_batch, (int) embd_inp.size());
+        if (llama_eval(ctx, embd_inp.data(), n_tokens, n_past, params.n_threads)) {
+            fprintf(stderr, "%s : failed to eval\n", __func__);
+            return 1;
+        }
+        n_past += n_tokens;
+        embd_inp.erase(embd_inp.begin(), embd_inp.begin() + n_tokens);
+    }
+
+    const int n_embd = llama_n_embd(ctx);
+    const auto embeddings = llama_get_embeddings(ctx);
+
+    for (int i = 0; i < n_embd; i++) {
+        printf("%f ", embeddings[i]);
+    }
+    printf("\n");
+
     llama_print_timings(ctx);
     llama_free(ctx);
     llama_free_model(model);
diff --git a/examples/gguf/gguf.cpp b/examples/gguf/gguf.cpp
new file mode 100644
index 000000000..dee00df87
--- /dev/null
+++ b/examples/gguf/gguf.cpp
@@ -0,0 +1,246 @@
+#include "ggml.h"
+#include "llama.h"
+
+#include <cstdio>
+#include <cinttypes>
+#include <string>
+#include <sstream>
+#include <fstream>
+#include <vector>
+
+#undef MIN
+#undef MAX
+#define MIN(a, b) ((a) < (b) ? (a) : (b))
+#define MAX(a, b) ((a) > (b) ? (a) : (b))
+
+template<typename T>
+static std::string to_string(const T & val) {
+    std::stringstream ss;
+    ss << val;
+    return ss.str();
+}
+
+bool gguf_ex_write(const std::string & fname) {
+    struct gguf_context * ctx = gguf_init_empty();
+
+    gguf_set_val_u8  (ctx, "some.parameter.uint8",   0x12);
+    gguf_set_val_i8  (ctx, "some.parameter.int8",   -0x13);
+    gguf_set_val_u16 (ctx, "some.parameter.uint16",  0x1234);
+    gguf_set_val_i16 (ctx, "some.parameter.int16",  -0x1235);
+    gguf_set_val_u32 (ctx, "some.parameter.uint32",  0x12345678);
+    gguf_set_val_i32 (ctx, "some.parameter.int32",  -0x12345679);
+    gguf_set_val_f32 (ctx, "some.parameter.float32", 0.123456789f);
+    gguf_set_val_bool(ctx, "some.parameter.bool",    true);
+    gguf_set_val_str (ctx, "some.parameter.string",  "hello world");
+
+    gguf_set_arr_data(ctx, "some.parameter.arr.i16", GGUF_TYPE_INT16,   std::vector<int16_t>{ 1, 2, 3, 4, }.data(), 4);
+    gguf_set_arr_data(ctx, "some.parameter.arr.f32", GGUF_TYPE_FLOAT32, std::vector<float>{ 3.145f, 2.718f, 1.414f, }.data(), 3);
+    gguf_set_arr_str (ctx, "some.parameter.arr.str", std::vector<const char *>{ "hello", "world", "!" }.data(), 3);
+
+    struct ggml_init_params params = {
+        /*.mem_size   =*/ 128ull*1024ull*1024ull,
+        /*.mem_buffer =*/ NULL,
+        /*.no_alloc   =*/ false,
+    };
+
+    struct ggml_context * ctx_data = ggml_init(params);
+
+    const int n_tensors = 10;
+
+    // tensor infos
+    for (int i = 0; i < n_tensors; ++i) {
+        const std::string name = "tensor_" + to_string(i);
+
+        int64_t ne[GGML_MAX_DIMS] = { 1 };
+        int32_t n_dims = rand() % GGML_MAX_DIMS + 1;
+
+        for (int j = 0; j < n_dims; ++j) {
+            ne[j] = rand() % 10 + 1;
+        }
+
+        struct ggml_tensor * cur = ggml_new_tensor(ctx_data, GGML_TYPE_F32, n_dims, ne);
+        ggml_set_name(cur, name.c_str());
+
+        {
+            float * data = (float *) cur->data;
+            for (int j = 0; j < ggml_nelements(cur); ++j) {
+                data[j] = 100 + i;
+            }
+        }
+
+        gguf_add_tensor(ctx, cur);
+    }
+
+    gguf_write_to_file(ctx, fname.c_str(), false);
+
+    fprintf(stdout, "%s: wrote file '%s'\n", __func__, fname.c_str());
+
+    ggml_free(ctx_data);
+    gguf_free(ctx);
+
+    return true;
+}
+
+// just read tensor info
+bool gguf_ex_read_0(const std::string & fname) {
+    struct gguf_init_params params = {
+        /*.no_alloc = */ false,
+        /*.ctx      = */ NULL,
+    };
+
+    struct gguf_context * ctx = gguf_init_from_file(fname.c_str(), params);
+
+    fprintf(stdout, "%s: version:     %d\n", __func__, gguf_get_version(ctx));
+    fprintf(stdout, "%s: alignment:   %zu\n", __func__, gguf_get_alignment(ctx));
+    fprintf(stdout, "%s: data offset: %zu\n", __func__, gguf_get_data_offset(ctx));
+
+    // kv
+    {
+        const int n_kv = gguf_get_n_kv(ctx);
+
+        fprintf(stdout, "%s: n_kv: %d\n", __func__, n_kv);
+
+        for (int i = 0; i < n_kv; ++i) {
+            const char * key = gguf_get_key(ctx, i);
+
+            fprintf(stdout, "%s: kv[%d]: key = %s\n", __func__, i, key);
+        }
+    }
+
+    // find kv string
+    {
+        const char * findkey = "some.parameter.string";
+
+        const int keyidx = gguf_find_key(ctx, findkey);
+        if (keyidx == -1) {
+            fprintf(stdout, "%s: find key: %s not found.\n", __func__, findkey);
+        } else {
+            const char * key_value = gguf_get_val_str(ctx, keyidx);
+            fprintf(stdout, "%s: find key: %s found, kv[%d] value = %s\n", __func__, findkey, keyidx, key_value);
+        }
+    }
+
+    // tensor info
+    {
+        const int n_tensors = gguf_get_n_tensors(ctx);
+
+        fprintf(stdout, "%s: n_tensors: %d\n", __func__, n_tensors);
+
+        for (int i = 0; i < n_tensors; ++i) {
+            const char * name   = gguf_get_tensor_name  (ctx, i);
+            const size_t offset = gguf_get_tensor_offset(ctx, i);
+
+            fprintf(stdout, "%s: tensor[%d]: name = %s, offset = %zu\n",
__func__, i, name, offset); + } + } + + gguf_free(ctx); + + return true; +} + +// read and create ggml_context containing the tensors and their data +bool gguf_ex_read_1(const std::string & fname) { + struct ggml_context * ctx_data = NULL; + + struct gguf_init_params params = { + /*.no_alloc = */ false, + /*.ctx = */ &ctx_data, + }; + + struct gguf_context * ctx = gguf_init_from_file(fname.c_str(), params); + + fprintf(stdout, "%s: version: %d\n", __func__, gguf_get_version(ctx)); + fprintf(stdout, "%s: alignment: %zu\n", __func__, gguf_get_alignment(ctx)); + fprintf(stdout, "%s: data offset: %zu\n", __func__, gguf_get_data_offset(ctx)); + + // kv + { + const int n_kv = gguf_get_n_kv(ctx); + + fprintf(stdout, "%s: n_kv: %d\n", __func__, n_kv); + + for (int i = 0; i < n_kv; ++i) { + const char * key = gguf_get_key(ctx, i); + + fprintf(stdout, "%s: kv[%d]: key = %s\n", __func__, i, key); + } + } + + // tensor info + { + const int n_tensors = gguf_get_n_tensors(ctx); + + fprintf(stdout, "%s: n_tensors: %d\n", __func__, n_tensors); + + for (int i = 0; i < n_tensors; ++i) { + const char * name = gguf_get_tensor_name (ctx, i); + const size_t offset = gguf_get_tensor_offset(ctx, i); + + fprintf(stdout, "%s: tensor[%d]: name = %s, offset = %zu\n", __func__, i, name, offset); + } + } + + // data + { + const int n_tensors = gguf_get_n_tensors(ctx); + + for (int i = 0; i < n_tensors; ++i) { + fprintf(stdout, "%s: reading tensor %d data\n", __func__, i); + + const char * name = gguf_get_tensor_name(ctx, i); + + struct ggml_tensor * cur = ggml_get_tensor(ctx_data, name); + + fprintf(stdout, "%s: tensor[%d]: n_dims = %d, name = %s, data = %p\n", __func__, i, cur->n_dims, cur->name, cur->data); + + // print first 10 elements + const float * data = (const float *) cur->data; + + printf("%s data[:10] : ", name); + for (int j = 0; j < MIN(10, ggml_nelements(cur)); ++j) { + printf("%f ", data[j]); + } + printf("\n\n"); + + // check data + { + const float * data = (const float *) cur->data; + for (int j = 0; j < ggml_nelements(cur); ++j) { + if (data[j] != 100 + i) { + fprintf(stderr, "%s: tensor[%d]: data[%d] = %f\n", __func__, i, j, data[j]); + return false; + } + } + } + } + } + + fprintf(stdout, "%s: ctx_data size: %zu\n", __func__, ggml_get_mem_size(ctx_data)); + + ggml_free(ctx_data); + gguf_free(ctx); + + return true; +} + +int main(int argc, char ** argv) { + if (argc < 3) { + fprintf(stdout, "usage: %s data.gguf r|w\n", argv[0]); + return -1; + } + + const std::string fname(argv[1]); + const std::string mode (argv[2]); + + GGML_ASSERT((mode == "r" || mode == "w") && "mode must be r or w"); + + if (mode == "w") { + GGML_ASSERT(gguf_ex_write(fname) && "failed to write gguf file"); + } else if (mode == "r") { + GGML_ASSERT(gguf_ex_read_0(fname) && "failed to read gguf file"); + GGML_ASSERT(gguf_ex_read_1(fname) && "failed to read gguf file"); + } + + return 0; +} diff --git a/examples/gptneox-wip/cmpnct_gpt2bpe.hpp b/examples/gptneox-wip/cmpnct_gpt2bpe.hpp new file mode 100644 index 000000000..9d433f4b1 --- /dev/null +++ b/examples/gptneox-wip/cmpnct_gpt2bpe.hpp @@ -0,0 +1,1133 @@ +#ifndef CMPNCT_GPT2BPE +#define CMPNCT_GPT2BPE + +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +// Unicode GPT2 Byte Pair Encoding Tokenizer +// Adapted from https://github.com/cmp-nct/ggllm.cpp [MIT License] +// Removed loading of merges from HF json and parts made for a specific vocab + + +//----------------- +// Unicode library (from cmpnct_unicode.cpp) +//----------------- + 
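Before the range tables, a quick sketch of the two primitives this header builds on, matching the utf8_len and check_code_range signatures declared in CNCTUnicode just below. This is an assumed reference implementation for illustration, not the one shipped in cmpnct_gpt2bpe.hpp.

#include <vector>
#include <utility>

// Number of bytes in the UTF-8 character that starts with byte c,
// decoded from the leading-bit pattern of the first byte.
static int utf8_len(const char c) {
    const unsigned char b = (unsigned char) c;
    if (b < 0x80)          return 1; // ASCII
    if ((b & 0xE0) == 0xC0) return 2;
    if ((b & 0xF0) == 0xE0) return 3;
    if ((b & 0xF8) == 0xF0) return 4;
    return 1; // invalid leading byte: treat as a single char
}

// Scan sorted (first, last) codepoint ranges like the tables below;
// ranges are ascending, so we can stop early once c falls before a range.
static bool in_ranges(int c, const std::vector<std::pair<int, int>> & ranges) {
    for (const auto & r : ranges) {
        if (c >= r.first && c <= r.second) return true;
        if (c < r.first) break;
    }
    return false;
}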
+
+// Minimal library for high performance handling and categorization of UTF8 strings and characters
+// Using std::string
+
+enum CNCTCharType {
+    DIGIT,          // a numerical char in any language
+    LETTER,         // a letter in any language
+    WHITESPACE,     // any form of whitespace
+    ACCENT_MARK,    // letter modifiers like ´ in é
+    PUNCTUATION,    // punctuation including brackets
+    SYMBOL,         // math, currency, other symbols
+    CONTROL,        // control characters
+    MIXED,          // a mix of the above
+    UNIDENTIFIED    // something more exotic like emoji or separators
+};
+
+struct CNCTUnicode;
+
+struct CNCTString {
+    std::string str;
+    size_t utf8_chars;
+
+    CNCTCharType char_type=UNIDENTIFIED;
+    bool is_sequential=false;
+
+    size_t seq_offset_bytes=0;
+    size_t seq_offset_utf8_chars=0;
+
+    bool operator==(const std::string &other) const;
+    bool operator==(const char other) const;
+    bool operator==(const CNCTString &other) const;
+    CNCTString &operator+=(const std::string &other);
+    CNCTString &operator+=(const char other);
+    friend CNCTString operator+(CNCTString lhs, const std::string &rhs);
+    friend CNCTString operator+(CNCTString lhs, const char rhs);
+    CNCTString& operator+=(const CNCTString& other);
+    friend CNCTString operator+(CNCTString lhs, const CNCTString& rhs);
+};
+
+struct CNCTUnicode {
+    static bool check_code_range(int c, const std::vector<std::pair<int, int>>& ranges);
+    static CNCTCharType get_code_type(int c);
+    static CNCTCharType get_code_type(const std::string &utf8_char);
+    static int utf8_len(const char c);
+    static int strlen_utf8(std::string src);
+    static std::vector<std::string> split_utf8(const std::string &src);
+    static std::vector<CNCTString> split_utf8_enhanced(const std::string &src);
+    static CNCTCharType string_identify(const std::string& str);
+    static bool string_test(const std::string& str, CNCTCharType chartype);
+};
+
+static const std::vector<std::pair<int, int>> digit_ranges = {
+{0x30, 0x39}, {0xB2, 0xB3}, {0xB9, 0xB9}, {0x660, 0x669}, {0x6F0, 0x6F9}, {0x7C0, 0x7C9}, {0x966, 0x96F}, {0x9E6, 0x9EF}, {0xA66, 0xA6F}, {0xAE6, 0xAEF}, {0xB66, 0xB6F}, {0xBE6, 0xBEF}, {0xC66, 0xC6F},
+{0xCE6, 0xCEF}, {0xD66, 0xD6F}, {0xDE6, 0xDEF}, {0xE50, 0xE59}, {0xED0, 0xED9}, {0xF20, 0xF29}, {0x1040, 0x1049}, {0x1090, 0x1099}, {0x1369, 0x1371}, {0x17E0, 0x17E9}, {0x1810, 0x1819}, {0x1946, 0x194F},
+{0x19D0, 0x19DA}, {0x1A80, 0x1A89}, {0x1A90, 0x1A99}, {0x1B50, 0x1B59}, {0x1BB0, 0x1BB9}, {0x1C40, 0x1C49}, {0x1C50, 0x1C59}, {0x2070, 0x2070}, {0x2074, 0x2079}, {0x2080, 0x2089}, {0x2460, 0x2468},
+{0x2474, 0x247C}, {0x2488, 0x2490}, {0x24EA, 0x24EA}, {0x24F5, 0x24FD}, {0x24FF, 0x24FF}, {0x2776, 0x277E}, {0x2780, 0x2788}, {0x278A, 0x2792}, {0xA620, 0xA629}, {0xA8D0, 0xA8D9}, {0xA900, 0xA909},
+{0xA9D0, 0xA9D9}, {0xA9F0, 0xA9F9}, {0xAA50, 0xAA59}, {0xABF0, 0xABF9}, {0xFF10, 0xFF19}, {0x104A0, 0x104A9}, {0x10A40, 0x10A43}, {0x10D30, 0x10D39}, {0x10E60, 0x10E68}, {0x11052, 0x1105A},
+{0x11066, 0x1106F}, {0x110F0, 0x110F9}, {0x11136, 0x1113F}, {0x111D0, 0x111D9}, {0x112F0, 0x112F9}, {0x11450, 0x11459}, {0x114D0, 0x114D9}, {0x11650, 0x11659}, {0x116C0, 0x116C9}, {0x11730, 0x11739},
+{0x118E0, 0x118E9}, {0x11950, 0x11959}, {0x11C50, 0x11C59}, {0x11D50, 0x11D59}, {0x11DA0, 0x11DA9}, {0x16A60, 0x16A69}, {0x16B50, 0x16B59}, {0x1D7CE, 0x1D7FF}, {0x1E140, 0x1E149}, {0x1E2F0, 0x1E2F9},
+{0x1E950, 0x1E959}, {0x1F100, 0x1F10A}, {0x1FBF0, 0x1FBF9},
+};
+
+static const std::vector<std::pair<int, int>> letter_ranges = {
+{0x41, 0x5A}, {0x61, 0x7A}, {0xAA, 0xAA}, {0xB5, 0xB5}, {0xBA, 0xBA}, {0xC0, 0xD6}, {0xD8, 0xF6}, {0xF8, 0x2C1}, {0x2C6, 0x2D1}, {0x2E0, 0x2E4}, {0x2EC, 0x2EC}, {0x2EE, 0x2EE}, {0x370,
0x374}, +{0x376, 0x377}, {0x37A, 0x37D}, {0x37F, 0x37F}, {0x386, 0x386}, {0x388, 0x38A}, {0x38C, 0x38C}, {0x38E, 0x3A1}, {0x3A3, 0x3F5}, {0x3F7, 0x481}, {0x48A, 0x52F}, {0x531, 0x556}, {0x559, 0x559}, +{0x560, 0x588}, {0x5D0, 0x5EA}, {0x5EF, 0x5F2}, {0x620, 0x64A}, {0x66E, 0x66F}, {0x671, 0x6D3}, {0x6D5, 0x6D5}, {0x6E5, 0x6E6}, {0x6EE, 0x6EF}, {0x6FA, 0x6FC}, {0x6FF, 0x6FF}, {0x710, 0x710}, +{0x712, 0x72F}, {0x74D, 0x7A5}, {0x7B1, 0x7B1}, {0x7CA, 0x7EA}, {0x7F4, 0x7F5}, {0x7FA, 0x7FA}, {0x800, 0x815}, {0x81A, 0x81A}, {0x824, 0x824}, {0x828, 0x828}, {0x840, 0x858}, {0x860, 0x86A}, +{0x8A0, 0x8B4}, {0x8B6, 0x8C7}, {0x904, 0x939}, {0x93D, 0x93D}, {0x950, 0x950}, {0x958, 0x961}, {0x971, 0x980}, {0x985, 0x98C}, {0x98F, 0x990}, {0x993, 0x9A8}, {0x9AA, 0x9B0}, {0x9B2, 0x9B2}, +{0x9B6, 0x9B9}, {0x9BD, 0x9BD}, {0x9CE, 0x9CE}, {0x9DC, 0x9DD}, {0x9DF, 0x9E1}, {0x9F0, 0x9F1}, {0x9FC, 0x9FC}, {0xA05, 0xA0A}, {0xA0F, 0xA10}, {0xA13, 0xA28}, {0xA2A, 0xA30}, {0xA32, 0xA33}, +{0xA35, 0xA36}, {0xA38, 0xA39}, {0xA59, 0xA5C}, {0xA5E, 0xA5E}, {0xA72, 0xA74}, {0xA85, 0xA8D}, {0xA8F, 0xA91}, {0xA93, 0xAA8}, {0xAAA, 0xAB0}, {0xAB2, 0xAB3}, {0xAB5, 0xAB9}, {0xABD, 0xABD}, +{0xAD0, 0xAD0}, {0xAE0, 0xAE1}, {0xAF9, 0xAF9}, {0xB05, 0xB0C}, {0xB0F, 0xB10}, {0xB13, 0xB28}, {0xB2A, 0xB30}, {0xB32, 0xB33}, {0xB35, 0xB39}, {0xB3D, 0xB3D}, {0xB5C, 0xB5D}, {0xB5F, 0xB61}, +{0xB71, 0xB71}, {0xB83, 0xB83}, {0xB85, 0xB8A}, {0xB8E, 0xB90}, {0xB92, 0xB95}, {0xB99, 0xB9A}, {0xB9C, 0xB9C}, {0xB9E, 0xB9F}, {0xBA3, 0xBA4}, {0xBA8, 0xBAA}, {0xBAE, 0xBB9}, {0xBD0, 0xBD0}, +{0xC05, 0xC0C}, {0xC0E, 0xC10}, {0xC12, 0xC28}, {0xC2A, 0xC39}, {0xC3D, 0xC3D}, {0xC58, 0xC5A}, {0xC60, 0xC61}, {0xC80, 0xC80}, {0xC85, 0xC8C}, {0xC8E, 0xC90}, {0xC92, 0xCA8}, {0xCAA, 0xCB3}, +{0xCB5, 0xCB9}, {0xCBD, 0xCBD}, {0xCDE, 0xCDE}, {0xCE0, 0xCE1}, {0xCF1, 0xCF2}, {0xD04, 0xD0C}, {0xD0E, 0xD10}, {0xD12, 0xD3A}, {0xD3D, 0xD3D}, {0xD4E, 0xD4E}, {0xD54, 0xD56}, {0xD5F, 0xD61}, +{0xD7A, 0xD7F}, {0xD85, 0xD96}, {0xD9A, 0xDB1}, {0xDB3, 0xDBB}, {0xDBD, 0xDBD}, {0xDC0, 0xDC6}, {0xE01, 0xE30}, {0xE32, 0xE33}, {0xE40, 0xE46}, {0xE81, 0xE82}, {0xE84, 0xE84}, {0xE86, 0xE8A}, +{0xE8C, 0xEA3}, {0xEA5, 0xEA5}, {0xEA7, 0xEB0}, {0xEB2, 0xEB3}, {0xEBD, 0xEBD}, {0xEC0, 0xEC4}, {0xEC6, 0xEC6}, {0xEDC, 0xEDF}, {0xF00, 0xF00}, {0xF40, 0xF47}, {0xF49, 0xF6C}, {0xF88, 0xF8C}, +{0x1000, 0x102A}, {0x103F, 0x103F}, {0x1050, 0x1055}, {0x105A, 0x105D}, {0x1061, 0x1061}, {0x1065, 0x1066}, {0x106E, 0x1070}, {0x1075, 0x1081}, {0x108E, 0x108E}, {0x10A0, 0x10C5}, {0x10C7, 0x10C7}, +{0x10CD, 0x10CD}, {0x10D0, 0x10FA}, {0x10FC, 0x1248}, {0x124A, 0x124D}, {0x1250, 0x1256}, {0x1258, 0x1258}, {0x125A, 0x125D}, {0x1260, 0x1288}, {0x128A, 0x128D}, {0x1290, 0x12B0}, {0x12B2, 0x12B5}, +{0x12B8, 0x12BE}, {0x12C0, 0x12C0}, {0x12C2, 0x12C5}, {0x12C8, 0x12D6}, {0x12D8, 0x1310}, {0x1312, 0x1315}, {0x1318, 0x135A}, {0x1380, 0x138F}, {0x13A0, 0x13F5}, {0x13F8, 0x13FD}, {0x1401, 0x166C}, +{0x166F, 0x167F}, {0x1681, 0x169A}, {0x16A0, 0x16EA}, {0x16F1, 0x16F8}, {0x1700, 0x170C}, {0x170E, 0x1711}, {0x1720, 0x1731}, {0x1740, 0x1751}, {0x1760, 0x176C}, {0x176E, 0x1770}, {0x1780, 0x17B3}, +{0x17D7, 0x17D7}, {0x17DC, 0x17DC}, {0x1820, 0x1878}, {0x1880, 0x1884}, {0x1887, 0x18A8}, {0x18AA, 0x18AA}, {0x18B0, 0x18F5}, {0x1900, 0x191E}, {0x1950, 0x196D}, {0x1970, 0x1974}, {0x1980, 0x19AB}, +{0x19B0, 0x19C9}, {0x1A00, 0x1A16}, {0x1A20, 0x1A54}, {0x1AA7, 0x1AA7}, {0x1B05, 0x1B33}, {0x1B45, 0x1B4B}, {0x1B83, 0x1BA0}, {0x1BAE, 0x1BAF}, {0x1BBA, 0x1BE5}, {0x1C00, 0x1C23}, {0x1C4D, 0x1C4F}, +{0x1C5A, 0x1C7D}, {0x1C80, 0x1C88}, 
{0x1C90, 0x1CBA}, {0x1CBD, 0x1CBF}, {0x1CE9, 0x1CEC}, {0x1CEE, 0x1CF3}, {0x1CF5, 0x1CF6}, {0x1CFA, 0x1CFA}, {0x1D00, 0x1DBF}, {0x1E00, 0x1F15}, {0x1F18, 0x1F1D}, +{0x1F20, 0x1F45}, {0x1F48, 0x1F4D}, {0x1F50, 0x1F57}, {0x1F59, 0x1F59}, {0x1F5B, 0x1F5B}, {0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D}, {0x1F80, 0x1FB4}, {0x1FB6, 0x1FBC}, {0x1FBE, 0x1FBE}, {0x1FC2, 0x1FC4}, +{0x1FC6, 0x1FCC}, {0x1FD0, 0x1FD3}, {0x1FD6, 0x1FDB}, {0x1FE0, 0x1FEC}, {0x1FF2, 0x1FF4}, {0x1FF6, 0x1FFC}, {0x2071, 0x2071}, {0x207F, 0x207F}, {0x2090, 0x209C}, {0x2102, 0x2102}, {0x2107, 0x2107}, +{0x210A, 0x2113}, {0x2115, 0x2115}, {0x2119, 0x211D}, {0x2124, 0x2124}, {0x2126, 0x2126}, {0x2128, 0x2128}, {0x212A, 0x212D}, {0x212F, 0x2139}, {0x213C, 0x213F}, {0x2145, 0x2149}, {0x214E, 0x214E}, +{0x2183, 0x2184}, {0x2C00, 0x2C2E}, {0x2C30, 0x2C5E}, {0x2C60, 0x2CE4}, {0x2CEB, 0x2CEE}, {0x2CF2, 0x2CF3}, {0x2D00, 0x2D25}, {0x2D27, 0x2D27}, {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67}, {0x2D6F, 0x2D6F}, +{0x2D80, 0x2D96}, {0x2DA0, 0x2DA6}, {0x2DA8, 0x2DAE}, {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE}, {0x2DC0, 0x2DC6}, {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6}, {0x2DD8, 0x2DDE}, {0x2E2F, 0x2E2F}, {0x3005, 0x3006}, +{0x3031, 0x3035}, {0x303B, 0x303C}, {0x3041, 0x3096}, {0x309D, 0x309F}, {0x30A1, 0x30FA}, {0x30FC, 0x30FF}, {0x3105, 0x312F}, {0x3131, 0x318E}, {0x31A0, 0x31BF}, {0x31F0, 0x31FF}, {0x3400, 0x4DBF}, +{0x4E00, 0x9FFC}, {0xA000, 0xA48C}, {0xA4D0, 0xA4FD}, {0xA500, 0xA60C}, {0xA610, 0xA61F}, {0xA62A, 0xA62B}, {0xA640, 0xA66E}, {0xA67F, 0xA69D}, {0xA6A0, 0xA6E5}, {0xA717, 0xA71F}, {0xA722, 0xA788}, +{0xA78B, 0xA7BF}, {0xA7C2, 0xA7CA}, {0xA7F5, 0xA801}, {0xA803, 0xA805}, {0xA807, 0xA80A}, {0xA80C, 0xA822}, {0xA840, 0xA873}, {0xA882, 0xA8B3}, {0xA8F2, 0xA8F7}, {0xA8FB, 0xA8FB}, {0xA8FD, 0xA8FE}, +{0xA90A, 0xA925}, {0xA930, 0xA946}, {0xA960, 0xA97C}, {0xA984, 0xA9B2}, {0xA9CF, 0xA9CF}, {0xA9E0, 0xA9E4}, {0xA9E6, 0xA9EF}, {0xA9FA, 0xA9FE}, {0xAA00, 0xAA28}, {0xAA40, 0xAA42}, {0xAA44, 0xAA4B}, +{0xAA60, 0xAA76}, {0xAA7A, 0xAA7A}, {0xAA7E, 0xAAAF}, {0xAAB1, 0xAAB1}, {0xAAB5, 0xAAB6}, {0xAAB9, 0xAABD}, {0xAAC0, 0xAAC0}, {0xAAC2, 0xAAC2}, {0xAADB, 0xAADD}, {0xAAE0, 0xAAEA}, {0xAAF2, 0xAAF4}, +{0xAB01, 0xAB06}, {0xAB09, 0xAB0E}, {0xAB11, 0xAB16}, {0xAB20, 0xAB26}, {0xAB28, 0xAB2E}, {0xAB30, 0xAB5A}, {0xAB5C, 0xAB69}, {0xAB70, 0xABE2}, {0xAC00, 0xD7A3}, {0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB}, +{0xF900, 0xFA6D}, {0xFA70, 0xFAD9}, {0xFB00, 0xFB06}, {0xFB13, 0xFB17}, {0xFB1D, 0xFB1D}, {0xFB1F, 0xFB28}, {0xFB2A, 0xFB36}, {0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E}, {0xFB40, 0xFB41}, {0xFB43, 0xFB44}, +{0xFB46, 0xFBB1}, {0xFBD3, 0xFD3D}, {0xFD50, 0xFD8F}, {0xFD92, 0xFDC7}, {0xFDF0, 0xFDFB}, {0xFE70, 0xFE74}, {0xFE76, 0xFEFC}, {0xFF21, 0xFF3A}, {0xFF41, 0xFF5A}, {0xFF66, 0xFFBE}, {0xFFC2, 0xFFC7}, +{0xFFCA, 0xFFCF}, {0xFFD2, 0xFFD7}, {0xFFDA, 0xFFDC}, {0x10000, 0x1000B}, {0x1000D, 0x10026}, {0x10028, 0x1003A}, {0x1003C, 0x1003D}, {0x1003F, 0x1004D}, {0x10050, 0x1005D}, {0x10080, 0x100FA}, +{0x10280, 0x1029C}, {0x102A0, 0x102D0}, {0x10300, 0x1031F}, {0x1032D, 0x10340}, {0x10342, 0x10349}, {0x10350, 0x10375}, {0x10380, 0x1039D}, {0x103A0, 0x103C3}, {0x103C8, 0x103CF}, {0x10400, 0x1049D}, +{0x104B0, 0x104D3}, {0x104D8, 0x104FB}, {0x10500, 0x10527}, {0x10530, 0x10563}, {0x10600, 0x10736}, {0x10740, 0x10755}, {0x10760, 0x10767}, {0x10800, 0x10805}, {0x10808, 0x10808}, {0x1080A, 0x10835}, +{0x10837, 0x10838}, {0x1083C, 0x1083C}, {0x1083F, 0x10855}, {0x10860, 0x10876}, {0x10880, 0x1089E}, {0x108E0, 0x108F2}, {0x108F4, 0x108F5}, {0x10900, 0x10915}, {0x10920, 0x10939}, {0x10980, 0x109B7}, 
+{0x109BE, 0x109BF}, {0x10A00, 0x10A00}, {0x10A10, 0x10A13}, {0x10A15, 0x10A17}, {0x10A19, 0x10A35}, {0x10A60, 0x10A7C}, {0x10A80, 0x10A9C}, {0x10AC0, 0x10AC7}, {0x10AC9, 0x10AE4}, {0x10B00, 0x10B35}, +{0x10B40, 0x10B55}, {0x10B60, 0x10B72}, {0x10B80, 0x10B91}, {0x10C00, 0x10C48}, {0x10C80, 0x10CB2}, {0x10CC0, 0x10CF2}, {0x10D00, 0x10D23}, {0x10E80, 0x10EA9}, {0x10EB0, 0x10EB1}, {0x10F00, 0x10F1C}, +{0x10F27, 0x10F27}, {0x10F30, 0x10F45}, {0x10FB0, 0x10FC4}, {0x10FE0, 0x10FF6}, {0x11003, 0x11037}, {0x11083, 0x110AF}, {0x110D0, 0x110E8}, {0x11103, 0x11126}, {0x11144, 0x11144}, {0x11147, 0x11147}, +{0x11150, 0x11172}, {0x11176, 0x11176}, {0x11183, 0x111B2}, {0x111C1, 0x111C4}, {0x111DA, 0x111DA}, {0x111DC, 0x111DC}, {0x11200, 0x11211}, {0x11213, 0x1122B}, {0x11280, 0x11286}, {0x11288, 0x11288}, +{0x1128A, 0x1128D}, {0x1128F, 0x1129D}, {0x1129F, 0x112A8}, {0x112B0, 0x112DE}, {0x11305, 0x1130C}, {0x1130F, 0x11310}, {0x11313, 0x11328}, {0x1132A, 0x11330}, {0x11332, 0x11333}, {0x11335, 0x11339}, +{0x1133D, 0x1133D}, {0x11350, 0x11350}, {0x1135D, 0x11361}, {0x11400, 0x11434}, {0x11447, 0x1144A}, {0x1145F, 0x11461}, {0x11480, 0x114AF}, {0x114C4, 0x114C5}, {0x114C7, 0x114C7}, {0x11580, 0x115AE}, +{0x115D8, 0x115DB}, {0x11600, 0x1162F}, {0x11644, 0x11644}, {0x11680, 0x116AA}, {0x116B8, 0x116B8}, {0x11700, 0x1171A}, {0x11800, 0x1182B}, {0x118A0, 0x118DF}, {0x118FF, 0x11906}, {0x11909, 0x11909}, +{0x1190C, 0x11913}, {0x11915, 0x11916}, {0x11918, 0x1192F}, {0x1193F, 0x1193F}, {0x11941, 0x11941}, {0x119A0, 0x119A7}, {0x119AA, 0x119D0}, {0x119E1, 0x119E1}, {0x119E3, 0x119E3}, {0x11A00, 0x11A00}, +{0x11A0B, 0x11A32}, {0x11A3A, 0x11A3A}, {0x11A50, 0x11A50}, {0x11A5C, 0x11A89}, {0x11A9D, 0x11A9D}, {0x11AC0, 0x11AF8}, {0x11C00, 0x11C08}, {0x11C0A, 0x11C2E}, {0x11C40, 0x11C40}, {0x11C72, 0x11C8F}, +{0x11D00, 0x11D06}, {0x11D08, 0x11D09}, {0x11D0B, 0x11D30}, {0x11D46, 0x11D46}, {0x11D60, 0x11D65}, {0x11D67, 0x11D68}, {0x11D6A, 0x11D89}, {0x11D98, 0x11D98}, {0x11EE0, 0x11EF2}, {0x11FB0, 0x11FB0}, +{0x12000, 0x12399}, {0x12480, 0x12543}, {0x13000, 0x1342E}, {0x14400, 0x14646}, {0x16800, 0x16A38}, {0x16A40, 0x16A5E}, {0x16AD0, 0x16AED}, {0x16B00, 0x16B2F}, {0x16B40, 0x16B43}, {0x16B63, 0x16B77}, +{0x16B7D, 0x16B8F}, {0x16E40, 0x16E7F}, {0x16F00, 0x16F4A}, {0x16F50, 0x16F50}, {0x16F93, 0x16F9F}, {0x16FE0, 0x16FE1}, {0x16FE3, 0x16FE3}, {0x17000, 0x187F7}, {0x18800, 0x18CD5}, {0x18D00, 0x18D08}, +{0x1B000, 0x1B11E}, {0x1B150, 0x1B152}, {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB}, {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, {0x1BC80, 0x1BC88}, {0x1BC90, 0x1BC99}, {0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, +{0x1D49E, 0x1D49F}, {0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC}, {0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3}, {0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514}, +{0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E}, {0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550}, {0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D6C0}, {0x1D6C2, 0x1D6DA}, {0x1D6DC, 0x1D6FA}, +{0x1D6FC, 0x1D714}, {0x1D716, 0x1D734}, {0x1D736, 0x1D74E}, {0x1D750, 0x1D76E}, {0x1D770, 0x1D788}, {0x1D78A, 0x1D7A8}, {0x1D7AA, 0x1D7C2}, {0x1D7C4, 0x1D7CB}, {0x1E100, 0x1E12C}, {0x1E137, 0x1E13D}, +{0x1E14E, 0x1E14E}, {0x1E2C0, 0x1E2EB}, {0x1E800, 0x1E8C4}, {0x1E900, 0x1E943}, {0x1E94B, 0x1E94B}, {0x1EE00, 0x1EE03}, {0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22}, {0x1EE24, 0x1EE24}, {0x1EE27, 0x1EE27}, +{0x1EE29, 0x1EE32}, {0x1EE34, 0x1EE37}, {0x1EE39, 0x1EE39}, {0x1EE3B, 0x1EE3B}, {0x1EE42, 0x1EE42}, {0x1EE47, 0x1EE47}, {0x1EE49, 
0x1EE49}, {0x1EE4B, 0x1EE4B}, {0x1EE4D, 0x1EE4F}, {0x1EE51, 0x1EE52}, +{0x1EE54, 0x1EE54}, {0x1EE57, 0x1EE57}, {0x1EE59, 0x1EE59}, {0x1EE5B, 0x1EE5B}, {0x1EE5D, 0x1EE5D}, {0x1EE5F, 0x1EE5F}, {0x1EE61, 0x1EE62}, {0x1EE64, 0x1EE64}, {0x1EE67, 0x1EE6A}, {0x1EE6C, 0x1EE72}, +{0x1EE74, 0x1EE77}, {0x1EE79, 0x1EE7C}, {0x1EE7E, 0x1EE7E}, {0x1EE80, 0x1EE89}, {0x1EE8B, 0x1EE9B}, {0x1EEA1, 0x1EEA3}, {0x1EEA5, 0x1EEA9}, {0x1EEAB, 0x1EEBB}, {0x20000, 0x2A6DD}, {0x2A700, 0x2B734}, +{0x2B740, 0x2B81D}, {0x2B820, 0x2CEA1}, {0x2CEB0, 0x2EBE0}, {0x2F800, 0x2FA1D}, {0x30000, 0x3134A}, +}; + +static const std::vector> whitespace_ranges = { +{0x9, 0xD}, {0x1C, 0x20}, {0x85, 0x85}, {0xA0, 0xA0}, {0x1680, 0x1680}, {0x2000, 0x200A}, {0x2028, 0x2029}, {0x202F, 0x202F}, {0x205F, 0x205F}, {0x3000, 0x3000}, +}; + +static const std::vector> accent_mark_ranges = { +{0x300, 0x36F}, {0x483, 0x489}, {0x591, 0x5BD}, {0x5BF, 0x5BF}, {0x5C1, 0x5C2}, {0x5C4, 0x5C5}, {0x5C7, 0x5C7}, {0x610, 0x61A}, {0x64B, 0x65F}, {0x670, 0x670}, {0x6D6, 0x6DC}, {0x6DF, 0x6E4}, +{0x6E7, 0x6E8}, {0x6EA, 0x6ED}, {0x711, 0x711}, {0x730, 0x74A}, {0x7A6, 0x7B0}, {0x7EB, 0x7F3}, {0x7FD, 0x7FD}, {0x816, 0x819}, {0x81B, 0x823}, {0x825, 0x827}, {0x829, 0x82D}, {0x859, 0x85B}, +{0x8D3, 0x8E1}, {0x8E3, 0x903}, {0x93A, 0x93C}, {0x93E, 0x94F}, {0x951, 0x957}, {0x962, 0x963}, {0x981, 0x983}, {0x9BC, 0x9BC}, {0x9BE, 0x9C4}, {0x9C7, 0x9C8}, {0x9CB, 0x9CD}, {0x9D7, 0x9D7}, +{0x9E2, 0x9E3}, {0x9FE, 0x9FE}, {0xA01, 0xA03}, {0xA3C, 0xA3C}, {0xA3E, 0xA42}, {0xA47, 0xA48}, {0xA4B, 0xA4D}, {0xA51, 0xA51}, {0xA70, 0xA71}, {0xA75, 0xA75}, {0xA81, 0xA83}, {0xABC, 0xABC}, +{0xABE, 0xAC5}, {0xAC7, 0xAC9}, {0xACB, 0xACD}, {0xAE2, 0xAE3}, {0xAFA, 0xAFF}, {0xB01, 0xB03}, {0xB3C, 0xB3C}, {0xB3E, 0xB44}, {0xB47, 0xB48}, {0xB4B, 0xB4D}, {0xB55, 0xB57}, {0xB62, 0xB63}, +{0xB82, 0xB82}, {0xBBE, 0xBC2}, {0xBC6, 0xBC8}, {0xBCA, 0xBCD}, {0xBD7, 0xBD7}, {0xC00, 0xC04}, {0xC3E, 0xC44}, {0xC46, 0xC48}, {0xC4A, 0xC4D}, {0xC55, 0xC56}, {0xC62, 0xC63}, {0xC81, 0xC83}, +{0xCBC, 0xCBC}, {0xCBE, 0xCC4}, {0xCC6, 0xCC8}, {0xCCA, 0xCCD}, {0xCD5, 0xCD6}, {0xCE2, 0xCE3}, {0xD00, 0xD03}, {0xD3B, 0xD3C}, {0xD3E, 0xD44}, {0xD46, 0xD48}, {0xD4A, 0xD4D}, {0xD57, 0xD57}, +{0xD62, 0xD63}, {0xD81, 0xD83}, {0xDCA, 0xDCA}, {0xDCF, 0xDD4}, {0xDD6, 0xDD6}, {0xDD8, 0xDDF}, {0xDF2, 0xDF3}, {0xE31, 0xE31}, {0xE34, 0xE3A}, {0xE47, 0xE4E}, {0xEB1, 0xEB1}, {0xEB4, 0xEBC}, +{0xEC8, 0xECD}, {0xF18, 0xF19}, {0xF35, 0xF35}, {0xF37, 0xF37}, {0xF39, 0xF39}, {0xF3E, 0xF3F}, {0xF71, 0xF84}, {0xF86, 0xF87}, {0xF8D, 0xF97}, {0xF99, 0xFBC}, {0xFC6, 0xFC6}, {0x102B, 0x103E}, +{0x1056, 0x1059}, {0x105E, 0x1060}, {0x1062, 0x1064}, {0x1067, 0x106D}, {0x1071, 0x1074}, {0x1082, 0x108D}, {0x108F, 0x108F}, {0x109A, 0x109D}, {0x135D, 0x135F}, {0x1712, 0x1714}, {0x1732, 0x1734}, +{0x1752, 0x1753}, {0x1772, 0x1773}, {0x17B4, 0x17D3}, {0x17DD, 0x17DD}, {0x180B, 0x180D}, {0x1885, 0x1886}, {0x18A9, 0x18A9}, {0x1920, 0x192B}, {0x1930, 0x193B}, {0x1A17, 0x1A1B}, {0x1A55, 0x1A5E}, +{0x1A60, 0x1A7C}, {0x1A7F, 0x1A7F}, {0x1AB0, 0x1AC0}, {0x1B00, 0x1B04}, {0x1B34, 0x1B44}, {0x1B6B, 0x1B73}, {0x1B80, 0x1B82}, {0x1BA1, 0x1BAD}, {0x1BE6, 0x1BF3}, {0x1C24, 0x1C37}, {0x1CD0, 0x1CD2}, +{0x1CD4, 0x1CE8}, {0x1CED, 0x1CED}, {0x1CF4, 0x1CF4}, {0x1CF7, 0x1CF9}, {0x1DC0, 0x1DF9}, {0x1DFB, 0x1DFF}, {0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2D7F, 0x2D7F}, {0x2DE0, 0x2DFF}, {0x302A, 0x302F}, +{0x3099, 0x309A}, {0xA66F, 0xA672}, {0xA674, 0xA67D}, {0xA69E, 0xA69F}, {0xA6F0, 0xA6F1}, {0xA802, 0xA802}, {0xA806, 0xA806}, {0xA80B, 0xA80B}, {0xA823, 0xA827}, {0xA82C, 
0xA82C}, {0xA880, 0xA881}, +{0xA8B4, 0xA8C5}, {0xA8E0, 0xA8F1}, {0xA8FF, 0xA8FF}, {0xA926, 0xA92D}, {0xA947, 0xA953}, {0xA980, 0xA983}, {0xA9B3, 0xA9C0}, {0xA9E5, 0xA9E5}, {0xAA29, 0xAA36}, {0xAA43, 0xAA43}, {0xAA4C, 0xAA4D}, +{0xAA7B, 0xAA7D}, {0xAAB0, 0xAAB0}, {0xAAB2, 0xAAB4}, {0xAAB7, 0xAAB8}, {0xAABE, 0xAABF}, {0xAAC1, 0xAAC1}, {0xAAEB, 0xAAEF}, {0xAAF5, 0xAAF6}, {0xABE3, 0xABEA}, {0xABEC, 0xABED}, {0xFB1E, 0xFB1E}, +{0xFE00, 0xFE0F}, {0xFE20, 0xFE2F}, {0x101FD, 0x101FD}, {0x102E0, 0x102E0}, {0x10376, 0x1037A}, {0x10A01, 0x10A03}, {0x10A05, 0x10A06}, {0x10A0C, 0x10A0F}, {0x10A38, 0x10A3A}, {0x10A3F, 0x10A3F}, +{0x10AE5, 0x10AE6}, {0x10D24, 0x10D27}, {0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x11000, 0x11002}, {0x11038, 0x11046}, {0x1107F, 0x11082}, {0x110B0, 0x110BA}, {0x11100, 0x11102}, {0x11127, 0x11134}, +{0x11145, 0x11146}, {0x11173, 0x11173}, {0x11180, 0x11182}, {0x111B3, 0x111C0}, {0x111C9, 0x111CC}, {0x111CE, 0x111CF}, {0x1122C, 0x11237}, {0x1123E, 0x1123E}, {0x112DF, 0x112EA}, {0x11300, 0x11303}, +{0x1133B, 0x1133C}, {0x1133E, 0x11344}, {0x11347, 0x11348}, {0x1134B, 0x1134D}, {0x11357, 0x11357}, {0x11362, 0x11363}, {0x11366, 0x1136C}, {0x11370, 0x11374}, {0x11435, 0x11446}, {0x1145E, 0x1145E}, +{0x114B0, 0x114C3}, {0x115AF, 0x115B5}, {0x115B8, 0x115C0}, {0x115DC, 0x115DD}, {0x11630, 0x11640}, {0x116AB, 0x116B7}, {0x1171D, 0x1172B}, {0x1182C, 0x1183A}, {0x11930, 0x11935}, {0x11937, 0x11938}, +{0x1193B, 0x1193E}, {0x11940, 0x11940}, {0x11942, 0x11943}, {0x119D1, 0x119D7}, {0x119DA, 0x119E0}, {0x119E4, 0x119E4}, {0x11A01, 0x11A0A}, {0x11A33, 0x11A39}, {0x11A3B, 0x11A3E}, {0x11A47, 0x11A47}, +{0x11A51, 0x11A5B}, {0x11A8A, 0x11A99}, {0x11C2F, 0x11C36}, {0x11C38, 0x11C3F}, {0x11C92, 0x11CA7}, {0x11CA9, 0x11CB6}, {0x11D31, 0x11D36}, {0x11D3A, 0x11D3A}, {0x11D3C, 0x11D3D}, {0x11D3F, 0x11D45}, +{0x11D47, 0x11D47}, {0x11D8A, 0x11D8E}, {0x11D90, 0x11D91}, {0x11D93, 0x11D97}, {0x11EF3, 0x11EF6}, {0x16AF0, 0x16AF4}, {0x16B30, 0x16B36}, {0x16F4F, 0x16F4F}, {0x16F51, 0x16F87}, {0x16F8F, 0x16F92}, +{0x16FE4, 0x16FE4}, {0x16FF0, 0x16FF1}, {0x1BC9D, 0x1BC9E}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172}, {0x1D17B, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD}, {0x1D242, 0x1D244}, {0x1DA00, 0x1DA36}, +{0x1DA3B, 0x1DA6C}, {0x1DA75, 0x1DA75}, {0x1DA84, 0x1DA84}, {0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006}, {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, {0x1E026, 0x1E02A}, +{0x1E130, 0x1E136}, {0x1E2EC, 0x1E2EF}, {0x1E8D0, 0x1E8D6}, {0x1E944, 0x1E94A}, {0xE0100, 0xE01EF}, +}; + +static const std::vector> punctuation_ranges = { +{0x21, 0x23}, {0x25, 0x2A}, {0x2C, 0x2F}, {0x3A, 0x3B}, {0x3F, 0x40}, {0x5B, 0x5D}, {0x5F, 0x5F}, {0x7B, 0x7B}, {0x7D, 0x7D}, {0xA1, 0xA1}, {0xA7, 0xA7}, {0xAB, 0xAB}, {0xB6, 0xB7}, {0xBB, 0xBB}, +{0xBF, 0xBF}, {0x37E, 0x37E}, {0x387, 0x387}, {0x55A, 0x55F}, {0x589, 0x58A}, {0x5BE, 0x5BE}, {0x5C0, 0x5C0}, {0x5C3, 0x5C3}, {0x5C6, 0x5C6}, {0x5F3, 0x5F4}, {0x609, 0x60A}, {0x60C, 0x60D}, +{0x61B, 0x61B}, {0x61E, 0x61F}, {0x66A, 0x66D}, {0x6D4, 0x6D4}, {0x700, 0x70D}, {0x7F7, 0x7F9}, {0x830, 0x83E}, {0x85E, 0x85E}, {0x964, 0x965}, {0x970, 0x970}, {0x9FD, 0x9FD}, {0xA76, 0xA76}, +{0xAF0, 0xAF0}, {0xC77, 0xC77}, {0xC84, 0xC84}, {0xDF4, 0xDF4}, {0xE4F, 0xE4F}, {0xE5A, 0xE5B}, {0xF04, 0xF12}, {0xF14, 0xF14}, {0xF3A, 0xF3D}, {0xF85, 0xF85}, {0xFD0, 0xFD4}, {0xFD9, 0xFDA}, +{0x104A, 0x104F}, {0x10FB, 0x10FB}, {0x1360, 0x1368}, {0x1400, 0x1400}, {0x166E, 0x166E}, {0x169B, 0x169C}, {0x16EB, 0x16ED}, {0x1735, 0x1736}, {0x17D4, 0x17D6}, {0x17D8, 0x17DA}, {0x1800, 
0x180A}, +{0x1944, 0x1945}, {0x1A1E, 0x1A1F}, {0x1AA0, 0x1AA6}, {0x1AA8, 0x1AAD}, {0x1B5A, 0x1B60}, {0x1BFC, 0x1BFF}, {0x1C3B, 0x1C3F}, {0x1C7E, 0x1C7F}, {0x1CC0, 0x1CC7}, {0x1CD3, 0x1CD3}, {0x2010, 0x2027}, +{0x2030, 0x2043}, {0x2045, 0x2051}, {0x2053, 0x205E}, {0x207D, 0x207E}, {0x208D, 0x208E}, {0x2308, 0x230B}, {0x2329, 0x232A}, {0x2768, 0x2775}, {0x27C5, 0x27C6}, {0x27E6, 0x27EF}, {0x2983, 0x2998}, +{0x29D8, 0x29DB}, {0x29FC, 0x29FD}, {0x2CF9, 0x2CFC}, {0x2CFE, 0x2CFF}, {0x2D70, 0x2D70}, {0x2E00, 0x2E2E}, {0x2E30, 0x2E4F}, {0x2E52, 0x2E52}, {0x3001, 0x3003}, {0x3008, 0x3011}, {0x3014, 0x301F}, +{0x3030, 0x3030}, {0x303D, 0x303D}, {0x30A0, 0x30A0}, {0x30FB, 0x30FB}, {0xA4FE, 0xA4FF}, {0xA60D, 0xA60F}, {0xA673, 0xA673}, {0xA67E, 0xA67E}, {0xA6F2, 0xA6F7}, {0xA874, 0xA877}, {0xA8CE, 0xA8CF}, +{0xA8F8, 0xA8FA}, {0xA8FC, 0xA8FC}, {0xA92E, 0xA92F}, {0xA95F, 0xA95F}, {0xA9C1, 0xA9CD}, {0xA9DE, 0xA9DF}, {0xAA5C, 0xAA5F}, {0xAADE, 0xAADF}, {0xAAF0, 0xAAF1}, {0xABEB, 0xABEB}, {0xFD3E, 0xFD3F}, +{0xFE10, 0xFE19}, {0xFE30, 0xFE52}, {0xFE54, 0xFE61}, {0xFE63, 0xFE63}, {0xFE68, 0xFE68}, {0xFE6A, 0xFE6B}, {0xFF01, 0xFF03}, {0xFF05, 0xFF0A}, {0xFF0C, 0xFF0F}, {0xFF1A, 0xFF1B}, {0xFF1F, 0xFF20}, +{0xFF3B, 0xFF3D}, {0xFF3F, 0xFF3F}, {0xFF5B, 0xFF5B}, {0xFF5D, 0xFF5D}, {0xFF5F, 0xFF65}, {0x10100, 0x10102}, {0x1039F, 0x1039F}, {0x103D0, 0x103D0}, {0x1056F, 0x1056F}, {0x10857, 0x10857}, +{0x1091F, 0x1091F}, {0x1093F, 0x1093F}, {0x10A50, 0x10A58}, {0x10A7F, 0x10A7F}, {0x10AF0, 0x10AF6}, {0x10B39, 0x10B3F}, {0x10B99, 0x10B9C}, {0x10EAD, 0x10EAD}, {0x10F55, 0x10F59}, {0x11047, 0x1104D}, +{0x110BB, 0x110BC}, {0x110BE, 0x110C1}, {0x11140, 0x11143}, {0x11174, 0x11175}, {0x111C5, 0x111C8}, {0x111CD, 0x111CD}, {0x111DB, 0x111DB}, {0x111DD, 0x111DF}, {0x11238, 0x1123D}, {0x112A9, 0x112A9}, +{0x1144B, 0x1144F}, {0x1145A, 0x1145B}, {0x1145D, 0x1145D}, {0x114C6, 0x114C6}, {0x115C1, 0x115D7}, {0x11641, 0x11643}, {0x11660, 0x1166C}, {0x1173C, 0x1173E}, {0x1183B, 0x1183B}, {0x11944, 0x11946}, +{0x119E2, 0x119E2}, {0x11A3F, 0x11A46}, {0x11A9A, 0x11A9C}, {0x11A9E, 0x11AA2}, {0x11C41, 0x11C45}, {0x11C70, 0x11C71}, {0x11EF7, 0x11EF8}, {0x11FFF, 0x11FFF}, {0x12470, 0x12474}, {0x16A6E, 0x16A6F}, +{0x16AF5, 0x16AF5}, {0x16B37, 0x16B3B}, {0x16B44, 0x16B44}, {0x16E97, 0x16E9A}, {0x16FE2, 0x16FE2}, {0x1BC9F, 0x1BC9F}, {0x1DA87, 0x1DA8B}, {0x1E95E, 0x1E95F}, +}; + +static const std::vector> symbol_ranges = { +{0x24, 0x24}, {0x2B, 0x2B}, {0x3C, 0x3E}, {0x5E, 0x5E}, {0x60, 0x60}, {0x7C, 0x7C}, {0x7E, 0x7E}, {0xA2, 0xA6}, {0xA8, 0xA9}, {0xAC, 0xAC}, {0xAE, 0xB1}, {0xB4, 0xB4}, {0xB8, 0xB8}, {0xD7, 0xD7}, +{0xF7, 0xF7}, {0x2C2, 0x2C5}, {0x2D2, 0x2DF}, {0x2E5, 0x2EB}, {0x2ED, 0x2ED}, {0x2EF, 0x2FF}, {0x375, 0x375}, {0x384, 0x385}, {0x3F6, 0x3F6}, {0x482, 0x482}, {0x58D, 0x58F}, {0x606, 0x608}, +{0x60B, 0x60B}, {0x60E, 0x60F}, {0x6DE, 0x6DE}, {0x6E9, 0x6E9}, {0x6FD, 0x6FE}, {0x7F6, 0x7F6}, {0x7FE, 0x7FF}, {0x9F2, 0x9F3}, {0x9FA, 0x9FB}, {0xAF1, 0xAF1}, {0xB70, 0xB70}, {0xBF3, 0xBFA}, +{0xC7F, 0xC7F}, {0xD4F, 0xD4F}, {0xD79, 0xD79}, {0xE3F, 0xE3F}, {0xF01, 0xF03}, {0xF13, 0xF13}, {0xF15, 0xF17}, {0xF1A, 0xF1F}, {0xF34, 0xF34}, {0xF36, 0xF36}, {0xF38, 0xF38}, {0xFBE, 0xFC5}, +{0xFC7, 0xFCC}, {0xFCE, 0xFCF}, {0xFD5, 0xFD8}, {0x109E, 0x109F}, {0x1390, 0x1399}, {0x166D, 0x166D}, {0x17DB, 0x17DB}, {0x1940, 0x1940}, {0x19DE, 0x19FF}, {0x1B61, 0x1B6A}, {0x1B74, 0x1B7C}, +{0x1FBD, 0x1FBD}, {0x1FBF, 0x1FC1}, {0x1FCD, 0x1FCF}, {0x1FDD, 0x1FDF}, {0x1FED, 0x1FEF}, {0x1FFD, 0x1FFE}, {0x2044, 0x2044}, {0x2052, 0x2052}, {0x207A, 0x207C}, {0x208A, 
0x208C}, {0x20A0, 0x20BF}, +{0x2100, 0x2101}, {0x2103, 0x2106}, {0x2108, 0x2109}, {0x2114, 0x2114}, {0x2116, 0x2118}, {0x211E, 0x2123}, {0x2125, 0x2125}, {0x2127, 0x2127}, {0x2129, 0x2129}, {0x212E, 0x212E}, {0x213A, 0x213B}, +{0x2140, 0x2144}, {0x214A, 0x214D}, {0x214F, 0x214F}, {0x218A, 0x218B}, {0x2190, 0x2307}, {0x230C, 0x2328}, {0x232B, 0x2426}, {0x2440, 0x244A}, {0x249C, 0x24E9}, {0x2500, 0x2767}, {0x2794, 0x27C4}, +{0x27C7, 0x27E5}, {0x27F0, 0x2982}, {0x2999, 0x29D7}, {0x29DC, 0x29FB}, {0x29FE, 0x2B73}, {0x2B76, 0x2B95}, {0x2B97, 0x2BFF}, {0x2CE5, 0x2CEA}, {0x2E50, 0x2E51}, {0x2E80, 0x2E99}, {0x2E9B, 0x2EF3}, +{0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB}, {0x3004, 0x3004}, {0x3012, 0x3013}, {0x3020, 0x3020}, {0x3036, 0x3037}, {0x303E, 0x303F}, {0x309B, 0x309C}, {0x3190, 0x3191}, {0x3196, 0x319F}, {0x31C0, 0x31E3}, +{0x3200, 0x321E}, {0x322A, 0x3247}, {0x3250, 0x3250}, {0x3260, 0x327F}, {0x328A, 0x32B0}, {0x32C0, 0x33FF}, {0x4DC0, 0x4DFF}, {0xA490, 0xA4C6}, {0xA700, 0xA716}, {0xA720, 0xA721}, {0xA789, 0xA78A}, +{0xA828, 0xA82B}, {0xA836, 0xA839}, {0xAA77, 0xAA79}, {0xAB5B, 0xAB5B}, {0xAB6A, 0xAB6B}, {0xFB29, 0xFB29}, {0xFBB2, 0xFBC1}, {0xFDFC, 0xFDFD}, {0xFE62, 0xFE62}, {0xFE64, 0xFE66}, {0xFE69, 0xFE69}, +{0xFF04, 0xFF04}, {0xFF0B, 0xFF0B}, {0xFF1C, 0xFF1E}, {0xFF3E, 0xFF3E}, {0xFF40, 0xFF40}, {0xFF5C, 0xFF5C}, {0xFF5E, 0xFF5E}, {0xFFE0, 0xFFE6}, {0xFFE8, 0xFFEE}, {0xFFFC, 0xFFFD}, {0x10137, 0x1013F}, +{0x10179, 0x10189}, {0x1018C, 0x1018E}, {0x10190, 0x1019C}, {0x101A0, 0x101A0}, {0x101D0, 0x101FC}, {0x10877, 0x10878}, {0x10AC8, 0x10AC8}, {0x1173F, 0x1173F}, {0x11FD5, 0x11FF1}, {0x16B3C, 0x16B3F}, +{0x16B45, 0x16B45}, {0x1BC9C, 0x1BC9C}, {0x1D000, 0x1D0F5}, {0x1D100, 0x1D126}, {0x1D129, 0x1D164}, {0x1D16A, 0x1D16C}, {0x1D183, 0x1D184}, {0x1D18C, 0x1D1A9}, {0x1D1AE, 0x1D1E8}, {0x1D200, 0x1D241}, +{0x1D245, 0x1D245}, {0x1D300, 0x1D356}, {0x1D6C1, 0x1D6C1}, {0x1D6DB, 0x1D6DB}, {0x1D6FB, 0x1D6FB}, {0x1D715, 0x1D715}, {0x1D735, 0x1D735}, {0x1D74F, 0x1D74F}, {0x1D76F, 0x1D76F}, {0x1D789, 0x1D789}, +{0x1D7A9, 0x1D7A9}, {0x1D7C3, 0x1D7C3}, {0x1D800, 0x1D9FF}, {0x1DA37, 0x1DA3A}, {0x1DA6D, 0x1DA74}, {0x1DA76, 0x1DA83}, {0x1DA85, 0x1DA86}, {0x1E14F, 0x1E14F}, {0x1E2FF, 0x1E2FF}, {0x1ECAC, 0x1ECAC}, +{0x1ECB0, 0x1ECB0}, {0x1ED2E, 0x1ED2E}, {0x1EEF0, 0x1EEF1}, {0x1F000, 0x1F02B}, {0x1F030, 0x1F093}, {0x1F0A0, 0x1F0AE}, {0x1F0B1, 0x1F0BF}, {0x1F0C1, 0x1F0CF}, {0x1F0D1, 0x1F0F5}, {0x1F10D, 0x1F1AD}, +{0x1F1E6, 0x1F202}, {0x1F210, 0x1F23B}, {0x1F240, 0x1F248}, {0x1F250, 0x1F251}, {0x1F260, 0x1F265}, {0x1F300, 0x1F6D7}, {0x1F6E0, 0x1F6EC}, {0x1F6F0, 0x1F6FC}, {0x1F700, 0x1F773}, {0x1F780, 0x1F7D8}, +{0x1F7E0, 0x1F7EB}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847}, {0x1F850, 0x1F859}, {0x1F860, 0x1F887}, {0x1F890, 0x1F8AD}, {0x1F8B0, 0x1F8B1}, {0x1F900, 0x1F978}, {0x1F97A, 0x1F9CB}, {0x1F9CD, 0x1FA53}, +{0x1FA60, 0x1FA6D}, {0x1FA70, 0x1FA74}, {0x1FA78, 0x1FA7A}, {0x1FA80, 0x1FA86}, {0x1FA90, 0x1FAA8}, {0x1FAB0, 0x1FAB6}, {0x1FAC0, 0x1FAC2}, {0x1FAD0, 0x1FAD6}, {0x1FB00, 0x1FB92}, {0x1FB94, 0x1FBCA}, +}; + +static const std::vector> control_ranges = { +{0x0, 0x8}, {0xE, 0x1B}, {0x7F, 0x84}, {0x86, 0x9F}, {0xAD, 0xAD}, {0x378, 0x379}, {0x380, 0x383}, {0x38B, 0x38B}, {0x38D, 0x38D}, {0x3A2, 0x3A2}, {0x530, 0x530}, {0x557, 0x558}, {0x58B, 0x58C}, +{0x590, 0x590}, {0x5C8, 0x5CF}, {0x5EB, 0x5EE}, {0x5F5, 0x605}, {0x61C, 0x61D}, {0x6DD, 0x6DD}, {0x70E, 0x70F}, {0x74B, 0x74C}, {0x7B2, 0x7BF}, {0x7FB, 0x7FC}, {0x82E, 0x82F}, {0x83F, 0x83F}, +{0x85C, 0x85D}, {0x85F, 0x85F}, {0x86B, 0x89F}, {0x8B5, 0x8B5}, {0x8C8, 0x8D2}, 
{0x8E2, 0x8E2}, {0x984, 0x984}, {0x98D, 0x98E}, {0x991, 0x992}, {0x9A9, 0x9A9}, {0x9B1, 0x9B1}, {0x9B3, 0x9B5}, +{0x9BA, 0x9BB}, {0x9C5, 0x9C6}, {0x9C9, 0x9CA}, {0x9CF, 0x9D6}, {0x9D8, 0x9DB}, {0x9DE, 0x9DE}, {0x9E4, 0x9E5}, {0x9FF, 0xA00}, {0xA04, 0xA04}, {0xA0B, 0xA0E}, {0xA11, 0xA12}, {0xA29, 0xA29}, +{0xA31, 0xA31}, {0xA34, 0xA34}, {0xA37, 0xA37}, {0xA3A, 0xA3B}, {0xA3D, 0xA3D}, {0xA43, 0xA46}, {0xA49, 0xA4A}, {0xA4E, 0xA50}, {0xA52, 0xA58}, {0xA5D, 0xA5D}, {0xA5F, 0xA65}, {0xA77, 0xA80}, +{0xA84, 0xA84}, {0xA8E, 0xA8E}, {0xA92, 0xA92}, {0xAA9, 0xAA9}, {0xAB1, 0xAB1}, {0xAB4, 0xAB4}, {0xABA, 0xABB}, {0xAC6, 0xAC6}, {0xACA, 0xACA}, {0xACE, 0xACF}, {0xAD1, 0xADF}, {0xAE4, 0xAE5}, +{0xAF2, 0xAF8}, {0xB00, 0xB00}, {0xB04, 0xB04}, {0xB0D, 0xB0E}, {0xB11, 0xB12}, {0xB29, 0xB29}, {0xB31, 0xB31}, {0xB34, 0xB34}, {0xB3A, 0xB3B}, {0xB45, 0xB46}, {0xB49, 0xB4A}, {0xB4E, 0xB54}, +{0xB58, 0xB5B}, {0xB5E, 0xB5E}, {0xB64, 0xB65}, {0xB78, 0xB81}, {0xB84, 0xB84}, {0xB8B, 0xB8D}, {0xB91, 0xB91}, {0xB96, 0xB98}, {0xB9B, 0xB9B}, {0xB9D, 0xB9D}, {0xBA0, 0xBA2}, {0xBA5, 0xBA7}, +{0xBAB, 0xBAD}, {0xBBA, 0xBBD}, {0xBC3, 0xBC5}, {0xBC9, 0xBC9}, {0xBCE, 0xBCF}, {0xBD1, 0xBD6}, {0xBD8, 0xBE5}, {0xBFB, 0xBFF}, {0xC0D, 0xC0D}, {0xC11, 0xC11}, {0xC29, 0xC29}, {0xC3A, 0xC3C}, +{0xC45, 0xC45}, {0xC49, 0xC49}, {0xC4E, 0xC54}, {0xC57, 0xC57}, {0xC5B, 0xC5F}, {0xC64, 0xC65}, {0xC70, 0xC76}, {0xC8D, 0xC8D}, {0xC91, 0xC91}, {0xCA9, 0xCA9}, {0xCB4, 0xCB4}, {0xCBA, 0xCBB}, +{0xCC5, 0xCC5}, {0xCC9, 0xCC9}, {0xCCE, 0xCD4}, {0xCD7, 0xCDD}, {0xCDF, 0xCDF}, {0xCE4, 0xCE5}, {0xCF0, 0xCF0}, {0xCF3, 0xCFF}, {0xD0D, 0xD0D}, {0xD11, 0xD11}, {0xD45, 0xD45}, {0xD49, 0xD49}, +{0xD50, 0xD53}, {0xD64, 0xD65}, {0xD80, 0xD80}, {0xD84, 0xD84}, {0xD97, 0xD99}, {0xDB2, 0xDB2}, {0xDBC, 0xDBC}, {0xDBE, 0xDBF}, {0xDC7, 0xDC9}, {0xDCB, 0xDCE}, {0xDD5, 0xDD5}, {0xDD7, 0xDD7}, +{0xDE0, 0xDE5}, {0xDF0, 0xDF1}, {0xDF5, 0xE00}, {0xE3B, 0xE3E}, {0xE5C, 0xE80}, {0xE83, 0xE83}, {0xE85, 0xE85}, {0xE8B, 0xE8B}, {0xEA4, 0xEA4}, {0xEA6, 0xEA6}, {0xEBE, 0xEBF}, {0xEC5, 0xEC5}, +{0xEC7, 0xEC7}, {0xECE, 0xECF}, {0xEDA, 0xEDB}, {0xEE0, 0xEFF}, {0xF48, 0xF48}, {0xF6D, 0xF70}, {0xF98, 0xF98}, {0xFBD, 0xFBD}, {0xFCD, 0xFCD}, {0xFDB, 0xFFF}, {0x10C6, 0x10C6}, {0x10C8, 0x10CC}, +{0x10CE, 0x10CF}, {0x1249, 0x1249}, {0x124E, 0x124F}, {0x1257, 0x1257}, {0x1259, 0x1259}, {0x125E, 0x125F}, {0x1289, 0x1289}, {0x128E, 0x128F}, {0x12B1, 0x12B1}, {0x12B6, 0x12B7}, {0x12BF, 0x12BF}, +{0x12C1, 0x12C1}, {0x12C6, 0x12C7}, {0x12D7, 0x12D7}, {0x1311, 0x1311}, {0x1316, 0x1317}, {0x135B, 0x135C}, {0x137D, 0x137F}, {0x139A, 0x139F}, {0x13F6, 0x13F7}, {0x13FE, 0x13FF}, {0x169D, 0x169F}, +{0x16F9, 0x16FF}, {0x170D, 0x170D}, {0x1715, 0x171F}, {0x1737, 0x173F}, {0x1754, 0x175F}, {0x176D, 0x176D}, {0x1771, 0x1771}, {0x1774, 0x177F}, {0x17DE, 0x17DF}, {0x17EA, 0x17EF}, {0x17FA, 0x17FF}, +{0x180E, 0x180F}, {0x181A, 0x181F}, {0x1879, 0x187F}, {0x18AB, 0x18AF}, {0x18F6, 0x18FF}, {0x191F, 0x191F}, {0x192C, 0x192F}, {0x193C, 0x193F}, {0x1941, 0x1943}, {0x196E, 0x196F}, {0x1975, 0x197F}, +{0x19AC, 0x19AF}, {0x19CA, 0x19CF}, {0x19DB, 0x19DD}, {0x1A1C, 0x1A1D}, {0x1A5F, 0x1A5F}, {0x1A7D, 0x1A7E}, {0x1A8A, 0x1A8F}, {0x1A9A, 0x1A9F}, {0x1AAE, 0x1AAF}, {0x1AC1, 0x1AFF}, {0x1B4C, 0x1B4F}, +{0x1B7D, 0x1B7F}, {0x1BF4, 0x1BFB}, {0x1C38, 0x1C3A}, {0x1C4A, 0x1C4C}, {0x1C89, 0x1C8F}, {0x1CBB, 0x1CBC}, {0x1CC8, 0x1CCF}, {0x1CFB, 0x1CFF}, {0x1DFA, 0x1DFA}, {0x1F16, 0x1F17}, {0x1F1E, 0x1F1F}, +{0x1F46, 0x1F47}, {0x1F4E, 0x1F4F}, {0x1F58, 0x1F58}, {0x1F5A, 0x1F5A}, {0x1F5C, 0x1F5C}, {0x1F5E, 0x1F5E}, {0x1F7E, 
0x1F7F}, {0x1FB5, 0x1FB5}, {0x1FC5, 0x1FC5}, {0x1FD4, 0x1FD5}, {0x1FDC, 0x1FDC}, +{0x1FF0, 0x1FF1}, {0x1FF5, 0x1FF5}, {0x1FFF, 0x1FFF}, {0x200B, 0x200F}, {0x202A, 0x202E}, {0x2060, 0x206F}, {0x2072, 0x2073}, {0x208F, 0x208F}, {0x209D, 0x209F}, {0x20C0, 0x20CF}, {0x20F1, 0x20FF}, +{0x218C, 0x218F}, {0x2427, 0x243F}, {0x244B, 0x245F}, {0x2B74, 0x2B75}, {0x2B96, 0x2B96}, {0x2C2F, 0x2C2F}, {0x2C5F, 0x2C5F}, {0x2CF4, 0x2CF8}, {0x2D26, 0x2D26}, {0x2D28, 0x2D2C}, {0x2D2E, 0x2D2F}, +{0x2D68, 0x2D6E}, {0x2D71, 0x2D7E}, {0x2D97, 0x2D9F}, {0x2DA7, 0x2DA7}, {0x2DAF, 0x2DAF}, {0x2DB7, 0x2DB7}, {0x2DBF, 0x2DBF}, {0x2DC7, 0x2DC7}, {0x2DCF, 0x2DCF}, {0x2DD7, 0x2DD7}, {0x2DDF, 0x2DDF}, +{0x2E53, 0x2E7F}, {0x2E9A, 0x2E9A}, {0x2EF4, 0x2EFF}, {0x2FD6, 0x2FEF}, {0x2FFC, 0x2FFF}, {0x3040, 0x3040}, {0x3097, 0x3098}, {0x3100, 0x3104}, {0x3130, 0x3130}, {0x318F, 0x318F}, {0x31E4, 0x31EF}, +{0x321F, 0x321F}, {0x9FFD, 0x9FFF}, {0xA48D, 0xA48F}, {0xA4C7, 0xA4CF}, {0xA62C, 0xA63F}, {0xA6F8, 0xA6FF}, {0xA7C0, 0xA7C1}, {0xA7CB, 0xA7F4}, {0xA82D, 0xA82F}, {0xA83A, 0xA83F}, {0xA878, 0xA87F}, +{0xA8C6, 0xA8CD}, {0xA8DA, 0xA8DF}, {0xA954, 0xA95E}, {0xA97D, 0xA97F}, {0xA9CE, 0xA9CE}, {0xA9DA, 0xA9DD}, {0xA9FF, 0xA9FF}, {0xAA37, 0xAA3F}, {0xAA4E, 0xAA4F}, {0xAA5A, 0xAA5B}, {0xAAC3, 0xAADA}, +{0xAAF7, 0xAB00}, {0xAB07, 0xAB08}, {0xAB0F, 0xAB10}, {0xAB17, 0xAB1F}, {0xAB27, 0xAB27}, {0xAB2F, 0xAB2F}, {0xAB6C, 0xAB6F}, {0xABEE, 0xABEF}, {0xABFA, 0xABFF}, {0xD7A4, 0xD7AF}, {0xD7C7, 0xD7CA}, +{0xD7FC, 0xF8FF}, {0xFA6E, 0xFA6F}, {0xFADA, 0xFAFF}, {0xFB07, 0xFB12}, {0xFB18, 0xFB1C}, {0xFB37, 0xFB37}, {0xFB3D, 0xFB3D}, {0xFB3F, 0xFB3F}, {0xFB42, 0xFB42}, {0xFB45, 0xFB45}, {0xFBC2, 0xFBD2}, +{0xFD40, 0xFD4F}, {0xFD90, 0xFD91}, {0xFDC8, 0xFDEF}, {0xFDFE, 0xFDFF}, {0xFE1A, 0xFE1F}, {0xFE53, 0xFE53}, {0xFE67, 0xFE67}, {0xFE6C, 0xFE6F}, {0xFE75, 0xFE75}, {0xFEFD, 0xFF00}, {0xFFBF, 0xFFC1}, +{0xFFC8, 0xFFC9}, {0xFFD0, 0xFFD1}, {0xFFD8, 0xFFD9}, {0xFFDD, 0xFFDF}, {0xFFE7, 0xFFE7}, {0xFFEF, 0xFFFB}, {0xFFFE, 0xFFFF}, {0x1000C, 0x1000C}, {0x10027, 0x10027}, {0x1003B, 0x1003B}, +{0x1003E, 0x1003E}, {0x1004E, 0x1004F}, {0x1005E, 0x1007F}, {0x100FB, 0x100FF}, {0x10103, 0x10106}, {0x10134, 0x10136}, {0x1018F, 0x1018F}, {0x1019D, 0x1019F}, {0x101A1, 0x101CF}, {0x101FE, 0x1027F}, +{0x1029D, 0x1029F}, {0x102D1, 0x102DF}, {0x102FC, 0x102FF}, {0x10324, 0x1032C}, {0x1034B, 0x1034F}, {0x1037B, 0x1037F}, {0x1039E, 0x1039E}, {0x103C4, 0x103C7}, {0x103D6, 0x103FF}, {0x1049E, 0x1049F}, +{0x104AA, 0x104AF}, {0x104D4, 0x104D7}, {0x104FC, 0x104FF}, {0x10528, 0x1052F}, {0x10564, 0x1056E}, {0x10570, 0x105FF}, {0x10737, 0x1073F}, {0x10756, 0x1075F}, {0x10768, 0x107FF}, {0x10806, 0x10807}, +{0x10809, 0x10809}, {0x10836, 0x10836}, {0x10839, 0x1083B}, {0x1083D, 0x1083E}, {0x10856, 0x10856}, {0x1089F, 0x108A6}, {0x108B0, 0x108DF}, {0x108F3, 0x108F3}, {0x108F6, 0x108FA}, {0x1091C, 0x1091E}, +{0x1093A, 0x1093E}, {0x10940, 0x1097F}, {0x109B8, 0x109BB}, {0x109D0, 0x109D1}, {0x10A04, 0x10A04}, {0x10A07, 0x10A0B}, {0x10A14, 0x10A14}, {0x10A18, 0x10A18}, {0x10A36, 0x10A37}, {0x10A3B, 0x10A3E}, +{0x10A49, 0x10A4F}, {0x10A59, 0x10A5F}, {0x10AA0, 0x10ABF}, {0x10AE7, 0x10AEA}, {0x10AF7, 0x10AFF}, {0x10B36, 0x10B38}, {0x10B56, 0x10B57}, {0x10B73, 0x10B77}, {0x10B92, 0x10B98}, {0x10B9D, 0x10BA8}, +{0x10BB0, 0x10BFF}, {0x10C49, 0x10C7F}, {0x10CB3, 0x10CBF}, {0x10CF3, 0x10CF9}, {0x10D28, 0x10D2F}, {0x10D3A, 0x10E5F}, {0x10E7F, 0x10E7F}, {0x10EAA, 0x10EAA}, {0x10EAE, 0x10EAF}, {0x10EB2, 0x10EFF}, +{0x10F28, 0x10F2F}, {0x10F5A, 0x10FAF}, {0x10FCC, 0x10FDF}, {0x10FF7, 0x10FFF}, 
{0x1104E, 0x11051}, {0x11070, 0x1107E}, {0x110BD, 0x110BD}, {0x110C2, 0x110CF}, {0x110E9, 0x110EF}, {0x110FA, 0x110FF}, +{0x11135, 0x11135}, {0x11148, 0x1114F}, {0x11177, 0x1117F}, {0x111E0, 0x111E0}, {0x111F5, 0x111FF}, {0x11212, 0x11212}, {0x1123F, 0x1127F}, {0x11287, 0x11287}, {0x11289, 0x11289}, {0x1128E, 0x1128E}, +{0x1129E, 0x1129E}, {0x112AA, 0x112AF}, {0x112EB, 0x112EF}, {0x112FA, 0x112FF}, {0x11304, 0x11304}, {0x1130D, 0x1130E}, {0x11311, 0x11312}, {0x11329, 0x11329}, {0x11331, 0x11331}, {0x11334, 0x11334}, +{0x1133A, 0x1133A}, {0x11345, 0x11346}, {0x11349, 0x1134A}, {0x1134E, 0x1134F}, {0x11351, 0x11356}, {0x11358, 0x1135C}, {0x11364, 0x11365}, {0x1136D, 0x1136F}, {0x11375, 0x113FF}, {0x1145C, 0x1145C}, +{0x11462, 0x1147F}, {0x114C8, 0x114CF}, {0x114DA, 0x1157F}, {0x115B6, 0x115B7}, {0x115DE, 0x115FF}, {0x11645, 0x1164F}, {0x1165A, 0x1165F}, {0x1166D, 0x1167F}, {0x116B9, 0x116BF}, {0x116CA, 0x116FF}, +{0x1171B, 0x1171C}, {0x1172C, 0x1172F}, {0x11740, 0x117FF}, {0x1183C, 0x1189F}, {0x118F3, 0x118FE}, {0x11907, 0x11908}, {0x1190A, 0x1190B}, {0x11914, 0x11914}, {0x11917, 0x11917}, {0x11936, 0x11936}, +{0x11939, 0x1193A}, {0x11947, 0x1194F}, {0x1195A, 0x1199F}, {0x119A8, 0x119A9}, {0x119D8, 0x119D9}, {0x119E5, 0x119FF}, {0x11A48, 0x11A4F}, {0x11AA3, 0x11ABF}, {0x11AF9, 0x11BFF}, {0x11C09, 0x11C09}, +{0x11C37, 0x11C37}, {0x11C46, 0x11C4F}, {0x11C6D, 0x11C6F}, {0x11C90, 0x11C91}, {0x11CA8, 0x11CA8}, {0x11CB7, 0x11CFF}, {0x11D07, 0x11D07}, {0x11D0A, 0x11D0A}, {0x11D37, 0x11D39}, {0x11D3B, 0x11D3B}, +{0x11D3E, 0x11D3E}, {0x11D48, 0x11D4F}, {0x11D5A, 0x11D5F}, {0x11D66, 0x11D66}, {0x11D69, 0x11D69}, {0x11D8F, 0x11D8F}, {0x11D92, 0x11D92}, {0x11D99, 0x11D9F}, {0x11DAA, 0x11EDF}, {0x11EF9, 0x11FAF}, +{0x11FB1, 0x11FBF}, {0x11FF2, 0x11FFE}, {0x1239A, 0x123FF}, {0x1246F, 0x1246F}, {0x12475, 0x1247F}, {0x12544, 0x12FFF}, {0x1342F, 0x143FF}, {0x14647, 0x167FF}, {0x16A39, 0x16A3F}, {0x16A5F, 0x16A5F}, +{0x16A6A, 0x16A6D}, {0x16A70, 0x16ACF}, {0x16AEE, 0x16AEF}, {0x16AF6, 0x16AFF}, {0x16B46, 0x16B4F}, {0x16B5A, 0x16B5A}, {0x16B62, 0x16B62}, {0x16B78, 0x16B7C}, {0x16B90, 0x16E3F}, {0x16E9B, 0x16EFF}, +{0x16F4B, 0x16F4E}, {0x16F88, 0x16F8E}, {0x16FA0, 0x16FDF}, {0x16FE5, 0x16FEF}, {0x16FF2, 0x16FFF}, {0x187F8, 0x187FF}, {0x18CD6, 0x18CFF}, {0x18D09, 0x1AFFF}, {0x1B11F, 0x1B14F}, {0x1B153, 0x1B163}, +{0x1B168, 0x1B16F}, {0x1B2FC, 0x1BBFF}, {0x1BC6B, 0x1BC6F}, {0x1BC7D, 0x1BC7F}, {0x1BC89, 0x1BC8F}, {0x1BC9A, 0x1BC9B}, {0x1BCA0, 0x1CFFF}, {0x1D0F6, 0x1D0FF}, {0x1D127, 0x1D128}, {0x1D173, 0x1D17A}, +{0x1D1E9, 0x1D1FF}, {0x1D246, 0x1D2DF}, {0x1D2F4, 0x1D2FF}, {0x1D357, 0x1D35F}, {0x1D379, 0x1D3FF}, {0x1D455, 0x1D455}, {0x1D49D, 0x1D49D}, {0x1D4A0, 0x1D4A1}, {0x1D4A3, 0x1D4A4}, {0x1D4A7, 0x1D4A8}, +{0x1D4AD, 0x1D4AD}, {0x1D4BA, 0x1D4BA}, {0x1D4BC, 0x1D4BC}, {0x1D4C4, 0x1D4C4}, {0x1D506, 0x1D506}, {0x1D50B, 0x1D50C}, {0x1D515, 0x1D515}, {0x1D51D, 0x1D51D}, {0x1D53A, 0x1D53A}, {0x1D53F, 0x1D53F}, +{0x1D545, 0x1D545}, {0x1D547, 0x1D549}, {0x1D551, 0x1D551}, {0x1D6A6, 0x1D6A7}, {0x1D7CC, 0x1D7CD}, {0x1DA8C, 0x1DA9A}, {0x1DAA0, 0x1DAA0}, {0x1DAB0, 0x1DFFF}, {0x1E007, 0x1E007}, {0x1E019, 0x1E01A}, +{0x1E022, 0x1E022}, {0x1E025, 0x1E025}, {0x1E02B, 0x1E0FF}, {0x1E12D, 0x1E12F}, {0x1E13E, 0x1E13F}, {0x1E14A, 0x1E14D}, {0x1E150, 0x1E2BF}, {0x1E2FA, 0x1E2FE}, {0x1E300, 0x1E7FF}, {0x1E8C5, 0x1E8C6}, +{0x1E8D7, 0x1E8FF}, {0x1E94C, 0x1E94F}, {0x1E95A, 0x1E95D}, {0x1E960, 0x1EC70}, {0x1ECB5, 0x1ED00}, {0x1ED3E, 0x1EDFF}, {0x1EE04, 0x1EE04}, {0x1EE20, 0x1EE20}, {0x1EE23, 0x1EE23}, {0x1EE25, 0x1EE26}, +{0x1EE28, 
0x1EE28}, {0x1EE33, 0x1EE33}, {0x1EE38, 0x1EE38}, {0x1EE3A, 0x1EE3A}, {0x1EE3C, 0x1EE41}, {0x1EE43, 0x1EE46}, {0x1EE48, 0x1EE48}, {0x1EE4A, 0x1EE4A}, {0x1EE4C, 0x1EE4C}, {0x1EE50, 0x1EE50},
+{0x1EE53, 0x1EE53}, {0x1EE55, 0x1EE56}, {0x1EE58, 0x1EE58}, {0x1EE5A, 0x1EE5A}, {0x1EE5C, 0x1EE5C}, {0x1EE5E, 0x1EE5E}, {0x1EE60, 0x1EE60}, {0x1EE63, 0x1EE63}, {0x1EE65, 0x1EE66}, {0x1EE6B, 0x1EE6B},
+{0x1EE73, 0x1EE73}, {0x1EE78, 0x1EE78}, {0x1EE7D, 0x1EE7D}, {0x1EE7F, 0x1EE7F}, {0x1EE8A, 0x1EE8A}, {0x1EE9C, 0x1EEA0}, {0x1EEA4, 0x1EEA4}, {0x1EEAA, 0x1EEAA}, {0x1EEBC, 0x1EEEF}, {0x1EEF2, 0x1EFFF},
+{0x1F02C, 0x1F02F}, {0x1F094, 0x1F09F}, {0x1F0AF, 0x1F0B0}, {0x1F0C0, 0x1F0C0}, {0x1F0D0, 0x1F0D0}, {0x1F0F6, 0x1F0FF}, {0x1F1AE, 0x1F1E5}, {0x1F203, 0x1F20F}, {0x1F23C, 0x1F23F}, {0x1F249, 0x1F24F},
+{0x1F252, 0x1F25F}, {0x1F266, 0x1F2FF}, {0x1F6D8, 0x1F6DF}, {0x1F6ED, 0x1F6EF}, {0x1F6FD, 0x1F6FF}, {0x1F774, 0x1F77F}, {0x1F7D9, 0x1F7DF}, {0x1F7EC, 0x1F7FF}, {0x1F80C, 0x1F80F}, {0x1F848, 0x1F84F},
+{0x1F85A, 0x1F85F}, {0x1F888, 0x1F88F}, {0x1F8AE, 0x1F8AF}, {0x1F8B2, 0x1F8FF}, {0x1F979, 0x1F979}, {0x1F9CC, 0x1F9CC}, {0x1FA54, 0x1FA5F}, {0x1FA6E, 0x1FA6F}, {0x1FA75, 0x1FA77}, {0x1FA7B, 0x1FA7F},
+{0x1FA87, 0x1FA8F}, {0x1FAA9, 0x1FAAF}, {0x1FAB7, 0x1FABF}, {0x1FAC3, 0x1FACF}, {0x1FAD7, 0x1FAFF}, {0x1FB93, 0x1FB93}, {0x1FBCB, 0x1FBEF}, {0x1FBFA, 0x1FFFF}, {0x2A6DE, 0x2A6FF}, {0x2B735, 0x2B73F},
+{0x2B81E, 0x2B81F}, {0x2CEA2, 0x2CEAF}, {0x2EBE1, 0x2F7FF}, {0x2FA1E, 0x2FFFF}, {0x3134B, 0xE00FF}, {0xE01F0, 0x10FFFF},
+};
+
+// string comparison operators
+bool CNCTString::operator==(const std::string& other) const {
+    return str.compare(other) == 0;
+}
+bool CNCTString::operator==(const char other) const {
+    return str.compare(std::string(1, other)) == 0;
+}
+bool CNCTString::operator==(const CNCTString& other) const {
+    return str.compare(other.str) == 0;
+}
+// += operators keep the byte/char counters and the type classification in sync
+CNCTString& CNCTString::operator+=(const std::string& other) {
+    str += other;
+    int new_len = CNCTUnicode::strlen_utf8(other);
+    utf8_chars += new_len;
+    char_type = CNCTUnicode::string_identify(str);
+    seq_offset_bytes += other.size();
+    seq_offset_utf8_chars += new_len;
+    return *this;
+}
+
+CNCTString& CNCTString::operator+=(const char other) {
+    std::string s(1, other); // local name chosen so it does not shadow the 'str' member
+    *this += s;
+    return *this;
+}
+
+CNCTString& CNCTString::operator+=(const CNCTString& other) {
+    str += other.str;
+    utf8_chars += other.utf8_chars;
+    char_type = CNCTUnicode::string_identify(str);
+    seq_offset_bytes += other.str.size();
+    seq_offset_utf8_chars += other.utf8_chars;
+    return *this;
+}
+
+struct CRCompare {
+    bool operator()(const std::pair<int, int>& p, int i) {
+        return p.second < i;
+    }
+    bool operator()(int i, const std::pair<int, int>& p) {
+        return i < p.first;
+    }
+};
+
+// binary search for a code point in a sorted list of disjoint ranges
+bool CNCTUnicode::check_code_range(int c, const std::vector<std::pair<int, int>> &ranges) {
+    auto it = std::upper_bound(ranges.begin(), ranges.end(), c, CRCompare());
+    if (it != ranges.begin()) {
+        --it;
+    }
+    return c >= it->first && c <= it->second;
+}
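+// Usage sketch for the range lookup above. The tables are sorted and disjoint,
+// so std::upper_bound with CRCompare finds the first range starting beyond c;
+// stepping back one entry yields the only range that could contain c. Examples:
+//   check_code_range(0x41,   letter_ranges)      == true   // 'A'
+//   check_code_range(0x2013, punctuation_ranges) == true   // en dash, in {0x2010, 0x2027}
+//   check_code_range(0x31,   letter_ranges)      == false  // '1' is a digit, not a letter
+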
+// each of these is a binary search, so classification takes only a few comparisons
+CNCTCharType CNCTUnicode::get_code_type(int c) {
+    if (check_code_range(c, letter_ranges)) {
+        return LETTER;
+    }
+    if (check_code_range(c, digit_ranges)) {
+        return DIGIT;
+    }
+    if (check_code_range(c, whitespace_ranges)) {
+        return WHITESPACE;
+    }
+    if (check_code_range(c, punctuation_ranges)) {
+        return PUNCTUATION;
+    }
+    if (check_code_range(c, symbol_ranges)) {
+        return SYMBOL;
+    }
+    if (check_code_range(c, accent_mark_ranges)) {
+        return ACCENT_MARK;
+    }
+    if (check_code_range(c, control_ranges)) {
+        return CONTROL;
+    }
+    return UNIDENTIFIED;
+}
+
+// decode one UTF-8 encoded character (1-4 bytes) to its code point
+static int utf8_to_unicode(const std::string& utf8_char) {
+    int c = 0;
+    int len = (int)utf8_char.size();
+    if (len == 1) {
+        c = utf8_char[0];
+    } else if (len == 2) {
+        c = ((utf8_char[0] & 0x1F) << 6) | (utf8_char[1] & 0x3F);
+    } else if (len == 3) {
+        c = ((utf8_char[0] & 0x0F) << 12) | ((utf8_char[1] & 0x3F) << 6) | (utf8_char[2] & 0x3F);
+    } else if (len == 4) {
+        c = ((utf8_char[0] & 0x07) << 18) | ((utf8_char[1] & 0x3F) << 12) | ((utf8_char[2] & 0x3F) << 6) | (utf8_char[3] & 0x3F);
+    }
+    return c;
+}
+
+CNCTCharType CNCTUnicode::get_code_type(const std::string &utf8_char) {
+    return get_code_type(utf8_to_unicode(utf8_char));
+}
+
+// length in bytes of the UTF-8 sequence that starts with byte c
+int CNCTUnicode::utf8_len(const char c)
+{
+    if ((c & 0x80) == 0) {
+        return 1; // ASCII character
+    }
+    if ((c & 0xE0) == 0xC0) {
+        return 2; // 2-byte character
+    }
+    if ((c & 0xF0) == 0xE0) {
+        return 3; // 3-byte character
+    }
+    if ((c & 0xF0) == 0xF0) {
+        return 4; // 4-byte character
+    }
+    return 1; // not valid utf8
+    // static const uint8_t lookup[] = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 3, 4 };
+    // return lookup[static_cast<uint8_t>(c) >> 4];
+}
+
+int CNCTUnicode::strlen_utf8(const std::string src) {
+    int len = 0;
+    for (std::string::const_iterator it = src.begin(); it != src.end(); ++it) {
+        int char_len = utf8_len(*it);
+        if (char_len > 1) {
+            it += char_len - 1;
+        }
+        len += 1;
+    }
+    return len;
+}
+
+// split a string into unicode strings (one std::string per character)
+std::vector<std::string> CNCTUnicode::split_utf8(const std::string &src) {
+    std::vector<std::string> result;
+    for (std::string::const_iterator it = src.begin(); it != src.end(); ++it) {
+        int char_len = utf8_len(*it);
+        std::string str(it, it + char_len);
+        result.push_back(str);
+        if (char_len > 1) {
+            it += char_len - 1;
+        }
+    }
+    return result;
+}
+
+// split a string into unicode strings (CNCTString) with sequence information
+std::vector<CNCTString> CNCTUnicode::split_utf8_enhanced(const std::string &src) {
+    std::vector<CNCTString> result;
+    int seq_offset_bytes = 0;
+    int seq_offset_utf8_chars = 0;
+    for (std::string::const_iterator it = src.begin(); it != src.end(); ++it) {
+        int char_len = utf8_len(*it);
+        std::string str(it, it + char_len);
+        CNCTString cnct_str;
+        cnct_str.seq_offset_bytes = seq_offset_bytes;
+        cnct_str.seq_offset_utf8_chars = seq_offset_utf8_chars;
+        cnct_str.str = str;
+        cnct_str.utf8_chars = 1;
+        cnct_str.char_type = get_code_type(str);
+        #if 0 // debug output of the per-character classification
+        switch (cnct_str.char_type)
+        {
+            case DIGIT:
+                printf("%s = DIGIT\n", str.c_str());
+                break;
+            case LETTER:
+                printf("%s = LETTER\n", str.c_str());
+                break;
+            case WHITESPACE:
+                printf("%s = WHITESPACE\n", str.c_str());
+                break;
+            case PUNCTUATION:
+                printf("%s = PUNCTUATION\n", str.c_str());
+                break;
+            case UNIDENTIFIED:
+                printf("%s = UNIDENTIFIED\n", str.c_str());
+                break;
+            case SYMBOL:
+                printf("%s = SYMBOL\n", str.c_str());
+                break;
+            case CONTROL:
+                printf("%s = CONTROL\n", str.c_str());
+                break;
+        }
+        #endif
+
+        result.push_back(cnct_str);
+        seq_offset_bytes += char_len;
+        seq_offset_utf8_chars += 1;
+        if (char_len > 1) {
+            it += char_len - 1;
+        }
+    }
+    return result;
+}
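+// Example: split_utf8("héllo") yields {"h", "é", "l", "l", "o"} - five entries,
+// with "é" kept as one two-byte sequence (0xC3 0xA9). split_utf8_enhanced()
+// additionally records, per character, its byte offset, character offset and
+// CNCTCharType, which the BPE pre-tokenizer below relies on.
+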
+// return the type of the string (MIXED if the characters are not all of one type)
+CNCTCharType CNCTUnicode::string_identify(const std::string &str) {
+    CNCTCharType result = UNIDENTIFIED;
+    std::string::const_iterator it = str.begin();
+    while (it != str.end()) {
+        int len = utf8_len(*it);
+        std::string utf8_char; // collect the full UTF-8 sequence, then decode it
+        for (int i = 0; i < len && it != str.end(); ++i, ++it) {
+            utf8_char += *it;
+        }
+        switch (get_code_type(utf8_char)) {
+            case DIGIT:
+                if (result == UNIDENTIFIED) {
+                    result = DIGIT;
+                } else if (result != DIGIT) {
+                    return MIXED;
+                }
+                break;
+            case LETTER:
+                if (result == UNIDENTIFIED) {
+                    result = LETTER;
+                } else if (result != LETTER) {
+                    return MIXED;
+                }
+                break;
+            case WHITESPACE:
+                if (result == UNIDENTIFIED) {
+                    result = WHITESPACE;
+                } else if (result != WHITESPACE) {
+                    return MIXED;
+                }
+                break;
+            case PUNCTUATION:
+                if (result == UNIDENTIFIED) {
+                    result = PUNCTUATION;
+                } else if (result != PUNCTUATION) {
+                    return MIXED;
+                }
+                break;
+            default:
+                return MIXED;
+        }
+    }
+    return result;
+}
+
+// verify that every character of the string has the given type
+bool CNCTUnicode::string_test(const std::string &str, CNCTCharType chartype)
+{
+    std::string::const_iterator it = str.begin();
+    while (it != str.end()) {
+        int len = utf8_len(*it);
+        std::string utf8_char;
+        for (int i = 0; i < len && it != str.end(); ++i, ++it) {
+            utf8_char += *it;
+        }
+        if (get_code_type(utf8_char) != chartype) {
+            return false;
+        }
+    }
+    return true;
+}
+
+//-----------------
+// llama.cpp GPT2 vocab (from libfalcon.cpp)
+//-----------------
+
+// replace all occurrences of 'from' in 'str' with 'to'
+std::string replaceAll(std::string str, const std::string& from, const std::string& to) {
+    size_t start_pos = 0;
+    while ((start_pos = str.find(from, start_pos)) != std::string::npos) {
+        str.replace(start_pos, from.length(), to);
+        start_pos += to.length(); // handles the case where 'to' is a substring of 'from'
+    }
+    return str;
+}
+
+struct TrieNode {
+    std::map<char, TrieNode*> map;
+    int32_t Id = -1;
+};
+
+struct Trie {
+    TrieNode *root;
+
+    Trie() : root(new TrieNode()) {}
+
+    ~Trie() {
+        if (root) {
+            deleteTrie(root);
+        }
+    }
+
+    // move constructor
+    Trie(Trie&& other) noexcept : root(other.root) {
+        other.root = nullptr;
+    }
+
+    // move assignment operator
+    Trie& operator=(Trie&& other) noexcept {
+        if (this != &other) {
+            if (root) {
+                deleteTrie(root);
+            }
+            root = other.root;
+            other.root = nullptr;
+        }
+        return *this;
+    }
+
+    // insert a token, one byte per trie level; the terminal node stores the token id
+    void insert(const std::string &token, int32_t Id) {
+        TrieNode* current = root;
+        for (auto ch : token) {
+            if (current->map.find(ch) == current->map.end()) {
+                current->map[ch] = new TrieNode();
+            }
+            current = current->map[ch];
+        }
+        current->Id = Id;
+    }
+
+    void reset() {
+        deleteTrie(root);
+        root = new TrieNode();
+    }
+
+private:
+    void deleteTrie(TrieNode* node) {
+        for (auto &it : node->map) {
+            deleteTrie(it.second);
+        }
+        delete node;
+    }
+};
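+// Usage sketch with hypothetical tokens: after trie.insert("he", 5) and
+// trie.insert("hello", 7), walking the input "hellox" byte by byte passes
+// terminal ids 5 (at "he") and 7 (at "hello") and stops at 'x'; the
+// longest-match lookup in gpt2bpe_vocab::find_longest_match() below then
+// returns {7, "hello"}.
+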
+struct gpt2bpe_vocab {
+    using id    = int32_t;
+    using token = std::string;
+
+    std::map<std::string, uint32_t> max_token_length; // max token length, for each 2-byte prefix
+    std::map<std::pair<std::string, std::string>, int> bpe_ranks;
+    std::vector<std::pair<std::string, std::string>> bpe_merges;
+
+    id special_bos_id = -1;
+    id special_eos_id = -1;
+    id special_unk_id = -1;
+    id special_sep_id = -1;
+    id special_pad_id = -1;
+
+    id linefeed_id = -1;
+
+    std::unordered_map<token, id> token_to_id;
+    std::unordered_map<id, token> id_to_token;
+
+    Trie trie; // high-speed access to tokens by prefix tree
+
+    // populate trie from map
+    void populate_trie_from_map() {
+        trie.reset();
+        for (const auto& pair : token_to_id) {
+            trie.insert(pair.first, pair.second);
+            if (pair.first.size() >= 2) {
+                std::string prefix = pair.first.substr(0, 2);
+                max_token_length[prefix] = std::max(max_token_length[prefix], (uint32_t)pair.first.size());
+            }
+        }
+    }
+    // populate token ranks map
+    int populate_bpe_ranks(std::vector<std::pair<std::string, std::string>> bpe_merges_) {
+        for (int i = 0; i < (int)bpe_merges_.size(); i++) {
+            bpe_ranks.emplace(bpe_merges_[i], i);
+        }
+        bpe_merges = bpe_merges_;
+        return (int)bpe_merges_.size();
+    }
+
+    // trim whitespace characters from the beginning and end of the string
+    void trim(std::string& str) {
+        // remove whitespace characters from the beginning of the string
+        str.erase(str.begin(), std::find_if(str.begin(), str.end(), [](int ch) {
+            return !std::isspace(ch);
+        }));
+
+        // remove whitespace characters from the end of the string
+        str.erase(std::find_if(str.rbegin(), str.rend(), [](int ch) {
+            return !std::isspace(ch);
+        }).base(), str.end());
+    }
+
+    // get max token length available for a prefix of 2 bytes (string at least 2 bytes long)
+    int get_max_token_length(const std::string& string) const {
+        if (string.size() < 2) {
+            return -1;
+        }
+        std::string prefix = string.substr(0, 2);
+        if (max_token_length.find(prefix) == max_token_length.end()) {
+            return 0;
+        }
+        return max_token_length.at(prefix);
+    }
+
+    // find whether two tokens merge in bpe_ranks; return the rank or -1
+    int find_bpe_rank(const std::string& token1, const std::string& token2) const {
+        std::string left_token  = token1;
+        std::string right_token = token2;
+        left_token  = replaceAll(left_token,  " ",  "Ġ");
+        left_token  = replaceAll(left_token,  "\n", "Ċ");
+        right_token = replaceAll(right_token, " ",  "Ġ");
+        right_token = replaceAll(right_token, "\n", "Ċ");
+
+        auto it = bpe_ranks.find(std::make_pair(left_token, right_token));
+        if (it == bpe_ranks.end()) {
+            return -1;
+        }
+        return it->second;
+    }
+
+    // longest vocab token that is a prefix of 'snippet'
+    std::pair<id, std::string> find_longest_match(const std::string& snippet) const {
+        TrieNode* current = trie.root;
+        gpt2bpe_vocab::id last_matched_id = -1;
+        std::string last_matched_token = "";
+        std::string current_token = "";
+        for (auto ch : snippet) {
+            if (current->map.find(ch) == current->map.end()) {
+                break;
+            }
+            current = current->map[ch];
+            current_token += ch;
+            if (current->Id != -1) {
+                last_matched_id = current->Id;
+                last_matched_token = current_token;
+            }
+        }
+        return {last_matched_id, last_matched_token};
+    }
+};
+
+//
+// tokenizer - bpe type, gpt2 tokenization compatible
+//
+
+struct ggllm_bpe_symbol {
+    using index = int;
+    index prev;
+    index next;
+    const char * text;
+    size_t n;
+};
+
+static_assert(std::is_trivially_copyable<ggllm_bpe_symbol>::value, "ggllm_bpe_symbol is not trivially copyable");
+
+struct ggllm_bpe_bigram {
+    struct comparator {
+        bool operator()(ggllm_bpe_bigram & l, ggllm_bpe_bigram & r) {
+            return l.rank > r.rank || (l.rank == r.rank && l.left > r.left);
+        }
+    };
+
+    using queue_storage = std::vector<ggllm_bpe_bigram>;
+    using queue = std::priority_queue<ggllm_bpe_bigram, queue_storage, comparator>;
+    ggllm_bpe_symbol::index left;
+    ggllm_bpe_symbol::index right;
+    std::string text;
+    int rank;
+    size_t size;
+};
+
+struct gpt2bpe_tokenizer {
+    gpt2bpe_tokenizer(const gpt2bpe_vocab & vocab, bool g2ws_): vocab_(vocab) { flag_g2ws = g2ws_; }
+
+    void tokenize(const std::string & text, std::vector<gpt2bpe_vocab::id> & output) {
+        int final_prev_index = -1;
+        // auto start = ggml_time_us();
+        auto word_collection = bpe_gpt2_preprocess(text);
+        // auto end = ggml_time_us();
+        // fprintf(stderr, "%s: preprocessing took %0.3f ms\n", __func__, (end - start) / 1000.0);
+
+        symbols_final.clear();
+
+        for (auto & word : word_collection) {
+            work_queue_ = ggllm_bpe_bigram::queue();
+            symbols_.clear();
+
+            int index = 0;
+            size_t offset = 0;
+
+            while (offset < word.size()) {
+                ggllm_bpe_symbol sym;
+                size_t char_len = std::min(word.size() - offset, (size_t) CNCTUnicode::utf8_len(word[offset]));
+                sym.text = word.c_str() + offset;
+                sym.n = char_len;
+                offset += sym.n;
+                sym.prev = index - 1;
+                sym.next = offset == word.size() ? 
-1 : index + 1; + index++; + symbols_.emplace_back(sym); + } + for (size_t i = 1; i < symbols_.size(); ++i) { + add_new_bigram(i - 1, i); + } + + // build token(s) + while (!work_queue_.empty()) { + auto bigram = work_queue_.top(); + work_queue_.pop(); + + auto & left_symbol = symbols_[bigram.left]; + auto & right_symbol = symbols_[bigram.right]; + + if (left_symbol.n == 0 || right_symbol.n == 0) { + continue; + } + std::string left_token = std::string(left_symbol.text, left_symbol.n); + std::string right_token = std::string(right_symbol.text, right_symbol.n); + if (left_token + right_token != bigram.text) { + continue; // Skip this bigram if it's outdated + } + + // merge the right sym into the left one + left_symbol.n += right_symbol.n; + right_symbol.n = 0; + + // remove the right sym from the chain + left_symbol.next = right_symbol.next; + if (right_symbol.next >= 0) { + symbols_[right_symbol.next].prev = bigram.left; + } + + add_new_bigram(left_symbol.prev, bigram.left); // left side of current symbol + add_new_bigram(bigram.left, left_symbol.next); // right side of current symbol + } + + // add the fnished tokens to the final list keeping correct order for next and prev + for (auto & sym : symbols_) { + if (sym.n > 0) { + sym.prev = final_prev_index; + sym.next = -1; + if (final_prev_index != -1) { + symbols_final[final_prev_index].next = symbols_final.size(); + } + symbols_final.emplace_back(sym); + final_prev_index = symbols_final.size() - 1; + } + } + } + + symbols_ = symbols_final; + if (symbols_.size()) + for (int i = 0; i != -1; i = symbols_[i].next) { + auto & symbol = symbols_[i]; + if (symbol.n == 0) { + continue; + } + std::string str = std::string(symbol.text, symbol.n); + std::string str_decoded = decode_token(str); + auto token = vocab_.token_to_id.find(str_decoded); + + if (token == vocab_.token_to_id.end()) { + for (auto j = str_decoded.begin(); j != str_decoded.end(); ++j) { + std::string byte_str(1, *j); + auto token_multibyte = vocab_.token_to_id.find(byte_str); + if (token_multibyte == vocab_.token_to_id.end()) { + fprintf(stderr,"ERROR: byte not found in vocab: '%s'\n", byte_str.c_str()); + } + output.push_back((*token_multibyte).second); + } + } else { + output.push_back((*token).second); + } + } + } + +private: + void add_new_bigram(int left, int right) { + if (left == -1 || right == -1) return; + + std::string left_token = std::string(symbols_[left].text, symbols_[left].n); + std::string right_token = std::string(symbols_[right].text, symbols_[right].n); + + int rank_found = -1; + rank_found = vocab_.find_bpe_rank(left_token, right_token); + + if (rank_found < 0) { + return; + } + + ggllm_bpe_bigram bigram; + bigram.left = left; + bigram.right = right; + bigram.rank = rank_found; + bigram.size = left_token.size() + right_token.size(); + bigram.text = left_token + right_token; + work_queue_.push(bigram); + } + + std::unordered_map bytes_to_unicode() { + static std::unordered_map hex_map = { + { 0x21, "\x21" }, { 0x22, "\x22" }, { 0x23, "\x23" }, { 0x24, "\x24" }, { 0x25, "\x25" }, { 0x26, "\x26" }, { 0x27, "\x27" }, { 0x28, "\x28" }, { 0x29, "\x29" }, { 0x2A, "\x2A" }, + { 0x2B, "\x2B" }, { 0x2C, "\x2C" }, { 0x2D, "\x2D" }, { 0x2E, "\x2E" }, { 0x2F, "\x2F" }, { 0x30, "\x30" }, { 0x31, "\x31" }, { 0x32, "\x32" }, { 0x33, "\x33" }, { 0x34, "\x34" }, + { 0x35, "\x35" }, { 0x36, "\x36" }, { 0x37, "\x37" }, { 0x38, "\x38" }, { 0x39, "\x39" }, { 0x3A, "\x3A" }, { 0x3B, "\x3B" }, { 0x3C, "\x3C" }, { 0x3D, "\x3D" }, { 0x3E, "\x3E" }, + { 0x3F, "\x3F" }, { 0x40, "\x40" 
}, { 0x41, "\x41" }, { 0x42, "\x42" }, { 0x43, "\x43" }, { 0x44, "\x44" }, { 0x45, "\x45" }, { 0x46, "\x46" }, { 0x47, "\x47" }, { 0x48, "\x48" }, + { 0x49, "\x49" }, { 0x4A, "\x4A" }, { 0x4B, "\x4B" }, { 0x4C, "\x4C" }, { 0x4D, "\x4D" }, { 0x4E, "\x4E" }, { 0x4F, "\x4F" }, { 0x50, "\x50" }, { 0x51, "\x51" }, { 0x52, "\x52" }, + { 0x53, "\x53" }, { 0x54, "\x54" }, { 0x55, "\x55" }, { 0x56, "\x56" }, { 0x57, "\x57" }, { 0x58, "\x58" }, { 0x59, "\x59" }, { 0x5A, "\x5A" }, { 0x5B, "\x5B" }, { 0x5C, "\x5C" }, + { 0x5D, "\x5D" }, { 0x5E, "\x5E" }, { 0x5F, "\x5F" }, { 0x60, "\x60" }, { 0x61, "\x61" }, { 0x62, "\x62" }, { 0x63, "\x63" }, { 0x64, "\x64" }, { 0x65, "\x65" }, { 0x66, "\x66" }, + { 0x67, "\x67" }, { 0x68, "\x68" }, { 0x69, "\x69" }, { 0x6A, "\x6A" }, { 0x6B, "\x6B" }, { 0x6C, "\x6C" }, { 0x6D, "\x6D" }, { 0x6E, "\x6E" }, { 0x6F, "\x6F" }, { 0x70, "\x70" }, + { 0x71, "\x71" }, { 0x72, "\x72" }, { 0x73, "\x73" }, { 0x74, "\x74" }, { 0x75, "\x75" }, { 0x76, "\x76" }, { 0x77, "\x77" }, { 0x78, "\x78" }, { 0x79, "\x79" }, { 0x7A, "\x7A" }, + { 0x7B, "\x7B" }, { 0x7C, "\x7C" }, { 0x7D, "\x7D" }, { 0x7E, "\x7E" }, { 0xA1, "\xC2\xA1" }, { 0xA2, "\xC2\xA2" }, { 0xA3, "\xC2\xA3" }, { 0xA4, "\xC2\xA4" }, { 0xA5, "\xC2\xA5" }, + { 0xA6, "\xC2\xA6" }, { 0xA7, "\xC2\xA7" }, { 0xA8, "\xC2\xA8" }, { 0xA9, "\xC2\xA9" }, { 0xAA, "\xC2\xAA" }, { 0xAB, "\xC2\xAB" }, { 0xAC, "\xC2\xAC" }, { 0xAE, "\xC2\xAE" }, + { 0xAF, "\xC2\xAF" }, { 0xB0, "\xC2\xB0" }, { 0xB1, "\xC2\xB1" }, { 0xB2, "\xC2\xB2" }, { 0xB3, "\xC2\xB3" }, { 0xB4, "\xC2\xB4" }, { 0xB5, "\xC2\xB5" }, { 0xB6, "\xC2\xB6" }, + { 0xB7, "\xC2\xB7" }, { 0xB8, "\xC2\xB8" }, { 0xB9, "\xC2\xB9" }, { 0xBA, "\xC2\xBA" }, { 0xBB, "\xC2\xBB" }, { 0xBC, "\xC2\xBC" }, { 0xBD, "\xC2\xBD" }, { 0xBE, "\xC2\xBE" }, + { 0xBF, "\xC2\xBF" }, { 0xC0, "\xC3\x80" }, { 0xC1, "\xC3\x81" }, { 0xC2, "\xC3\x82" }, { 0xC3, "\xC3\x83" }, { 0xC4, "\xC3\x84" }, { 0xC5, "\xC3\x85" }, { 0xC6, "\xC3\x86" }, + { 0xC7, "\xC3\x87" }, { 0xC8, "\xC3\x88" }, { 0xC9, "\xC3\x89" }, { 0xCA, "\xC3\x8A" }, { 0xCB, "\xC3\x8B" }, { 0xCC, "\xC3\x8C" }, { 0xCD, "\xC3\x8D" }, { 0xCE, "\xC3\x8E" }, + { 0xCF, "\xC3\x8F" }, { 0xD0, "\xC3\x90" }, { 0xD1, "\xC3\x91" }, { 0xD2, "\xC3\x92" }, { 0xD3, "\xC3\x93" }, { 0xD4, "\xC3\x94" }, { 0xD5, "\xC3\x95" }, { 0xD6, "\xC3\x96" }, + { 0xD7, "\xC3\x97" }, { 0xD8, "\xC3\x98" }, { 0xD9, "\xC3\x99" }, { 0xDA, "\xC3\x9A" }, { 0xDB, "\xC3\x9B" }, { 0xDC, "\xC3\x9C" }, { 0xDD, "\xC3\x9D" }, { 0xDE, "\xC3\x9E" }, + { 0xDF, "\xC3\x9F" }, { 0xE0, "\xC3\xA0" }, { 0xE1, "\xC3\xA1" }, { 0xE2, "\xC3\xA2" }, { 0xE3, "\xC3\xA3" }, { 0xE4, "\xC3\xA4" }, { 0xE5, "\xC3\xA5" }, { 0xE6, "\xC3\xA6" }, + { 0xE7, "\xC3\xA7" }, { 0xE8, "\xC3\xA8" }, { 0xE9, "\xC3\xA9" }, { 0xEA, "\xC3\xAA" }, { 0xEB, "\xC3\xAB" }, { 0xEC, "\xC3\xAC" }, { 0xED, "\xC3\xAD" }, { 0xEE, "\xC3\xAE" }, + { 0xEF, "\xC3\xAF" }, { 0xF0, "\xC3\xB0" }, { 0xF1, "\xC3\xB1" }, { 0xF2, "\xC3\xB2" }, { 0xF3, "\xC3\xB3" }, { 0xF4, "\xC3\xB4" }, { 0xF5, "\xC3\xB5" }, { 0xF6, "\xC3\xB6" }, + { 0xF7, "\xC3\xB7" }, { 0xF8, "\xC3\xB8" }, { 0xF9, "\xC3\xB9" }, { 0xFA, "\xC3\xBA" }, { 0xFB, "\xC3\xBB" }, { 0xFC, "\xC3\xBC" }, { 0xFD, "\xC3\xBD" }, { 0xFE, "\xC3\xBE" }, + { 0xFF, "\xC3\xBF" }, { 0x00, "\xC4\x80" }, { 0x01, "\xC4\x81" }, { 0x02, "\xC4\x82" }, { 0x03, "\xC4\x83" }, { 0x04, "\xC4\x84" }, { 0x05, "\xC4\x85" }, { 0x06, "\xC4\x86" }, + { 0x07, "\xC4\x87" }, { 0x08, "\xC4\x88" }, { 0x09, "\xC4\x89" }, { 0x0A, "\xC4\x8A" }, { 0x0B, "\xC4\x8B" }, { 0x0C, "\xC4\x8C" }, { 0x0D, "\xC4\x8D" }, { 0x0E, "\xC4\x8E" }, 
+ { 0x0F, "\xC4\x8F" }, { 0x10, "\xC4\x90" }, { 0x11, "\xC4\x91" }, { 0x12, "\xC4\x92" }, { 0x13, "\xC4\x93" }, { 0x14, "\xC4\x94" }, { 0x15, "\xC4\x95" }, { 0x16, "\xC4\x96" }, + { 0x17, "\xC4\x97" }, { 0x18, "\xC4\x98" }, { 0x19, "\xC4\x99" }, { 0x1A, "\xC4\x9A" }, { 0x1B, "\xC4\x9B" }, { 0x1C, "\xC4\x9C" }, { 0x1D, "\xC4\x9D" }, { 0x1E, "\xC4\x9E" }, + { 0x1F, "\xC4\x9F" }, { 0x20, "\xC4\xA0" }, { 0x7F, "\xC4\xA1" }, { 0x80, "\xC4\xA2" }, { 0x81, "\xC4\xA3" }, { 0x82, "\xC4\xA4" }, { 0x83, "\xC4\xA5" }, { 0x84, "\xC4\xA6" }, + { 0x85, "\xC4\xA7" }, { 0x86, "\xC4\xA8" }, { 0x87, "\xC4\xA9" }, { 0x88, "\xC4\xAA" }, { 0x89, "\xC4\xAB" }, { 0x8A, "\xC4\xAC" }, { 0x8B, "\xC4\xAD" }, { 0x8C, "\xC4\xAE" }, + { 0x8D, "\xC4\xAF" }, { 0x8E, "\xC4\xB0" }, { 0x8F, "\xC4\xB1" }, { 0x90, "\xC4\xB2" }, { 0x91, "\xC4\xB3" }, { 0x92, "\xC4\xB4" }, { 0x93, "\xC4\xB5" }, { 0x94, "\xC4\xB6" }, + { 0x95, "\xC4\xB7" }, { 0x96, "\xC4\xB8" }, { 0x97, "\xC4\xB9" }, { 0x98, "\xC4\xBA" }, { 0x99, "\xC4\xBB" }, { 0x9A, "\xC4\xBC" }, { 0x9B, "\xC4\xBD" }, { 0x9C, "\xC4\xBE" }, + { 0x9D, "\xC4\xBF" }, { 0x9E, "\xC5\x80" }, { 0x9F, "\xC5\x81" }, { 0xA0, "\xC5\x82" }, { 0xAD, "\xC5\x83" } + }; + return hex_map; + } + + std::unordered_map unicode_to_bytes() { + static std::unordered_map hex_map = { + { "\x21", 0x21 }, { "\x22", 0x22 }, { "\x23", 0x23 }, { "\x24", 0x24 }, { "\x25", 0x25 }, { "\x26", 0x26 }, { "\x27", 0x27 }, { "\x28", 0x28 }, { "\x29", 0x29 }, { "\x2A", 0x2A }, + { "\x2B", 0x2B }, { "\x2C", 0x2C }, { "\x2D", 0x2D }, { "\x2E", 0x2E }, { "\x2F", 0x2F }, { "\x30", 0x30 }, { "\x31", 0x31 }, { "\x32", 0x32 }, { "\x33", 0x33 }, { "\x34", 0x34 }, + { "\x35", 0x35 }, { "\x36", 0x36 }, { "\x37", 0x37 }, { "\x38", 0x38 }, { "\x39", 0x39 }, { "\x3A", 0x3A }, { "\x3B", 0x3B }, { "\x3C", 0x3C }, { "\x3D", 0x3D }, { "\x3E", 0x3E }, + { "\x3F", 0x3F }, { "\x40", 0x40 }, { "\x41", 0x41 }, { "\x42", 0x42 }, { "\x43", 0x43 }, { "\x44", 0x44 }, { "\x45", 0x45 }, { "\x46", 0x46 }, { "\x47", 0x47 }, { "\x48", 0x48 }, + { "\x49", 0x49 }, { "\x4A", 0x4A }, { "\x4B", 0x4B }, { "\x4C", 0x4C }, { "\x4D", 0x4D }, { "\x4E", 0x4E }, { "\x4F", 0x4F }, { "\x50", 0x50 }, { "\x51", 0x51 }, { "\x52", 0x52 }, + { "\x53", 0x53 }, { "\x54", 0x54 }, { "\x55", 0x55 }, { "\x56", 0x56 }, { "\x57", 0x57 }, { "\x58", 0x58 }, { "\x59", 0x59 }, { "\x5A", 0x5A }, { "\x5B", 0x5B }, { "\x5C", 0x5C }, + { "\x5D", 0x5D }, { "\x5E", 0x5E }, { "\x5F", 0x5F }, { "\x60", 0x60 }, { "\x61", 0x61 }, { "\x62", 0x62 }, { "\x63", 0x63 }, { "\x64", 0x64 }, { "\x65", 0x65 }, { "\x66", 0x66 }, + { "\x67", 0x67 }, { "\x68", 0x68 }, { "\x69", 0x69 }, { "\x6A", 0x6A }, { "\x6B", 0x6B }, { "\x6C", 0x6C }, { "\x6D", 0x6D }, { "\x6E", 0x6E }, { "\x6F", 0x6F }, { "\x70", 0x70 }, + { "\x71", 0x71 }, { "\x72", 0x72 }, { "\x73", 0x73 }, { "\x74", 0x74 }, { "\x75", 0x75 }, { "\x76", 0x76 }, { "\x77", 0x77 }, { "\x78", 0x78 }, { "\x79", 0x79 }, { "\x7A", 0x7A }, + { "\x7B", 0x7B }, { "\x7C", 0x7C }, { "\x7D", 0x7D }, { "\x7E", 0x7E }, { "\xC2\xA1", 0xA1 }, { "\xC2\xA2", 0xA2 }, { "\xC2\xA3", 0xA3 }, { "\xC2\xA4", 0xA4 }, { "\xC2\xA5", 0xA5 }, + { "\xC2\xA6", 0xA6 }, { "\xC2\xA7", 0xA7 }, { "\xC2\xA8", 0xA8 }, { "\xC2\xA9", 0xA9 }, { "\xC2\xAA", 0xAA }, { "\xC2\xAB", 0xAB }, { "\xC2\xAC", 0xAC }, { "\xC2\xAE", 0xAE }, + { "\xC2\xAF", 0xAF }, { "\xC2\xB0", 0xB0 }, { "\xC2\xB1", 0xB1 }, { "\xC2\xB2", 0xB2 }, { "\xC2\xB3", 0xB3 }, { "\xC2\xB4", 0xB4 }, { "\xC2\xB5", 0xB5 }, { "\xC2\xB6", 0xB6 }, + { "\xC2\xB7", 0xB7 }, { "\xC2\xB8", 0xB8 }, { "\xC2\xB9", 0xB9 }, { "\xC2\xBA", 0xBA 
}, { "\xC2\xBB", 0xBB }, { "\xC2\xBC", 0xBC }, { "\xC2\xBD", 0xBD }, { "\xC2\xBE", 0xBE }, + { "\xC2\xBF", 0xBF }, { "\xC3\x80", 0xC0 }, { "\xC3\x81", 0xC1 }, { "\xC3\x82", 0xC2 }, { "\xC3\x83", 0xC3 }, { "\xC3\x84", 0xC4 }, { "\xC3\x85", 0xC5 }, { "\xC3\x86", 0xC6 }, + { "\xC3\x87", 0xC7 }, { "\xC3\x88", 0xC8 }, { "\xC3\x89", 0xC9 }, { "\xC3\x8A", 0xCA }, { "\xC3\x8B", 0xCB }, { "\xC3\x8C", 0xCC }, { "\xC3\x8D", 0xCD }, { "\xC3\x8E", 0xCE }, + { "\xC3\x8F", 0xCF }, { "\xC3\x90", 0xD0 }, { "\xC3\x91", 0xD1 }, { "\xC3\x92", 0xD2 }, { "\xC3\x93", 0xD3 }, { "\xC3\x94", 0xD4 }, { "\xC3\x95", 0xD5 }, { "\xC3\x96", 0xD6 }, + { "\xC3\x97", 0xD7 }, { "\xC3\x98", 0xD8 }, { "\xC3\x99", 0xD9 }, { "\xC3\x9A", 0xDA }, { "\xC3\x9B", 0xDB }, { "\xC3\x9C", 0xDC }, { "\xC3\x9D", 0xDD }, { "\xC3\x9E", 0xDE }, + { "\xC3\x9F", 0xDF }, { "\xC3\xA0", 0xE0 }, { "\xC3\xA1", 0xE1 }, { "\xC3\xA2", 0xE2 }, { "\xC3\xA3", 0xE3 }, { "\xC3\xA4", 0xE4 }, { "\xC3\xA5", 0xE5 }, { "\xC3\xA6", 0xE6 }, + { "\xC3\xA7", 0xE7 }, { "\xC3\xA8", 0xE8 }, { "\xC3\xA9", 0xE9 }, { "\xC3\xAA", 0xEA }, { "\xC3\xAB", 0xEB }, { "\xC3\xAC", 0xEC }, { "\xC3\xAD", 0xED }, { "\xC3\xAE", 0xEE }, + { "\xC3\xAF", 0xEF }, { "\xC3\xB0", 0xF0 }, { "\xC3\xB1", 0xF1 }, { "\xC3\xB2", 0xF2 }, { "\xC3\xB3", 0xF3 }, { "\xC3\xB4", 0xF4 }, { "\xC3\xB5", 0xF5 }, { "\xC3\xB6", 0xF6 }, + { "\xC3\xB7", 0xF7 }, { "\xC3\xB8", 0xF8 }, { "\xC3\xB9", 0xF9 }, { "\xC3\xBA", 0xFA }, { "\xC3\xBB", 0xFB }, { "\xC3\xBC", 0xFC }, { "\xC3\xBD", 0xFD }, { "\xC3\xBE", 0xFE }, + { "\xC3\xBF", 0xFF }, { "\xC4\x80", 0x00 }, { "\xC4\x81", 0x01 }, { "\xC4\x82", 0x02 }, { "\xC4\x83", 0x03 }, { "\xC4\x84", 0x04 }, { "\xC4\x85", 0x05 }, { "\xC4\x86", 0x06 }, + { "\xC4\x87", 0x07 }, { "\xC4\x88", 0x08 }, { "\xC4\x89", 0x09 }, { "\xC4\x8A", 0x0A }, { "\xC4\x8B", 0x0B }, { "\xC4\x8C", 0x0C }, { "\xC4\x8D", 0x0D }, { "\xC4\x8E", 0x0E }, + { "\xC4\x8F", 0x0F }, { "\xC4\x90", 0x10 }, { "\xC4\x91", 0x11 }, { "\xC4\x92", 0x12 }, { "\xC4\x93", 0x13 }, { "\xC4\x94", 0x14 }, { "\xC4\x95", 0x15 }, { "\xC4\x96", 0x16 }, + { "\xC4\x97", 0x17 }, { "\xC4\x98", 0x18 }, { "\xC4\x99", 0x19 }, { "\xC4\x9A", 0x1A }, { "\xC4\x9B", 0x1B }, { "\xC4\x9C", 0x1C }, { "\xC4\x9D", 0x1D }, { "\xC4\x9E", 0x1E }, + { "\xC4\x9F", 0x1F }, { "\xC4\xA0", 0x20 }, { "\xC4\xA1", 0x7F }, { "\xC4\xA2", 0x80 }, { "\xC4\xA3", 0x81 }, { "\xC4\xA4", 0x82 }, { "\xC4\xA5", 0x83 }, { "\xC4\xA6", 0x84 }, + { "\xC4\xA7", 0x85 }, { "\xC4\xA8", 0x86 }, { "\xC4\xA9", 0x87 }, { "\xC4\xAA", 0x88 }, { "\xC4\xAB", 0x89 }, { "\xC4\xAC", 0x8A }, { "\xC4\xAD", 0x8B }, { "\xC4\xAE", 0x8C }, + { "\xC4\xAF", 0x8D }, { "\xC4\xB0", 0x8E }, { "\xC4\xB1", 0x8F }, { "\xC4\xB2", 0x90 }, { "\xC4\xB3", 0x91 }, { "\xC4\xB4", 0x92 }, { "\xC4\xB5", 0x93 }, { "\xC4\xB6", 0x94 }, + { "\xC4\xB7", 0x95 }, { "\xC4\xB8", 0x96 }, { "\xC4\xB9", 0x97 }, { "\xC4\xBA", 0x98 }, { "\xC4\xBB", 0x99 }, { "\xC4\xBC", 0x9A }, { "\xC4\xBD", 0x9B }, { "\xC4\xBE", 0x9C }, + { "\xC4\xBF", 0x9D }, { "\xC5\x80", 0x9E }, { "\xC5\x81", 0x9F }, { "\xC5\x82", 0xA0 }, { "\xC5\x83", 0xAD } + }; + return hex_map; + } + + // len must be available + bool inline str_is_equal(const char* str1, const char* str2, size_t len) { + for (size_t i = 0; i < len; ++i) { + if (str1[i] != str2[i]) { + return false; + } + } + return true; + } + + std::vector bpe_gpt2_preprocess(const std::string& text) { + static std::unordered_map< unsigned char, std::string> byte_encoder = bytes_to_unicode(); + std::vector bpe_words; + std::vector bpe_encoded_words; + + std::string token=""; + const char *raw_text_p 
= text.c_str();
+        // GPT2 system regex: 's|'t|'re|'ve|'m|'ll|'d| ?\p{L}+| ?\p{N}+| ?[^\s\p{L}\p{N}]+|\s+(?!\S)|\s+
+        bool collecting_numeric = false;
+        bool collecting_letter = false;
+        bool collecting_special = false;
+        bool collecting_whitespace_lookahead = false;
+        bool collecting = false;
+
+        std::vector<CNCTString> text_utf;
+        text_utf.reserve(text.size());
+        bpe_words.reserve(text.size());
+        bpe_encoded_words.reserve(text.size());
+
+        text_utf = CNCTUnicode::split_utf8_enhanced(text);
+
+        for (int i = 0; i < (int)text_utf.size(); i++) {
+            const CNCTString &utf_char = text_utf[i];
+            bool split_condition = false;
+            const char *text_pos = raw_text_p + utf_char.seq_offset_bytes;
+            int bytes_remain = strlen(text_pos);
+            // forward backward lookups
+            const CNCTString &utf_char_next      = (i + 1 < (int)text_utf.size()) ? text_utf[i + 1] : CNCTString();
+            const CNCTString &utf_char_next_next = (i + 2 < (int)text_utf.size()) ? text_utf[i + 2] : CNCTString();
+            // const CNCTString &utf_char_prev = (i > 0) ? text_utf[i-1] : CNCTString();
+
+            // handling contractions
+            if (!split_condition && bytes_remain >= 2) {
+                // 's|'t|'m|'d
+                if (utf_char == '\'' && (utf_char_next == 's' || utf_char_next == 't' || utf_char_next == 'm' || utf_char_next == 'd')) {
+                    split_condition = true;
+                }
+                if (split_condition) {
+                    if (token.size()) {
+                        bpe_words.emplace_back(token); // push previous content as token
+                    }
+                    token = utf_char.str + utf_char_next.str;
+                    bpe_words.emplace_back(token); // the contraction
+                    token = "";
+                    i++;
+                    continue;
+                }
+            }
+            if (!split_condition && bytes_remain >= 3) {
+                // 're|'ve|'ll - both following characters have to match
+                if (utf_char == '\'' && (
+                    (utf_char_next == 'r' && utf_char_next_next == 'e') ||
+                    (utf_char_next == 'v' && utf_char_next_next == 'e') ||
+                    (utf_char_next == 'l' && utf_char_next_next == 'l'))
+                ) {
+                    split_condition = true;
+                }
+                if (split_condition) {
+                    // current token + next token can be defined
+                    if (token.size()) {
+                        bpe_words.emplace_back(token); // push previous content as token
+                    }
+                    token = utf_char.str + utf_char_next.str + utf_char_next_next.str;
+                    bpe_words.emplace_back(token); // the contraction
+                    token = "";
+                    i += 2;
+                    continue;
+                }
+            }
+
+            if (!split_condition && !collecting) {
+                if (utf_char.char_type == CNCTCharType::LETTER || (!token.size() && utf_char == " " && utf_char_next.char_type == CNCTCharType::LETTER)) {
+                    collecting_letter = true;
+                    collecting = true;
+                } else if (utf_char.char_type == CNCTCharType::DIGIT || (!token.size() && utf_char == " " && utf_char_next.char_type == CNCTCharType::DIGIT)) {
+                    collecting_numeric = true;
+                    collecting = true;
+                } else if (
+                    ((utf_char.char_type != CNCTCharType::LETTER && utf_char.char_type != CNCTCharType::DIGIT) && (utf_char.char_type != CNCTCharType::WHITESPACE)) ||
+                    (!token.size() && utf_char == " " && utf_char_next.char_type != CNCTCharType::LETTER && utf_char_next.char_type != CNCTCharType::DIGIT && utf_char_next.char_type != CNCTCharType::WHITESPACE)
+                ) {
+                    collecting_special = true;
+                    collecting = true;
+                } else if (utf_char.char_type == CNCTCharType::WHITESPACE && utf_char_next.char_type == CNCTCharType::WHITESPACE) {
+                    collecting_whitespace_lookahead = true;
+                    collecting = true;
+                } else if (utf_char.char_type == CNCTCharType::WHITESPACE) {
+                    split_condition = true;
+                }
+            } else if (!split_condition && collecting) {
+                if (collecting_letter && utf_char.char_type != CNCTCharType::LETTER) {
+                    split_condition = true;
+                } else if (collecting_numeric && utf_char.char_type != CNCTCharType::DIGIT) {
+                    split_condition = true;
+                } else if (collecting_special && (utf_char.char_type == CNCTCharType::LETTER || utf_char.char_type == CNCTCharType::DIGIT || utf_char.char_type == CNCTCharType::WHITESPACE)) {
+                    split_condition = true;
+                } else if (collecting_whitespace_lookahead && utf_char_next.char_type != CNCTCharType::WHITESPACE) {
+                    split_condition = true;
+                }
+            }
+
+            if (utf_char_next.str.size() == 0) {
+                split_condition = true; // final
+                token += utf_char.str;
+            }
+
+            if (split_condition) {
+                if (token.size()) {
+                    bpe_words.emplace_back(token);
+                }
+                token = utf_char.str;
+                collecting = false;
+                collecting_letter = false;
+                collecting_numeric = false;
+                collecting_special = false;
+                collecting_whitespace_lookahead = false;
+            } else {
+                token += utf_char.str;
+            }
+        }
+
+        // map every byte of each word to its printable byte-encoding
+        for (std::string& word : bpe_words) {
+            std::string encoded_token = "";
+            for (char& c : word) {
+                encoded_token += byte_encoder[c];
+            }
+            bpe_encoded_words.emplace_back(encoded_token);
+        }
+
+        return bpe_encoded_words;
+    }
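+    // Example: bpe_gpt2_preprocess("Hello world") first splits the text into
+    // the words "Hello" and " world" (GPT2 keeps the leading space with the
+    // word), then byte-encodes them to "Hello" and "Ġworld" - the 0x20 byte
+    // maps to "Ġ" (U+0120) in bytes_to_unicode() above. decode_token() below
+    // reverses that mapping.
+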
+    // decoder (for one token)
+    std::string decode_token(const std::string& token) {
+        static std::unordered_map< std::string, unsigned char> byte_decoder = unicode_to_bytes();
+        std::string decoded_token = "";
+        auto unicode_sequences = CNCTUnicode::split_utf8(token);
+        for (auto& unicode_sequence : unicode_sequences) {
+            decoded_token += byte_decoder[unicode_sequence];
+        }
+
+        return decoded_token;
+    }
+
+    const gpt2bpe_vocab & vocab_;
+    std::vector<ggllm_bpe_symbol> symbols_;
+    std::vector<ggllm_bpe_symbol> symbols_final;
+    ggllm_bpe_bigram::queue work_queue_;
+    bool flag_g2ws = false;
+};
+
+static std::vector<gpt2bpe_vocab::id> gpt2bpe_tokenize(const gpt2bpe_vocab & vocab, const std::string & text, bool bos, bool g2ws ) {
+    gpt2bpe_tokenizer tokenizer(vocab, g2ws);
+    std::vector<gpt2bpe_vocab::id> output;
+
+    if (text.empty()) {
+        return output;
+    }
+
+    if (bos && vocab.special_bos_id != -1) {
+        output.push_back(vocab.special_bos_id);
+    }
+
+    tokenizer.tokenize(text, output);
+    return output;
+}
+
+#endif // CMPNCT_GPT2BPE
diff --git a/examples/gptneox-wip/falcon-main.cpp b/examples/gptneox-wip/falcon-main.cpp
new file mode 100644
index 000000000..43b6a29f3
--- /dev/null
+++ b/examples/gptneox-wip/falcon-main.cpp
@@ -0,0 +1,1111 @@
+#include "ggml.h"
+#include "cmpnct_gpt2bpe.hpp"
+
+// standard headers used below (restored; the exact original list was lost)
+#include <algorithm>
+#include <cmath>
+#include <cstdint>
+#include <cstdio>
+#include <cstring>
+#include <fstream>
+#include <iterator>
+#include <map>
+#include <random>
+#include <string>
+#include <thread>
+#include <vector>
+
+#if defined(_MSC_VER)
+#pragma warning(disable: 4244 4267) // possible loss of data
+#endif
+
+// default hparams
+struct falcon_hparams {
+    size_t n_merges = 0;
+    size_t n_vocab = 0;
+    uint32_t n_ctx = 0;
+    uint32_t n_embd = 0;
+    uint32_t n_head = 0;
+    uint32_t n_head_kv = 1; // needs to be 1 for the 7B model
+    uint32_t n_ff = 0;
+    uint32_t n_block = 0;
+    float norm_eps = 1e-5;
+};
+struct falcon_block {
+    // normalization
+    struct ggml_tensor* input_layernorm;
+    struct ggml_tensor* input_layernorm_b;
+    struct ggml_tensor* attention_norm;   // Falcon-40B only
+    struct ggml_tensor* attention_norm_b; // Falcon-40B only
+
+    // attention
+    struct ggml_tensor* query_key_value;
+    struct ggml_tensor* wo;
+
+    // ff
+    struct ggml_tensor* ffn_up;
+    struct ggml_tensor* ffn_down;
+};
+
+struct falcon_model {
+    falcon_hparams hparams;
+
+    struct ggml_tensor* tok_embeddings;
+    struct ggml_tensor* output_norm;
+    struct ggml_tensor* output_norm_b;
+    struct ggml_tensor* lm_head;
+
+    std::vector<falcon_block> blocks;
+
+    // key + value memory
+    struct ggml_tensor* memory_k;
+    struct ggml_tensor* memory_v;
+
+    struct gguf_context * ggufctx;
+    struct ggml_context * ctx;
+    struct ggml_context * kvctx;
+
+    std::map<std::string, struct ggml_tensor*> tensors;
+};
+
+struct gpt_params {
+    int32_t seed = -1; // RNG seed
+    int32_t n_threads = 
std::min(4, (int32_t) std::thread::hardware_concurrency()); + uint32_t n_predict = 200; // new tokens to predict + uint32_t n_batch = 512; // batch size for prompt processing + + // sampling parameters + int32_t top_k = 40; + float top_p = 1.0f; + float temp = 0.8f; + int32_t repeat_last_n = 64; + float repeat_penalty = 1.02f; + + std::string model = ""; // model path + std::string prompt = ""; + + std::string token_test = ""; + bool interactive = false; + int32_t interactive_port = -1; + int32_t n_gpu_layers = 0; +}; + +void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { + fprintf(stderr, "usage: %s [options]\n", argv[0]); + fprintf(stderr, "\n"); + fprintf(stderr, "options:\n"); + fprintf(stderr, " -h, --help show this help message and exit\n"); + fprintf(stderr, " -s SEED, --seed SEED RNG seed (default: -1)\n"); + fprintf(stderr, " -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); + fprintf(stderr, " -ngl N, --gpu-layers N number of layers to offload to GPU on supported models (default: %d)\n", params.n_gpu_layers); + fprintf(stderr, " -p PROMPT, --prompt PROMPT\n"); + fprintf(stderr, " prompt to start generation with (default: random)\n"); + fprintf(stderr, " -f FNAME, --file FNAME\n"); + fprintf(stderr, " load prompt from a file\n"); + fprintf(stderr, " -tt TOKEN_TEST, --token_test TOKEN_TEST\n"); + fprintf(stderr, " test tokenization\n"); + fprintf(stderr, " -n N, --n_predict N number of tokens to predict (default: %d)\n", params.n_predict); + fprintf(stderr, " --top_k N top-k sampling, 0 = n_vocab (default: %d)\n", params.top_k); + fprintf(stderr, " --top_p N top-p sampling (default: %.1f)\n", params.top_p); + fprintf(stderr, " --temp N temperature (default: %.1f)\n", params.temp); + fprintf(stderr, " --repeat-last-n N last n tokens to consider for penalize (default: %d, 0 = disabled)\n", params.repeat_last_n); + fprintf(stderr, " --repeat-penalty N penalize repeat sequence of tokens (default: %.2f, 1.0 = disabled)\n", (double)params.repeat_penalty); + fprintf(stderr, " -b N, --batch_size N batch size for prompt processing (default: %d)\n", params.n_batch); + fprintf(stderr, " -m FNAME, --model FNAME\n"); + fprintf(stderr, " model path (default: %s)\n", params.model.c_str()); + fprintf(stderr, "\n"); +} + +// Function to check if the next argument exists +std::string get_next_arg(int& i, int argc, char** argv, const std::string& flag, gpt_params& params) { + if (i + 1 < argc && argv[i + 1][0] != '-') { + return argv[++i]; + } else { + fprintf(stderr, "error: %s requires one argument.\n", flag.c_str()); + gpt_print_usage(argc, argv, params); + exit(0); + } +} + +bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { + for (int i = 1; i < argc; i++) { + std::string arg = argv[i]; + + if (arg == "-s" || arg == "--seed") { + params.seed = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-t" || arg == "--threads") { + params.n_threads = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-ngl" || arg == "--gpu-layers" || arg == "--n-gpu-layers") { + params.n_gpu_layers = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-p" || arg == "--prompt") { + params.prompt = get_next_arg(i, argc, argv, arg, params); + } else if (arg == "-n" || arg == "--n_predict") { + params.n_predict = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "--top_k") { + params.top_k = std::stoi(get_next_arg(i, argc, argv, arg, params)); 
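+            // note: get_next_arg() consumes the flag's value and advances i;
+            // a missing value prints usage and exits, so the branches in this
+            // chain can assume their argument exists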
+ } else if (arg == "--top_p") { + params.top_p = std::stof(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "--temp") { + params.temp = std::stof(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "--repeat-last-n") { + params.repeat_last_n = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "--repeat-penalty") { + params.repeat_penalty = std::stof(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-b" || arg == "--batch_size") { + params.n_batch= std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-m" || arg == "--model") { + params.model = get_next_arg(i, argc, argv, arg, params); + } else if (arg == "-i" || arg == "--interactive") { + params.interactive = true; + } else if (arg == "-ip" || arg == "--interactive-port") { + params.interactive = true; + params.interactive_port = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-h" || arg == "--help") { + gpt_print_usage(argc, argv, params); + exit(0); + } else if (arg == "-f" || arg == "--file") { + get_next_arg(i, argc, argv, arg, params); + std::ifstream file(argv[i]); + if (!file) { + fprintf(stderr, "error: failed to open file '%s'\n", argv[i]); + break; + } + std::copy(std::istreambuf_iterator(file), std::istreambuf_iterator(), back_inserter(params.prompt)); + if (params.prompt.back() == '\n') { + params.prompt.pop_back(); + } + } else if (arg == "-tt" || arg == "--token_test") { + params.token_test = get_next_arg(i, argc, argv, arg, params); + } + else { + fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); + gpt_print_usage(argc, argv, params); + exit(0); + } + } + + return true; +} + +gpt2bpe_vocab::id sample_top_k_top_p_repeat( + const gpt2bpe_vocab & vocab, + const float * logits, + const int32_t * last_n_tokens_data, + size_t last_n_tokens_data_size, + int top_k, + double top_p, + double temp, + int repeat_last_n, + float repeat_penalty, + std::mt19937 & rng) { + + int n_logits = vocab.id_to_token.size(); + + const auto * plogits = logits; + + const auto last_n_tokens = std::vector(last_n_tokens_data, last_n_tokens_data + last_n_tokens_data_size); + + if (temp <= 0) { + // select the token with the highest logit directly + float max_logit = plogits[0]; + gpt2bpe_vocab::id max_id = 0; + + for (int i = 1; i < n_logits; ++i) { + if (plogits[i] > max_logit) { + max_logit = plogits[i]; + max_id = i; + } + } + return max_id; + } + + + std::vector> logits_id; + logits_id.reserve(n_logits); + + { + const float scale = 1.0f/temp; + for (int i = 0; i < n_logits; ++i) { + // repetition penalty from ctrl paper (https://arxiv.org/abs/1909.05858) + // credit https://github.com/facebookresearch/llama/compare/main...shawwn:llama:main + if (repeat_last_n > 0 && std::find(last_n_tokens.end()-repeat_last_n, last_n_tokens.end(), i) != last_n_tokens.end()) { + // if score < 0 then repetition penalty has to multiplied to reduce the previous token probability + if (plogits[i] < 0.0f) { + logits_id.push_back(std::make_pair(plogits[i]*scale*repeat_penalty, i)); + } else { + logits_id.push_back(std::make_pair(plogits[i]*scale/repeat_penalty, i)); + } + } else { + logits_id.push_back(std::make_pair(plogits[i]*scale, i)); + } + } + } + + // find the top K tokens + std::partial_sort( + logits_id.begin(), + logits_id.begin() + top_k, logits_id.end(), + [](const std::pair & a, const std::pair & b) { + return a.first > b.first; + }); + + logits_id.resize(top_k); + + double maxl = -INFINITY; + for (const auto & kv : logits_id) { + maxl = 
std::max(maxl, kv.first); + } + + // compute probs for the top K tokens + std::vector probs; + probs.reserve(logits_id.size()); + + double sum = 0.0; + for (const auto & kv : logits_id) { + double p = exp(kv.first - maxl); + probs.push_back(p); + sum += p; + } + + // normalize the probs + for (auto & p : probs) { + p /= sum; + } + + if (top_p < 1.0f) { + double cumsum = 0.0f; + for (int i = 0; i < top_k; i++) { + cumsum += probs[i]; + if (cumsum >= top_p) { + top_k = i + 1; + probs.resize(top_k); + logits_id.resize(top_k); + break; + } + } + + cumsum = 1.0/cumsum; + for (int i = 0; i < (int) probs.size(); i++) { + probs[i] *= cumsum; + } + } + +// printf("\n"); +// for (int i = 0; i < (int) probs.size(); i++) { +// for (int i = 0; i < 10; i++) { +// printf("%d: '%s' %f\n", i, vocab.id_to_token.at(logits_id[i].second).c_str(), probs[i]); +// } + + std::discrete_distribution<> dist(probs.begin(), probs.end()); + int idx = dist(rng); + + return logits_id[idx].second; + +} + +struct ggml_tensor * get_tensor_ex( struct ggml_context * ctx, std::string name){ + + struct ggml_tensor * cur = ggml_get_tensor(ctx, name.c_str()); + if( cur == NULL ) { + fprintf(stdout, "%s: tensor '%s' not found!\n", __func__, name.c_str()); + } else { +// fprintf(stdout, "%s: n_dims = %d, name = '%s'\n", __func__, cur->n_dims, cur->name); + } + + return cur; +} + +// load the model's weights from a file +bool falcon_model_load(const std::string & fname, falcon_model & model, gpt2bpe_vocab & vocab) { + printf("%s: loading model from '%s'..\n", __func__, fname.c_str()); + + model.ctx = NULL; + + struct gguf_init_params ggufparams = { + /*.no_alloc = */ false, + /*.ctx = */ &model.ctx, + }; + + auto & ggufctx = model.ggufctx; + + ggufctx = gguf_init_from_file(fname.c_str(), ggufparams); + + if (!ggufctx) { + fprintf(stderr, "%s: gguf_init_from_file() failed\n", __func__); + return false; + } + + fprintf(stdout, "%s: gguf version = %d\n", __func__, gguf_get_version(ggufctx)); + fprintf(stdout, "%s: gguf alignment = %zu\n", __func__, gguf_get_alignment(ggufctx)); + fprintf(stdout, "%s: gguf data offset = %zu\n", __func__, gguf_get_data_offset(ggufctx)); + + // print all kv + #if 0 + { + const int n_kv = gguf_get_n_kv(ggufctx); + + fprintf(stdout, "%s: n_kv: %d\n", __func__, n_kv); + + for (int i = 0; i < n_kv; ++i) { + const char * key = gguf_get_key(ggufctx, i); + + fprintf(stdout, "%s: kv[%d]: key = %s\n", __func__, i, key); + } + } + #endif + + // print some standard metadata + { + int keyidx; + + keyidx = gguf_find_key(ggufctx, "general.name"); + if (keyidx != -1) { fprintf(stdout, "%s: model name = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.description"); + if (keyidx != -1) { fprintf(stdout, "%s: model description = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.author"); + if (keyidx != -1) { fprintf(stdout, "%s: model author = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.license"); + if (keyidx != -1) { fprintf(stdout, "%s: model license = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.architecture"); + if (keyidx != -1) { fprintf(stdout, "%s: model architecture = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.file_type"); + if (keyidx != -1) { fprintf(stdout, "%s: model file type = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = 
gguf_find_key(ggufctx, "gptneox.tensor_data_layout"); + if (keyidx != -1) { fprintf(stdout, "%s: model data layout = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.source.hugginface.repository"); + if (keyidx != -1) { fprintf(stdout, "%s: model source HF repo = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + } + + // check required metadata + { + int keyidx; + + // check model architecture kv + keyidx = gguf_find_key(ggufctx, "general.architecture"); + if (keyidx != -1) { + if ( strcmp(gguf_get_val_str(ggufctx, keyidx), "falcon") != 0) { + fprintf(stdout, "%s: model architecture not supported!\n", __func__); + return false; + } + } else { + fprintf(stdout, "%s: gguf model architecture not found!\n", __func__); + return false; + } + + // check model tensor data layout kv + keyidx = gguf_find_key(ggufctx, "falcon.tensor_data_layout"); + if (keyidx != -1) { + if ( strcmp(gguf_get_val_str(ggufctx, keyidx), "jploski") != 0) { + fprintf(stdout, "%s: model tensor data layout not supported!\n", __func__); + return false; + } + } else { + fprintf(stdout, "%s: gguf model tensor data layout not found!\n", __func__); + return false; + } + + } + + // load hparams + { + auto & hparams = model.hparams; + + bool ok = true; + int keyidx; + + if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.context_length"); + if (keyidx != -1) { hparams.n_ctx = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } + + if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.embedding_length"); + if (keyidx != -1) { hparams.n_embd = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } + + if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.attention.head_count"); + if (keyidx != -1) { hparams.n_head = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } + + if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.feed_forward_length"); + if (keyidx != -1) { hparams.n_ff = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } + + if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.block_count"); + if (keyidx != -1) { hparams.n_block = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } + + if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.attention.layer_norm_epsilon"); + if (keyidx != -1) { hparams.norm_eps= gguf_get_val_f32(ggufctx, keyidx); } else { ok = false; } } + + if (!ok) { + fprintf(stderr, "%s: required hparam missing!\n", __func__); + return false; + } + + keyidx = gguf_find_key(ggufctx, "falcon.attention.head_count_kv"); + if (keyidx != -1) { hparams.n_head_kv = gguf_get_val_u32(ggufctx, keyidx); } + + + printf("%s: n_ctx = %d\n", __func__, hparams.n_ctx); + printf("%s: n_embd = %d\n", __func__, hparams.n_embd); + printf("%s: n_head = %d\n", __func__, hparams.n_head); + printf("%s: n_head_kv = %d\n", __func__, hparams.n_head_kv); + printf("%s: n_block = %d\n", __func__, hparams.n_block); + printf("%s: norm_eps = %g\n", __func__, hparams.norm_eps); + + } + + // load vocab + { + auto & hparams = model.hparams; + + int keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.model"); + + if (keyidx != -1) { + if ( strcmp(gguf_get_val_str(ggufctx, keyidx), "gpt2") != 0) { + fprintf(stdout, "%s: tokenizer model not supported!\n", __func__); + return false; + } + } else { + fprintf(stdout, "%s: tokenizer model not found!\n", __func__); + return false; + } + + + int tokens_keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.tokens"); + + if (tokens_keyidx == -1) { + fprintf(stdout, "%s: gpt2 tokenizer vocab not found!\n", __func__); + return false; + } + + 
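+        // Each entry of tokenizer.ggml.merges is a space-separated BPE merge
+        // pair; e.g. a hypothetical entry "Ġ t" is split below into the pair
+        // ("Ġ", "t"), and its array index serves as the merge rank (lower
+        // index = higher priority during tokenization).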
int merges_keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.merges"); + + if (merges_keyidx == -1) { + fprintf(stdout, "%s: gpt2 tokenizer merges not found!\n", __func__); + return false; + } + + hparams.n_vocab = gguf_get_arr_n(ggufctx,tokens_keyidx); + hparams.n_merges = gguf_get_arr_n(ggufctx,merges_keyidx); + + fprintf(stdout, "%s: gpt2 tokenizer vocab = %zu\n", __func__, hparams.n_vocab); + fprintf(stdout, "%s: gpt2 tokenizer merges = %zu\n", __func__, hparams.n_merges); + + for (size_t i = 0; i < hparams.n_vocab; i++) { + std::string word = gguf_get_arr_str(ggufctx, tokens_keyidx, i); + +// printf("token %d = '%s'\n",i,word.c_str() ); + + vocab.token_to_id[word] = i; + vocab.id_to_token[i] = word; + + if( vocab.id_to_token[i] == "\n" ) { + vocab.linefeed_id = i; + } + } + + std::vector> bpe_merges; + + for (size_t i = 0; i < hparams.n_merges; i++) { + + std::string word = gguf_get_arr_str(ggufctx, merges_keyidx, i); + + // Split the merges + std::string first, second; + size_t pos = word.find(' ', 1); // Start the search from the second character + if (pos != std::string::npos) { + first = word.substr(0, pos); + second = word.substr(pos + 1); + } + + bpe_merges.push_back(std::make_pair(first, second)); + } + + vocab.populate_bpe_ranks(bpe_merges); + + + keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.bos_token_id"); if( keyidx != -1 ) { vocab.special_bos_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } + keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.eos_token_id"); if( keyidx != -1 ) { vocab.special_eos_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } + keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.unknown_token_id"); if( keyidx != -1 ) { vocab.special_unk_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } + keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.separator_token_id"); if( keyidx != -1 ) { vocab.special_sep_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } + keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.padding_token_id"); if( keyidx != -1 ) { vocab.special_pad_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } + + if( vocab.special_bos_id != -1 ) { fprintf(stdout, "%s: BOS token = %d '%s'\n", __func__, vocab.special_bos_id, vocab.id_to_token[vocab.special_bos_id].c_str() ); } + if( vocab.special_eos_id != -1 ) { fprintf(stdout, "%s: EOS token = %d '%s'\n", __func__, vocab.special_eos_id, vocab.id_to_token[vocab.special_eos_id].c_str() ); } + if( vocab.special_unk_id != -1 ) { fprintf(stdout, "%s: UNK token = %d '%s'\n", __func__, vocab.special_unk_id, vocab.id_to_token[vocab.special_unk_id].c_str() ); } + if( vocab.special_sep_id != -1 ) { fprintf(stdout, "%s: SEP token = %d '%s'\n", __func__, vocab.special_sep_id, vocab.id_to_token[vocab.special_sep_id].c_str() ); } + if( vocab.special_pad_id != -1 ) { fprintf(stdout, "%s: PAD token = %d '%s'\n", __func__, vocab.special_pad_id, vocab.id_to_token[vocab.special_pad_id].c_str() ); } + if( vocab.linefeed_id != -1 ) { fprintf(stdout, "%s: LF token = %d\n", __func__, vocab.linefeed_id ); } + + } + + + auto & ctx = model.ctx; + size_t ctx_size = ggml_get_mem_size(ctx); + + printf("%s: ggml ctx size = %6.2f MB\n", __func__, ctx_size/(1024.0*1024.0)); + + // print tensor info + #if 0 + { + const int n_tensors = gguf_get_n_tensors(ggufctx); + + fprintf(stdout, "%s: n_tensors: %d\n", __func__, n_tensors); + + for (int i = 0; i < n_tensors; ++i) { + const char * name = gguf_get_tensor_name (ggufctx, i); + const size_t offset = gguf_get_tensor_offset(ggufctx, i); + + fprintf(stdout, "%s: tensor[%d]: name = %s, offset = %zu\n", 
__func__, i, name, offset); + } + } + #endif + + // prepare memory for the weights + { + + auto & hparams = model.hparams; + + const int n_block = hparams.n_block; + + model.blocks.resize(n_block); + + model.tok_embeddings = ggml_get_tensor(ctx, "token_embd.weight"); + + model.output_norm = ggml_get_tensor(ctx, "output_norm.weight"); + model.output_norm_b = ggml_get_tensor(ctx, "output_norm.bias"); + model.lm_head = ggml_get_tensor(ctx, "output.weight"); + + // map by name + model.tensors["token_embd.weight"] = model.tok_embeddings; + model.tensors["output_norm.weight"] = model.output_norm; + model.tensors["output_norm.bias"] = model.output_norm_b; + model.tensors["output.weight"] = model.lm_head; + + for (int i = 0; i < n_block; ++i) { + + auto& block = model.blocks[i]; + std::string blocknamestart = "blk." + std::to_string(i) + "."; + + block.input_layernorm = get_tensor_ex(ctx, blocknamestart + "attn_norm.weight" ); + block.input_layernorm_b = get_tensor_ex(ctx, blocknamestart + "attn_norm.bias" ); + + if ( hparams.n_head_kv == 8 ) { // Falcon-40B + block.attention_norm = get_tensor_ex(ctx, blocknamestart + "attn_norm_2.weight" ); + block.attention_norm_b = get_tensor_ex(ctx, blocknamestart + "attn_norm_2.bias" ); + } + + // query_key_value shape for config.multi_query == True: + block.query_key_value = get_tensor_ex(ctx, blocknamestart + "attn_qkv.weight" ); + block.wo = get_tensor_ex(ctx, blocknamestart + "attn_output.weight" ); + + block.ffn_up = get_tensor_ex(ctx, blocknamestart + "ffn_up.weight" ); + block.ffn_down = get_tensor_ex(ctx, blocknamestart + "ffn_down.weight" ); + + // map by name + if ( hparams.n_head_kv == 8 ) { // Falcon-40B + // Falcon-40B: + model.tensors[blocknamestart + "attn_norm.weight"] = block.input_layernorm; + model.tensors[blocknamestart + "attn_norm.bias"] = block.input_layernorm_b; + model.tensors[blocknamestart + "attn_norm_2.weight"] = block.attention_norm; + model.tensors[blocknamestart + "attn_norm_2.bias"] = block.attention_norm_b; + } else { + // Falcon-7B: + model.tensors[blocknamestart + "attn_norm.weight"] = block.input_layernorm; + model.tensors[blocknamestart + "attn_norm.bias"] = block.input_layernorm_b; + } + + model.tensors[blocknamestart + "attn_qkv.weight"] = block.query_key_value; + model.tensors[blocknamestart + "attn_output.weight"] = block.wo; + + model.tensors[blocknamestart + "ffn_up.weight"] = block.ffn_up; + model.tensors[blocknamestart + "ffn_down.weight"] = block.ffn_down; + } + } + + // key + value memory + { + const auto & kvctx = model.kvctx; + const auto & hparams = model.hparams; + + const int n_block = hparams.n_block; + const int n_ctx = hparams.n_ctx; + const int n_embd = hparams.n_embd; + + const int64_t n_mem = n_block*n_ctx; + const int64_t n_elements = n_embd*n_mem; + + // create the ggml context + { + struct ggml_init_params params = { + /*.mem_size =*/ size_t(n_elements*4+ggml_tensor_overhead()*2), + /*.mem_buffer =*/ NULL, + /*.no_alloc =*/ false, + }; + + model.kvctx = ggml_init(params); + if (!model.kvctx) { + fprintf(stderr, "%s: kv ggml_init() failed\n", __func__); + return false; + } + + } + + + model.memory_k = ggml_new_tensor_1d(kvctx, GGML_TYPE_F16, n_elements); + model.memory_v = ggml_new_tensor_1d(kvctx, GGML_TYPE_F16, n_elements); + + const size_t memory_size = ggml_nbytes(model.memory_k) + ggml_nbytes(model.memory_v); + + printf("%s: memory_size = %8.2f MB, n_mem = %" PRId64 "\n", __func__, memory_size/1024.0/1024.0, n_mem); + } + + return true; +} + + +// evaluate the transformer +// +// - model: the 
model
+// - n_threads: number of threads to use
+// - n_past:    the context size so far
+// - embd_inp:  the embeddings of the tokens in the context
+// - embd_w:    the predicted logits for the next token
+//
+bool falcon_eval(
+        const falcon_model & model,
+        const int n_threads,
+        const int n_past,
+        const std::vector<gpt2bpe_vocab::id> & embd_inp,
+              std::vector<float>             & embd_w,
+        size_t                               & mem_per_token) {
+
+    const int N = embd_inp.size();
+
+    const auto & hparams = model.hparams;
+
+    const int n_embd    = hparams.n_embd;
+    const int n_block   = hparams.n_block;
+    const int n_ctx     = hparams.n_ctx;
+    const int n_head    = hparams.n_head;
+    const int n_head_kv = hparams.n_head_kv;
+    const int n_vocab   = hparams.n_vocab;
+    const size_t head_dim = n_embd / n_head;
+
+    static size_t buf_size = 256u*1024*1024;
+    static void * buf = malloc(buf_size);
+
+    // use 2 scratch buffers
+    // TODO: very hacky solution - reimplement in a more elegant way
+    static size_t scr0_size = 256u*1024*1024;
+    static void * scr0 = malloc(scr0_size);
+
+    static size_t scr1_size = 256u*1024*1024;
+    static void * scr1 = malloc(scr1_size);
+
+    if (mem_per_token > 0 && mem_per_token*N > buf_size) {
+        const size_t buf_size_new = 1.1*(mem_per_token*N); // add 10% to account for ggml object overhead
+        //printf("\n%s: reallocating buffer from %zu to %zu bytes\n", __func__, buf_size, buf_size_new);
+
+        // reallocate
+        buf_size = buf_size_new;
+        buf = realloc(buf, buf_size);
+        if (buf == nullptr) {
+            fprintf(stderr, "%s: failed to allocate %zu bytes\n", __func__, buf_size);
+            return false;
+        }
+    }
+
+    struct ggml_init_params params = {
+        /*.mem_size   =*/ buf_size,
+        /*.mem_buffer =*/ buf,
+        /*.no_alloc   =*/ false,
+    };
+
+    struct ggml_context * ctx0 = ggml_init(params);
+    struct ggml_cgraph gf = {};
+//    gf.n_threads = n_threads;
+
+    struct ggml_tensor * embd = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, N);
+    memcpy(embd->data, embd_inp.data(), N*ggml_element_size(embd));
+
+    // wte
+    struct ggml_tensor * inpL = ggml_get_rows(ctx0, model.tok_embeddings, embd);
+//    struct ggml_tensor * repeat_dummy = ggml_new_tensor_3d(ctx0, inpL->type, head_dim, N + n_past, n_head);
+
+    ggml_type wtype = GGML_TYPE_F32;
+    const int sizeof_wtype = ggml_type_sizef(wtype);
+
+    for (int il = 0; il < n_block; ++il) {
+        struct ggml_tensor * cur;
+        struct ggml_tensor * layernorm_output;
+
+        ggml_set_scratch(ctx0, { 0, scr0_size, scr0, });
+
+        // self-attention
+        {
+            layernorm_output = ggml_norm(ctx0, inpL);
+
+            layernorm_output = ggml_add(ctx0,
+                    ggml_mul(ctx0,
+                        ggml_repeat(ctx0, model.blocks[il].input_layernorm, layernorm_output),
+                        layernorm_output),
+                    ggml_repeat(ctx0, model.blocks[il].input_layernorm_b, layernorm_output));
+
+            if ( hparams.n_head_kv == 8 ) { // Falcon-40B
+                cur = ggml_norm(ctx0, inpL);
+
+                cur = ggml_add(ctx0,
+                        ggml_mul(ctx0,
+                            ggml_repeat(ctx0, model.blocks[il].attention_norm, cur),
+                            cur),
+                        ggml_repeat(ctx0, model.blocks[il].attention_norm_b, cur));
+            } else { // Falcon-7B
+                cur = layernorm_output;
+            }
+
+            // compute QKV
+            cur = ggml_mul_mat(ctx0, model.blocks[il].query_key_value, cur);
+
+            // Note that the strides for Kcur, Vcur are set up so that the
+            // resulting views are misaligned with the tensor's storage
+            // (by applying the K/V offset we shift the tensor's original
+            // view to stick out behind the viewed QKV tensor's allocated
+            // memory, so to say). This is ok because no actual accesses
+            // happen to that out-of-range memory, but it can require some
+            // trickery when trying to accurately dump these views for
+            // debugging.
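+            // The fused QKV output is laid out per row as
+            //   [ Q (n_head * head_dim) | K (n_head_kv * head_dim) | V (n_head_kv * head_dim) ]
+            // so a row holds head_dim * (n_head + 2*n_head_kv) values. As a
+            // worked example with Falcon-7B shapes (n_head = 71, n_head_kv = 1,
+            // head_dim = 64): a row is 64*73 = 4672 values, Q spans offsets
+            // 0..4543, K starts at 64*71 = 4544 and V at 64*72 = 4608, which
+            // is exactly what the three views below select.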
+ + struct ggml_tensor * Qcur = ggml_view_3d( + ctx0, cur, head_dim, n_head, N, + head_dim * sizeof_wtype, + head_dim * (n_head + 2 * n_head_kv) * sizeof_wtype, + 0); + + struct ggml_tensor * Kcur = ggml_view_3d( + ctx0, cur, head_dim, n_head_kv, N, + head_dim * sizeof_wtype, + head_dim * (n_head + 2 * n_head_kv) * sizeof_wtype, + head_dim * n_head * sizeof_wtype); + + struct ggml_tensor * Vcur = ggml_view_3d( + ctx0, cur, head_dim, n_head_kv, N, + head_dim * sizeof_wtype, + head_dim * (n_head + 2 * n_head_kv) * sizeof_wtype, + head_dim * (n_head + n_head_kv) * sizeof_wtype); + + // using mode = 2 for neox mode + Qcur = ggml_rope_inplace(ctx0, Qcur, n_past, head_dim, 2, 0); + Kcur = ggml_rope_inplace(ctx0, Kcur, n_past, head_dim, 2, 0); + + // store key and value to memory + { + struct ggml_tensor* k = ggml_view_1d( + ctx0, model.memory_k, N * n_head_kv * head_dim, + (ggml_element_size(model.memory_k) * n_head_kv * head_dim) * + (il * n_ctx + n_past)); + struct ggml_tensor* v = ggml_view_1d( + ctx0, model.memory_v, N * n_head_kv * head_dim, + (ggml_element_size(model.memory_v) * n_head_kv * head_dim) * + (il * n_ctx + n_past)); + + ggml_build_forward_expand(&gf, ggml_cpy(ctx0, Kcur, k)); + ggml_build_forward_expand(&gf, ggml_cpy(ctx0, Vcur, v)); + } + + struct ggml_tensor * K = ggml_permute( + ctx0, + ggml_reshape_3d( + ctx0, + ggml_view_1d(ctx0, model.memory_k, (n_past + N) * n_head_kv * head_dim, + il * n_ctx * + ggml_element_size(model.memory_k) * + n_head_kv * + head_dim), + head_dim, n_head_kv, n_past + N), + 0, 2, 1, 3); + + // K * Q + +// K = ggml_cont(ctx0, ggml_repeat2(ctx0, K, repeat_dummy)); + + struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); + struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); + + // KQ_scaled = KQ / sqrt(n_embd/n_head) + struct ggml_tensor * KQ_scaled = + ggml_scale_inplace(ctx0, + KQ, + ggml_new_f32(ctx0, 1.0f/sqrt(float(head_dim))) + ); + + // KQ_masked = mask_past(KQ_scaled) + struct ggml_tensor * KQ_masked = ggml_diag_mask_inf_inplace(ctx0, KQ_scaled, n_past); + + // KQ = soft_max(KQ_masked) + struct ggml_tensor * KQ_soft_max = ggml_soft_max_inplace(ctx0, KQ_masked); + + // V_trans = Vmem.view(n_embd/n_head, n_head, n_past + N).permute(1, 2, 0, 3).contiguous() + struct ggml_tensor* V = ggml_permute( + ctx0, + ggml_reshape_3d( + ctx0, + ggml_view_1d(ctx0, model.memory_v, (n_past + N) * n_head_kv * head_dim, + il * n_ctx * + ggml_element_size(model.memory_v) * + n_head_kv * + head_dim), + head_dim, n_head_kv, n_past + N), + 0, 2, 1, 3); + +// V = ggml_cont(ctx0, ggml_transpose(ctx0, ggml_repeat2(ctx0, V, repeat_dummy))); + V = ggml_cont(ctx0, ggml_transpose(ctx0, V)); + + // KQV = transpose(V) * KQ_soft_max + struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); + + // KQV_merged = KQV.permute(0, 2, 1, 3) + struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); + + // cur = KQV_merged.contiguous().view(n_embd, N) + cur = ggml_cpy(ctx0, + KQV_merged, + ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, N)); + + // projection + { + cur = ggml_mul_mat(ctx0, + model.blocks[il].wo, + cur); + } + } + + ggml_set_scratch(ctx0, { 0, scr1_size, scr1, }); + + struct ggml_tensor* inpFF = layernorm_output; + struct ggml_tensor* attn_out = ggml_cpy( + ctx0, cur, ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, N)); + + { + cur = ggml_mul_mat(ctx0, model.blocks[il].ffn_up, inpFF); + cur = ggml_gelu(ctx0, cur); + cur = ggml_mul_mat(ctx0, model.blocks[il].ffn_down, cur); + } + + cur = ggml_add(ctx0, cur, attn_out); + cur = ggml_add(ctx0, 
cur, inpL);
+        // input for next layer
+        inpL = cur;
+    }
+
+    ggml_set_scratch(ctx0, { 0, scr0_size, scr0, });
+
+    // norm
+    {
+        inpL = ggml_norm(ctx0, inpL);
+
+        // inpL = ln_f_g*inpL + ln_f_b
+        inpL = ggml_add(ctx0,
+                ggml_mul(ctx0,
+                    ggml_repeat(ctx0, model.output_norm, inpL),
+                    inpL),
+                ggml_repeat(ctx0, model.output_norm_b, inpL));
+    }
+
+    ggml_set_scratch(ctx0, { 0, 0, nullptr, });
+
+    // lm_head
+    {
+        inpL = ggml_mul_mat(ctx0, model.lm_head, inpL);
+
+        //inpL = ggml_add(ctx0,
+        //        ggml_repeat(ctx0, model.lmh_b, inpL),
+        //        inpL);
+    }
+
+    // logits -> probs
+    //inpL = ggml_soft_max_inplace(ctx0, inpL);
+
+    // run the computation
+    ggml_build_forward_expand(&gf, inpL);
+//    ggml_graph_compute       (ctx0, &gf);
+    ggml_graph_compute_with_ctx(ctx0, &gf, n_threads);
+
+    //if (n_past%100 == 0) {
+    //    ggml_graph_print   (&gf);
+    //    ggml_graph_dump_dot(&gf, NULL, "gpt-2.dot");
+    //}
+
+    // return result for just the last token
+    embd_w.resize(n_vocab);
+    memcpy(embd_w.data(), (float *)ggml_get_data(inpL) + (n_vocab * (N - 1)), sizeof(float) * n_vocab);
+
+    if (mem_per_token == 0) {
+        mem_per_token = ggml_used_mem(ctx0)/N;
+    }
+    //printf("used_mem = %zu\n", ggml_used_mem(ctx0));
+
+    ggml_free(ctx0);
+
+    return true;
+}
+
+int main(int argc, char ** argv) {
+    ggml_time_init();
+
+    const int64_t t_main_start_us = ggml_time_us();
+
+    gpt_params params;
+
+    if (gpt_params_parse(argc, argv, params) == false) {
+        return 1;
+    }
+
+    int64_t t_load_us = 0;
+
+    gpt2bpe_vocab vocab;
+    falcon_model model;
+
+    // load the model
+    {
+        const int64_t t_start_us = ggml_time_us();
+
+        if (!falcon_model_load(params.model, model, vocab)) {
+            fprintf(stderr, "%s: failed to load model from '%s'\n", __func__, params.model.c_str());
+            return 1;
+        }
+
+        t_load_us = ggml_time_us() - t_start_us;
+    }
+
+    if (params.seed < 0) {
+        params.seed = time(NULL);
+    }
+
+    if (params.top_k == 0) {
+        params.top_k = model.hparams.n_vocab;
+    }
+
+    printf("%s: seed           = %d\n",   __func__, params.seed);
+    printf("%s: temp           = %.3f\n", __func__, params.temp);
+    printf("%s: top_k          = %d\n",   __func__, params.top_k);
+    printf("%s: top_p          = %.3f\n", __func__, params.top_p);
+    printf("%s: repeat_last_n  = %d\n",   __func__, params.repeat_last_n);
+    printf("%s: repeat_penalty = %.3f\n", __func__, params.repeat_penalty);
+
+    std::mt19937 rng(params.seed);
+
+    if (params.prompt.empty()) {
+        params.prompt = "Once upon";
+    }
+
+    std::vector<int32_t> last_n_tokens(model.hparams.n_ctx);
+    std::fill(last_n_tokens.begin(), last_n_tokens.end(), 0);
+
+    int n_past = 0;
+
+    int64_t t_sample_us  = 0;
+    int64_t t_predict_us = 0;
+
+    std::vector<float> logits;
+
+    // tokenize the prompt
+    std::vector<gpt2bpe_vocab::id> embd_inp = gpt2bpe_tokenize(vocab, params.prompt, false, false);
+
+    params.n_predict = std::min(params.n_predict, model.hparams.n_ctx - (int) embd_inp.size());
+
+    printf("%s: number of tokens in prompt = %zu\n", __func__, embd_inp.size());
+//    for (size_t i = 0; i < embd_inp.size(); i++) {
+//        printf("%s: token[%zu] = %6d, %s\n", __func__, i, embd_inp[i], vocab.id_to_token[embd_inp[i]].c_str());
+//    }
+
+    if ( model.hparams.n_ctx < params.n_predict+embd_inp.size() ) {
+        params.n_predict = model.hparams.n_ctx-embd_inp.size();
+    }
+
+    printf("%s: n_predict = %d\n", __func__, params.n_predict);
+    printf("\n");
+
+    std::vector<gpt2bpe_vocab::id> embd;
+
+    // determine the required inference memory per token:
+    size_t mem_per_token = 0;
+    falcon_eval(model, params.n_threads, 0, { 0, 1, 2, 3 }, logits, mem_per_token);
+
+    for (size_t i = embd.size(); i < embd_inp.size() + params.n_predict; i++) {
+        // predict
+        if 
(embd.size() > 0) { + const int64_t t_start_us = ggml_time_us(); + + if (!falcon_eval(model, params.n_threads, n_past, embd, logits, mem_per_token)) { + printf("Failed to predict\n"); + return 1; + } + + t_predict_us += ggml_time_us() - t_start_us; + } + + n_past += embd.size(); + embd.clear(); + + if (i >= embd_inp.size()) { + // sample next token + const int top_k = params.top_k; + const float top_p = params.top_p; + const float temp = params.temp; + const int repeat_last_n = params.repeat_last_n; + const float repeat_penalty = params.repeat_penalty; + + const int n_vocab = model.hparams.n_vocab; + + gpt2bpe_vocab::id id = 0; + + { + const int64_t t_start_sample_us = ggml_time_us(); + + id = sample_top_k_top_p_repeat(vocab, logits.data() + (logits.size() - n_vocab), last_n_tokens.data(), last_n_tokens.size(), top_k, top_p, temp, repeat_last_n, repeat_penalty, rng); + + last_n_tokens.erase(last_n_tokens.begin()); + last_n_tokens.push_back(id); + + t_sample_us += ggml_time_us() - t_start_sample_us; + } + + // add it to the context + embd.push_back(id); + } else { + // if here, it means we are still processing the input prompt + for (size_t k = i; k < embd_inp.size(); k++) { + embd.push_back(embd_inp[k]); + if (embd.size() > params.n_batch) { + break; + } + } + i += embd.size() - 1; + } + + // display text + for (auto id : embd) { + printf("%s", vocab.id_to_token[id].c_str() ); + } + fflush(stdout); + + // end of text token + if (vocab.special_eos_id != -1 && embd.back() == vocab.special_eos_id) { + break; + } + } + + // report timing + { + const int64_t t_main_end_us = ggml_time_us(); + + printf("\n\n"); + printf("%s: mem per token = %8zu bytes\n", __func__, mem_per_token); + printf("%s: load time = %8.2f ms\n", __func__, t_load_us/1000.0f); + printf("%s: sample time = %8.2f ms\n", __func__, t_sample_us/1000.0f); + printf("%s: predict time = %8.2f ms / %.2f ms per token\n", __func__, t_predict_us/1000.0f, t_predict_us/1000.0f/n_past); + printf("%s: total time = %8.2f ms\n", __func__, (t_main_end_us - t_main_start_us)/1000.0f); + } + + ggml_free(model.ctx); + + return 0; +} diff --git a/examples/gptneox-wip/gptneox-main.cpp b/examples/gptneox-wip/gptneox-main.cpp new file mode 100644 index 000000000..04af50245 --- /dev/null +++ b/examples/gptneox-wip/gptneox-main.cpp @@ -0,0 +1,1082 @@ +#include "ggml.h" +#include "cmpnct_gpt2bpe.hpp" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#if defined(_MSC_VER) +#pragma warning(disable: 4244 4267) // possible loss of data +#endif + +// default hparams +struct gpt_neox_hparams { + size_t n_merges = 0; + size_t n_vocab = 0; + uint32_t n_ctx = 0; + uint32_t n_embd = 0; + uint32_t n_head = 0; + uint32_t n_block = 0; + uint32_t n_rot = 0; // rotary_pct * (n_embd / n_head) + bool par_res = true; + float norm_eps = 1e-5; +}; + +struct gpt_neox_block { + // pre normalization + struct ggml_tensor * ln_1_g; + struct ggml_tensor * ln_1_b; + + // attention + struct ggml_tensor * c_attn_attn_w; + struct ggml_tensor * c_attn_attn_b; + + struct ggml_tensor * c_attn_proj_w; + struct ggml_tensor * c_attn_proj_b; + + // post normalization + struct ggml_tensor * ln_2_g; + struct ggml_tensor * ln_2_b; + + // ff + struct ggml_tensor * c_mlp_fc_w; + struct ggml_tensor * c_mlp_fc_b; + + struct ggml_tensor * c_mlp_proj_w; + struct ggml_tensor * c_mlp_proj_b; +}; + +struct gpt_neox_model { + gpt_neox_hparams hparams; + + // normalization + struct ggml_tensor * ln_f_g; + struct ggml_tensor * ln_f_b; + + 
struct ggml_tensor * wte; // position embedding + + struct ggml_tensor * lmh_g; // language model head + + std::vector blocks; + + // key + value memory + struct ggml_tensor * memory_k; + struct ggml_tensor * memory_v; + + // + struct gguf_context * ggufctx; + struct ggml_context * ctx; + struct ggml_context * kvctx; + + std::map tensors; +}; + +struct gpt_params { + int32_t seed = -1; // RNG seed + int32_t n_threads = std::min(4, (int32_t) std::thread::hardware_concurrency()); + uint32_t n_predict = 200; // new tokens to predict + uint32_t n_batch = 512; // batch size for prompt processing + + // sampling parameters + int32_t top_k = 40; + float top_p = 1.0f; + float temp = 0.8f; + int32_t repeat_last_n = 64; + float repeat_penalty = 1.02f; + + std::string model = ""; // model path + std::string prompt = ""; + + std::string token_test = ""; + bool interactive = false; + int32_t interactive_port = -1; + int32_t n_gpu_layers = 0; +}; + +void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { + fprintf(stderr, "usage: %s [options]\n", argv[0]); + fprintf(stderr, "\n"); + fprintf(stderr, "options:\n"); + fprintf(stderr, " -h, --help show this help message and exit\n"); + fprintf(stderr, " -s SEED, --seed SEED RNG seed (default: -1)\n"); + fprintf(stderr, " -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); + fprintf(stderr, " -ngl N, --gpu-layers N number of layers to offload to GPU on supported models (default: %d)\n", params.n_gpu_layers); + fprintf(stderr, " -p PROMPT, --prompt PROMPT\n"); + fprintf(stderr, " prompt to start generation with (default: random)\n"); + fprintf(stderr, " -f FNAME, --file FNAME\n"); + fprintf(stderr, " load prompt from a file\n"); + fprintf(stderr, " -tt TOKEN_TEST, --token_test TOKEN_TEST\n"); + fprintf(stderr, " test tokenization\n"); + fprintf(stderr, " -n N, --n_predict N number of tokens to predict (default: %d)\n", params.n_predict); + fprintf(stderr, " --top_k N top-k sampling, 0 = n_vocab (default: %d)\n", params.top_k); + fprintf(stderr, " --top_p N top-p sampling (default: %.1f)\n", params.top_p); + fprintf(stderr, " --temp N temperature (default: %.1f)\n", params.temp); + fprintf(stderr, " --repeat-last-n N last n tokens to consider for penalize (default: %d, 0 = disabled)\n", params.repeat_last_n); + fprintf(stderr, " --repeat-penalty N penalize repeat sequence of tokens (default: %.2f, 1.0 = disabled)\n", (double)params.repeat_penalty); + fprintf(stderr, " -b N, --batch_size N batch size for prompt processing (default: %d)\n", params.n_batch); + fprintf(stderr, " -m FNAME, --model FNAME\n"); + fprintf(stderr, " model path (default: %s)\n", params.model.c_str()); + fprintf(stderr, "\n"); +} + +// Function to check if the next argument exists +std::string get_next_arg(int& i, int argc, char** argv, const std::string& flag, gpt_params& params) { + if (i + 1 < argc && argv[i + 1][0] != '-') { + return argv[++i]; + } else { + fprintf(stderr, "error: %s requires one argument.\n", flag.c_str()); + gpt_print_usage(argc, argv, params); + exit(0); + } +} + +bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { + for (int i = 1; i < argc; i++) { + std::string arg = argv[i]; + + if (arg == "-s" || arg == "--seed") { + params.seed = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-t" || arg == "--threads") { + params.n_threads = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-ngl" || arg == "--gpu-layers" || arg == 
"--n-gpu-layers") { + params.n_gpu_layers = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-p" || arg == "--prompt") { + params.prompt = get_next_arg(i, argc, argv, arg, params); + } else if (arg == "-n" || arg == "--n_predict") { + params.n_predict = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "--top_k") { + params.top_k = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "--top_p") { + params.top_p = std::stof(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "--temp") { + params.temp = std::stof(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "--repeat-last-n") { + params.repeat_last_n = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "--repeat-penalty") { + params.repeat_penalty = std::stof(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-b" || arg == "--batch_size") { + params.n_batch= std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-m" || arg == "--model") { + params.model = get_next_arg(i, argc, argv, arg, params); + } else if (arg == "-i" || arg == "--interactive") { + params.interactive = true; + } else if (arg == "-ip" || arg == "--interactive-port") { + params.interactive = true; + params.interactive_port = std::stoi(get_next_arg(i, argc, argv, arg, params)); + } else if (arg == "-h" || arg == "--help") { + gpt_print_usage(argc, argv, params); + exit(0); + } else if (arg == "-f" || arg == "--file") { + get_next_arg(i, argc, argv, arg, params); + std::ifstream file(argv[i]); + if (!file) { + fprintf(stderr, "error: failed to open file '%s'\n", argv[i]); + break; + } + std::copy(std::istreambuf_iterator(file), std::istreambuf_iterator(), back_inserter(params.prompt)); + if (params.prompt.back() == '\n') { + params.prompt.pop_back(); + } + } else if (arg == "-tt" || arg == "--token_test") { + params.token_test = get_next_arg(i, argc, argv, arg, params); + } + else { + fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); + gpt_print_usage(argc, argv, params); + exit(0); + } + } + + return true; +} + +gpt2bpe_vocab::id sample_top_k_top_p_repeat( + const gpt2bpe_vocab & vocab, + const float * logits, + const int32_t * last_n_tokens_data, + size_t last_n_tokens_data_size, + int top_k, + double top_p, + double temp, + int repeat_last_n, + float repeat_penalty, + std::mt19937 & rng) { + + int n_logits = vocab.id_to_token.size(); + + const auto * plogits = logits; + + const auto last_n_tokens = std::vector(last_n_tokens_data, last_n_tokens_data + last_n_tokens_data_size); + + if (temp <= 0) { + // select the token with the highest logit directly + float max_logit = plogits[0]; + gpt2bpe_vocab::id max_id = 0; + + for (int i = 1; i < n_logits; ++i) { + if (plogits[i] > max_logit) { + max_logit = plogits[i]; + max_id = i; + } + } + return max_id; + } + + + std::vector> logits_id; + logits_id.reserve(n_logits); + + { + const float scale = 1.0f/temp; + for (int i = 0; i < n_logits; ++i) { + // repetition penalty from ctrl paper (https://arxiv.org/abs/1909.05858) + // credit https://github.com/facebookresearch/llama/compare/main...shawwn:llama:main + if (repeat_last_n > 0 && std::find(last_n_tokens.end()-repeat_last_n, last_n_tokens.end(), i) != last_n_tokens.end()) { + // if score < 0 then repetition penalty has to multiplied to reduce the previous token probability + if (plogits[i] < 0.0f) { + logits_id.push_back(std::make_pair(plogits[i]*scale*repeat_penalty, i)); + } else { + 
logits_id.push_back(std::make_pair(plogits[i]*scale/repeat_penalty, i)); + } + } else { + logits_id.push_back(std::make_pair(plogits[i]*scale, i)); + } + } + } + + // find the top K tokens + std::partial_sort( + logits_id.begin(), + logits_id.begin() + top_k, logits_id.end(), + [](const std::pair & a, const std::pair & b) { + return a.first > b.first; + }); + + logits_id.resize(top_k); + + double maxl = -INFINITY; + for (const auto & kv : logits_id) { + maxl = std::max(maxl, kv.first); + } + + // compute probs for the top K tokens + std::vector probs; + probs.reserve(logits_id.size()); + + double sum = 0.0; + for (const auto & kv : logits_id) { + double p = exp(kv.first - maxl); + probs.push_back(p); + sum += p; + } + + // normalize the probs + for (auto & p : probs) { + p /= sum; + } + + if (top_p < 1.0f) { + double cumsum = 0.0f; + for (int i = 0; i < top_k; i++) { + cumsum += probs[i]; + if (cumsum >= top_p) { + top_k = i + 1; + probs.resize(top_k); + logits_id.resize(top_k); + break; + } + } + + cumsum = 1.0/cumsum; + for (int i = 0; i < (int) probs.size(); i++) { + probs[i] *= cumsum; + } + } + +// printf("\n"); +// for (int i = 0; i < (int) probs.size(); i++) { +// for (int i = 0; i < 10; i++) { +// printf("%d: '%s' %f\n", i, vocab.id_to_token.at(logits_id[i].second).c_str(), probs[i]); +// } + + std::discrete_distribution<> dist(probs.begin(), probs.end()); + int idx = dist(rng); + + return logits_id[idx].second; + +} + +struct ggml_tensor * get_tensor_ex( struct ggml_context * ctx, std::string name){ + + struct ggml_tensor * cur = ggml_get_tensor(ctx, name.c_str()); + if( cur == NULL ) { + fprintf(stdout, "%s: tensor '%s' not found!\n", __func__, name.c_str()); + } else { +// fprintf(stdout, "%s: n_dims = %d, name = '%s'\n", __func__, cur->n_dims, cur->name); + } + + return cur; +} + +// load the model's weights from a file +bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2bpe_vocab & vocab) { + printf("%s: loading model from '%s'..\n", __func__, fname.c_str()); + + model.ctx = NULL; + + struct gguf_init_params ggufparams = { + /*.no_alloc = */ false, + /*.ctx = */ &model.ctx, + }; + + auto & ggufctx = model.ggufctx; + + ggufctx = gguf_init_from_file(fname.c_str(), ggufparams); + + if (!ggufctx) { + fprintf(stderr, "%s: gguf_init_from_file() failed\n", __func__); + return false; + } + + fprintf(stdout, "%s: gguf version = %d\n", __func__, gguf_get_version(ggufctx)); + fprintf(stdout, "%s: gguf alignment = %zu\n", __func__, gguf_get_alignment(ggufctx)); + fprintf(stdout, "%s: gguf data offset = %zu\n", __func__, gguf_get_data_offset(ggufctx)); + + // print all kv + #if 0 + { + const int n_kv = gguf_get_n_kv(ggufctx); + + fprintf(stdout, "%s: n_kv: %d\n", __func__, n_kv); + + for (int i = 0; i < n_kv; ++i) { + const char * key = gguf_get_key(ggufctx, i); + + fprintf(stdout, "%s: kv[%d]: key = %s\n", __func__, i, key); + } + } + #endif + + // print some standard metadata + { + int keyidx; + + keyidx = gguf_find_key(ggufctx, "general.name"); + if (keyidx != -1) { fprintf(stdout, "%s: model name = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.description"); + if (keyidx != -1) { fprintf(stdout, "%s: model description = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.author"); + if (keyidx != -1) { fprintf(stdout, "%s: model author = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.license"); + if (keyidx != 
-1) { fprintf(stdout, "%s: model license = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.architecture"); + if (keyidx != -1) { fprintf(stdout, "%s: model architecture = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.file_type"); + if (keyidx != -1) { fprintf(stdout, "%s: model file type = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "gptneox.tensor_data_layout"); + if (keyidx != -1) { fprintf(stdout, "%s: model data layout = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + keyidx = gguf_find_key(ggufctx, "general.source.hugginface.repository"); + if (keyidx != -1) { fprintf(stdout, "%s: model source HF repo = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } + } + + // check required metadata + { + int keyidx; + + // check model architecture kv + keyidx = gguf_find_key(ggufctx, "general.architecture"); + if (keyidx != -1) { + if ( strcmp(gguf_get_val_str(ggufctx, keyidx), "gptneox") != 0) { + fprintf(stdout, "%s: model architecture not supported!\n", __func__); + return false; + } + } else { + fprintf(stdout, "%s: gguf model architecture not found!\n", __func__); + return false; + } + + } + + // load hparams + { + auto & hparams = model.hparams; + + bool ok = true; + int keyidx; + + if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.context_length"); + if (keyidx != -1) { hparams.n_ctx = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } + + if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.embedding_length"); + if (keyidx != -1) { hparams.n_embd = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } + + if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.attention.head_count"); + if (keyidx != -1) { hparams.n_head = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } + + if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.block_count"); + if (keyidx != -1) { hparams.n_block = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } + + if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.rope.dimension_count"); + if (keyidx != -1) { hparams.n_rot = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } + + if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.use_parallel_residual"); + if (keyidx != -1) { hparams.par_res = gguf_get_val_bool(ggufctx, keyidx); } else { ok = false; } } + + if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.attention.layer_norm_epsilon"); + if (keyidx != -1) { hparams.norm_eps= gguf_get_val_f32(ggufctx, keyidx); } else { ok = false; } } + + if (!ok) { + fprintf(stderr, "%s: required hparam missing!\n", __func__); + return false; + } + + printf("%s: n_ctx = %d\n", __func__, hparams.n_ctx); + printf("%s: n_embd = %d\n", __func__, hparams.n_embd); + printf("%s: n_head = %d\n", __func__, hparams.n_head); + printf("%s: n_block = %d\n", __func__, hparams.n_block); + printf("%s: n_rot = %d\n", __func__, hparams.n_rot); + printf("%s: par_res = %d\n", __func__, hparams.par_res); + printf("%s: norm_eps = %g\n", __func__, hparams.norm_eps); + + } + + // load vocab + { + auto & hparams = model.hparams; + + int keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.model"); + + if (keyidx != -1) { + if ( strcmp(gguf_get_val_str(ggufctx, keyidx), "gpt2") != 0) { + fprintf(stdout, "%s: tokenizer model not supported!\n", __func__); + return false; + } + } else { + fprintf(stdout, "%s: tokenizer model not found!\n", __func__); + return false; + } + + + int tokens_keyidx = gguf_find_key(ggufctx, 
"tokenizer.ggml.tokens"); + + if (tokens_keyidx == -1) { + fprintf(stdout, "%s: gpt2 tokenizer vocab not found!\n", __func__); + return false; + } + + int merges_keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.merges"); + + if (merges_keyidx == -1) { + fprintf(stdout, "%s: gpt2 tokenizer merges not found!\n", __func__); + return false; + } + + hparams.n_vocab = gguf_get_arr_n(ggufctx,tokens_keyidx); + hparams.n_merges = gguf_get_arr_n(ggufctx,merges_keyidx); + + fprintf(stdout, "%s: gpt2 tokenizer vocab = %zu\n", __func__, hparams.n_vocab); + fprintf(stdout, "%s: gpt2 tokenizer merges = %zu\n", __func__, hparams.n_merges); + + for (size_t i = 0; i < hparams.n_vocab; i++) { + std::string word = gguf_get_arr_str(ggufctx, tokens_keyidx, i); + +// printf("token %d = '%s'\n",i,word.c_str() ); + + vocab.token_to_id[word] = i; + vocab.id_to_token[i] = word; + + if( vocab.id_to_token[i] == "\n" ) { + vocab.linefeed_id = i; + } + } + + std::vector> bpe_merges; + + for (size_t i = 0; i < hparams.n_merges; i++) { + + std::string word = gguf_get_arr_str(ggufctx, merges_keyidx, i); + + // Split the merges + std::string first, second; + size_t pos = word.find(' ', 1); // Start the search from the second character + if (pos != std::string::npos) { + first = word.substr(0, pos); + second = word.substr(pos + 1); + } + + bpe_merges.push_back(std::make_pair(first, second)); + } + + vocab.populate_bpe_ranks(bpe_merges); + + + keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.bos_token_id"); if( keyidx != -1 ) { vocab.special_bos_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } + keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.eos_token_id"); if( keyidx != -1 ) { vocab.special_eos_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } + keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.unknown_token_id"); if( keyidx != -1 ) { vocab.special_unk_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } + keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.separator_token_id"); if( keyidx != -1 ) { vocab.special_sep_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } + keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.padding_token_id"); if( keyidx != -1 ) { vocab.special_pad_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } + + if( vocab.special_bos_id != -1 ) { fprintf(stdout, "%s: BOS token = %d '%s'\n", __func__, vocab.special_bos_id, vocab.id_to_token[vocab.special_bos_id].c_str() ); } + if( vocab.special_eos_id != -1 ) { fprintf(stdout, "%s: EOS token = %d '%s'\n", __func__, vocab.special_eos_id, vocab.id_to_token[vocab.special_eos_id].c_str() ); } + if( vocab.special_unk_id != -1 ) { fprintf(stdout, "%s: UNK token = %d '%s'\n", __func__, vocab.special_unk_id, vocab.id_to_token[vocab.special_unk_id].c_str() ); } + if( vocab.special_sep_id != -1 ) { fprintf(stdout, "%s: SEP token = %d '%s'\n", __func__, vocab.special_sep_id, vocab.id_to_token[vocab.special_sep_id].c_str() ); } + if( vocab.special_pad_id != -1 ) { fprintf(stdout, "%s: PAD token = %d '%s'\n", __func__, vocab.special_pad_id, vocab.id_to_token[vocab.special_pad_id].c_str() ); } + if( vocab.linefeed_id != -1 ) { fprintf(stdout, "%s: LF token = %d\n", __func__, vocab.linefeed_id ); } + } + + + auto & ctx = model.ctx; + size_t ctx_size = ggml_get_mem_size(ctx); + + printf("%s: ggml ctx size = %6.2f MB\n", __func__, ctx_size/(1024.0*1024.0)); + + // print tensor info + #if 0 + { + const int n_tensors = gguf_get_n_tensors(ggufctx); + + fprintf(stdout, "%s: n_tensors: %d\n", __func__, n_tensors); + + for (int i = 0; i < n_tensors; ++i) { + const char * name = 
gguf_get_tensor_name (ggufctx, i); + const size_t offset = gguf_get_tensor_offset(ggufctx, i); + + fprintf(stdout, "%s: tensor[%d]: name = %s, offset = %zu\n", __func__, i, name, offset); + } + } + #endif + + // prepare memory for the weights + { + const int n_block = model.hparams.n_block; + + model.blocks.resize(n_block); + + model.wte = ggml_get_tensor(ctx, "token_embd.weight"); + model.ln_f_g = ggml_get_tensor(ctx, "output_norm.weight"); + model.ln_f_b = ggml_get_tensor(ctx, "output_norm.bias"); + model.lmh_g = ggml_get_tensor(ctx, "output.weight"); + + // map by name + model.tensors["token_embd.weight"] = model.wte; + model.tensors["output_norm.weight"] = model.ln_f_g; + model.tensors["output_norm.bias"] = model.ln_f_b; + model.tensors["output.weight"] = model.lmh_g; + + for (int i = 0; i < n_block; ++i) { + auto & block = model.blocks[i]; + + std::string blocknamestart = "blk." + std::to_string(i) + "."; + + block.ln_1_g = get_tensor_ex(ctx, blocknamestart + "attn_norm.weight" ); + block.ln_1_b = get_tensor_ex(ctx, blocknamestart + "attn_norm.bias" ); + + block.c_attn_attn_w = get_tensor_ex(ctx, blocknamestart + "attn_qkv.weight" ); + block.c_attn_attn_b = get_tensor_ex(ctx ,blocknamestart + "attn_qkv.bias" ); + + block.c_attn_proj_w = get_tensor_ex(ctx, blocknamestart + "attn_output.weight" ); + block.c_attn_proj_b = get_tensor_ex(ctx, blocknamestart + "attn_output.bias" ); + + block.ln_2_g = get_tensor_ex(ctx, blocknamestart + "ffn_norm.weight" ); + block.ln_2_b = get_tensor_ex(ctx, blocknamestart + "ffn_norm.bias"); + + block.c_mlp_fc_w = get_tensor_ex(ctx, blocknamestart + "ffn_up.weight" ); + block.c_mlp_fc_b = get_tensor_ex(ctx, blocknamestart + "ffn_up.bias" ); + + block.c_mlp_proj_w = get_tensor_ex(ctx, blocknamestart + "ffn_down.weight" ); + block.c_mlp_proj_b = get_tensor_ex(ctx, blocknamestart + "ffn_down.bias" ); + + // map by name + model.tensors[blocknamestart + "attn_norm.weight"] = block.ln_1_g; + model.tensors[blocknamestart + "attn_norm.bias"] = block.ln_1_b; + + model.tensors[blocknamestart + "attn_qkv.weight"] = block.c_attn_attn_w; + model.tensors[blocknamestart + "attn_qkv.bias"] = block.c_attn_attn_b; + + model.tensors[blocknamestart + "attn_output.weight"] = block.c_attn_proj_w; + model.tensors[blocknamestart + "attn_output.bias"] = block.c_attn_proj_b; + + model.tensors[blocknamestart + "ffn_norm.weight"] = block.ln_2_g; + model.tensors[blocknamestart + "ffn_norm.bias"] = block.ln_2_b; + + model.tensors[blocknamestart + "ffn_up.weight"] = block.c_mlp_fc_w; + model.tensors[blocknamestart + "ffn_up.bias"] = block.c_mlp_fc_b; + + model.tensors[blocknamestart + "ffn_down.weight"] = block.c_mlp_proj_w; + model.tensors[blocknamestart + "ffn_down.bias"] = block.c_mlp_proj_b; + } + } + + // key + value memory + { + const auto & kvctx = model.kvctx; + const auto & hparams = model.hparams; + + const int n_embd = hparams.n_embd; + const int n_block = hparams.n_block; + const int n_ctx = hparams.n_ctx; + + const int64_t n_mem = n_block*n_ctx; + const int64_t n_elements = n_embd*n_mem; + + // create the ggml context + { + struct ggml_init_params params = { + /*.mem_size =*/ size_t(n_elements*4+ggml_tensor_overhead()*2), + /*.mem_buffer =*/ NULL, + /*.no_alloc =*/ false, + }; + + model.kvctx = ggml_init(params); + if (!model.kvctx) { + fprintf(stderr, "%s: kv ggml_init() failed\n", __func__); + return false; + } + + } + + + model.memory_k = ggml_new_tensor_1d(kvctx, GGML_TYPE_F16, n_elements); + model.memory_v = ggml_new_tensor_1d(kvctx, GGML_TYPE_F16, n_elements); + + 
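+        // Each cache tensor holds n_block*n_ctx*n_embd F16 values (2 bytes each).
+        // For example, a hypothetical model with n_block = 12, n_ctx = 2048 and
+        // n_embd = 768 needs 2 * 2 * 768 * 12 * 2048 bytes, roughly 72 MB, for
+        // K and V together, which is what memory_size reports below.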
const size_t memory_size = ggml_nbytes(model.memory_k) + ggml_nbytes(model.memory_v); + + printf("%s: memory_size = %8.2f MB, n_mem = %" PRId64 "\n", __func__, memory_size/1024.0/1024.0, n_mem); + } + + return true; +} + + +// feed-forward network +ggml_tensor * gpt_neox_ff( + const gpt_neox_block &block, + ggml_context * ctx0, + ggml_tensor * inp) { + + ggml_tensor * cur = ggml_norm(ctx0, inp); + + cur = ggml_add(ctx0, ggml_mul(ctx0, ggml_repeat(ctx0, block.ln_2_g, cur), cur), ggml_repeat(ctx0, block.ln_2_b, cur)); + cur = ggml_mul_mat(ctx0, block.c_mlp_fc_w, cur); + cur = ggml_add(ctx0, ggml_repeat(ctx0, block.c_mlp_fc_b, cur), cur); + + // GELU activation + cur = ggml_gelu(ctx0, cur); + + // projection + // cur = proj_w*cur + proj_b + cur = ggml_mul_mat(ctx0, block.c_mlp_proj_w, cur); + + cur = ggml_add(ctx0, ggml_repeat(ctx0, block.c_mlp_proj_b, cur), cur); + return cur; +} + +// evaluate the transformer +// +// - model: the model +// - n_threads: number of threads to use +// - n_past: the context size so far +// - embd_inp: the embeddings of the tokens in the context +// - embd_w: the predicted logits for the next token +// +bool gpt_neox_eval( + const gpt_neox_model & model, + const int n_threads, + const int n_past, + const std::vector & embd_inp, + std::vector & embd_w, + size_t & mem_per_token) { + const int N = embd_inp.size(); + + const auto & hparams = model.hparams; + + const int n_embd = hparams.n_embd; + const int n_block = hparams.n_block; + const int n_ctx = hparams.n_ctx; + const int n_head = hparams.n_head; + const int n_vocab = hparams.n_vocab; + const int n_rot = hparams.n_rot; + + static size_t buf_size = 256u*1024*1024; + static void * buf = malloc(buf_size); + + // use 2 scratch buffers + // TODO: very hacky solution - reimplement in a more elegant way + static size_t scr0_size = 256u*1024*1024; + static void * scr0 = malloc(scr0_size); + + static size_t scr1_size = 256u*1024*1024; + static void * scr1 = malloc(scr1_size); + + if (mem_per_token > 0 && mem_per_token*N > buf_size) { + const size_t buf_size_new = 1.1*(mem_per_token*N); // add 10% to account for ggml object overhead + //printf("\n%s: reallocating buffer from %zu to %zu bytes\n", __func__, buf_size, buf_size_new); + + // reallocate + buf_size = buf_size_new; + buf = realloc(buf, buf_size); + if (buf == nullptr) { + fprintf(stderr, "%s: failed to allocate %zu bytes\n", __func__, buf_size); + return false; + } + } + + struct ggml_init_params params = { + /*.mem_size =*/ buf_size, + /*.mem_buffer =*/ buf, + /*.no_alloc =*/ false, + }; + + struct ggml_context * ctx0 = ggml_init(params); + struct ggml_cgraph gf = {}; + + struct ggml_tensor * embd = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, N); + memcpy(embd->data, embd_inp.data(), N*ggml_element_size(embd)); + + + // wte + struct ggml_tensor * inpL = ggml_get_rows(ctx0, model.wte, embd); + + for (int il = 0; il < n_block; ++il) { + struct ggml_tensor * cur; + + ggml_set_scratch(ctx0, { 0, scr0_size, scr0, }); + + // self-attention + { + { + cur = ggml_norm(ctx0, inpL); + + cur = ggml_add(ctx0, + ggml_mul(ctx0, ggml_repeat(ctx0, model.blocks[il].ln_1_g, cur), cur), + ggml_repeat(ctx0, model.blocks[il].ln_1_b, cur)); + } + + // compute QKV + { + + cur = ggml_mul_mat(ctx0, model.blocks[il].c_attn_attn_w, cur); + cur = ggml_add(ctx0, ggml_repeat(ctx0, model.blocks[il].c_attn_attn_b, cur), cur); + } + + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_3d(ctx0, cur, n_embd/n_head, n_head, N, cur->nb[1]/n_head, cur->nb[1], 0*sizeof(float)*n_embd/n_head)); + 
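+            // Qcur, Kcur and Vcur are strided views into the fused QKV result:
+            // each head's 3*head_dim slice is laid out as [q | k | v], so the
+            // three views are taken at offsets 0, 1 and 2 times head_dim within
+            // each slice (per-token stride cur->nb[1], per-head stride
+            // cur->nb[1]/n_head), unlike Falcon's block layout above.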
struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_3d(ctx0, cur, n_embd/n_head, n_head, N, cur->nb[1]/n_head, cur->nb[1], 1*sizeof(float)*n_embd/n_head)); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_3d(ctx0, cur, n_embd/n_head, n_head, N, cur->nb[1]/n_head, cur->nb[1], 2*sizeof(float)*n_embd/n_head)); + + // using mode = 2 for GPT-NeoX mode + Qcur = ggml_rope_inplace(ctx0, Qcur, n_past, n_rot, 2, 0); + Kcur = ggml_rope_inplace(ctx0, Kcur, n_past, n_rot, 2, 0); + + // store key and value to memory + { + Vcur = ggml_transpose(ctx0, ggml_reshape_2d(ctx0, Vcur, n_embd, N)); + + struct ggml_tensor * k = ggml_view_1d(ctx0, model.memory_k, N*n_embd, (ggml_element_size(model.memory_k)*n_embd)*(il*n_ctx + n_past)); + struct ggml_tensor * v = ggml_view_2d(ctx0, model.memory_v, N, n_embd, + ( n_ctx)*ggml_element_size(model.memory_v), + (il*n_ctx)*ggml_element_size(model.memory_v)*n_embd + n_past*ggml_element_size(model.memory_v)); + + ggml_build_forward_expand(&gf, ggml_cpy(ctx0, Kcur, k)); + ggml_build_forward_expand(&gf, ggml_cpy(ctx0, Vcur, v)); + } + + // Q = Qcur.contiguous().view(n_embd/n_head, n_head, N).permute(0, 2, 1, 3) + struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); + + // K = Kmem.view(n_embd/n_head, n_head, n_past + N).permute(0, 2, 1, 3) + struct ggml_tensor * K = + ggml_permute(ctx0, + ggml_reshape_3d(ctx0, + ggml_view_1d(ctx0, model.memory_k, (n_past + N)*n_embd, il*n_ctx*ggml_element_size(model.memory_k)*n_embd), + n_embd/n_head, n_head, n_past + N), + 0, 2, 1, 3); + + // K * Q + struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); + + // KQ_scaled = KQ / sqrt(n_embd/n_head) + struct ggml_tensor * KQ_scaled = + ggml_scale_inplace(ctx0, + KQ, + ggml_new_f32(ctx0, 1.0f/sqrt(float(n_embd)/n_head)) + ); + + // KQ_masked = mask_past(KQ_scaled) + struct ggml_tensor * KQ_masked = ggml_diag_mask_inf_inplace(ctx0, KQ_scaled, n_past); + + // KQ = soft_max(KQ_masked) + struct ggml_tensor * KQ_soft_max = ggml_soft_max_inplace(ctx0, KQ_masked); + + // V_trans = Vmem.view(n_embd/n_head, n_head, n_past + N).permute(1, 2, 0, 3).contiguous() + struct ggml_tensor * V = + ggml_view_3d(ctx0, model.memory_v, + n_past + N, n_embd/n_head, n_head, + n_ctx*ggml_element_size(model.memory_v), + n_ctx*ggml_element_size(model.memory_v)*n_embd/n_head, + il*n_ctx*ggml_element_size(model.memory_v)*n_embd); + + // KQV = transpose(V) * KQ_soft_max + struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); + + // KQV_merged = KQV.permute(0, 2, 1, 3) + struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); + + // cur = KQV_merged.contiguous().view(n_embd, N) + cur = ggml_cpy(ctx0, KQV_merged, ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, N)); + + // projection + { + cur = ggml_mul_mat(ctx0, model.blocks[il].c_attn_proj_w, cur); + cur = ggml_add(ctx0, ggml_repeat(ctx0, model.blocks[il].c_attn_proj_b, cur), cur); + } + } + + ggml_set_scratch(ctx0, { 0, scr1_size, scr1, }); + + if (hparams.par_res == 0) { + struct ggml_tensor * inpFF = ggml_add(ctx0, cur, inpL); + + cur = gpt_neox_ff(model.blocks[il], ctx0, inpFF); + + // input for next layer + inpL = ggml_add(ctx0, cur, inpFF); + } else { + struct ggml_tensor * inpFF = cur; + + // this is independent of the self-attention result, so it could be done in parallel to the self-attention + // note here we pass inpL instead of cur + cur = gpt_neox_ff(model.blocks[il], ctx0, inpL); + + // layer input + FF + cur = ggml_add(ctx0, cur, inpFF); + + // input for next layer + inpL = ggml_add(ctx0, cur, inpL); + } + } + + 
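+    // Residual wiring used in the loop above, per block:
+    //   par_res == 0:  h = x + Attn(LN1(x));  out = h + FF(LN2(h))
+    //   par_res == 1:  out = x + Attn(LN1(x)) + FF(LN2(x))
+    // (the parallel form lets the attention and feed-forward branches be
+    // computed independently from the same layer input)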
ggml_set_scratch(ctx0, { 0, scr0_size, scr0, }); + + // norm + { + inpL = ggml_norm(ctx0, inpL); + + // inpL = ln_f_g*inpL + ln_f_b + inpL = ggml_add(ctx0, + ggml_mul(ctx0, + ggml_repeat(ctx0, model.ln_f_g, inpL), + inpL), + ggml_repeat(ctx0, model.ln_f_b, inpL)); + } + + ggml_set_scratch(ctx0, { 0, 0, nullptr, }); + + // lm_head + { + inpL = ggml_mul_mat(ctx0, model.lmh_g, inpL); + + //inpL = ggml_add(ctx0, + // ggml_repeat(ctx0, model.lmh_b, inpL), + // inpL); + } + + // logits -> probs + //inpL = ggml_soft_max_inplace(ctx0, inpL); + + // run the computation + ggml_build_forward_expand(&gf, inpL); + ggml_graph_compute_with_ctx(ctx0, &gf, n_threads); + + //if (n_past%100 == 0) { + // ggml_graph_print (&gf); + // ggml_graph_dump_dot(&gf, NULL, "gpt-2.dot"); + //} + + //embd_w.resize(n_vocab*N); + //memcpy(embd_w.data(), ggml_get_data(inpL), sizeof(float)*n_vocab*N); + + // return result for just the last token + embd_w.resize(n_vocab); + memcpy(embd_w.data(), (float *) ggml_get_data(inpL) + (n_vocab*(N-1)), sizeof(float)*n_vocab); + + if (mem_per_token == 0) { + mem_per_token = ggml_used_mem(ctx0)/N; + } + //printf("used_mem = %zu\n", ggml_used_mem(ctx0)); + + ggml_free(ctx0); + + return true; +} + +int main(int argc, char ** argv) { + ggml_time_init(); + + const int64_t t_main_start_us = ggml_time_us(); + + gpt_params params; + + if (gpt_params_parse(argc, argv, params) == false) { + return 1; + } + + int64_t t_load_us = 0; + + gpt2bpe_vocab vocab; + gpt_neox_model model; + + // load the model + { + const int64_t t_start_us = ggml_time_us(); + + if (!gpt_neox_model_load(params.model, model, vocab)) { + fprintf(stderr, "%s: failed to load model from '%s'\n", __func__, params.model.c_str()); + return 1; + } + + t_load_us = ggml_time_us() - t_start_us; + + } + + if (params.seed < 0) { + params.seed = time(NULL); + } + + if (params.top_k == 0) { + params.top_k = model.hparams.n_vocab; + } + + printf("%s: seed = %d\n", __func__, params.seed); + printf("%s: temp = %.3f\n", __func__, params.temp); + printf("%s: top_k = %d\n", __func__, params.top_k); + printf("%s: top_p = %.3f\n", __func__, params.top_p); + printf("%s: repeat_last_n = %d\n", __func__, params.repeat_last_n); + printf("%s: repeat_penalty = %.3f\n", __func__, params.repeat_penalty); + + std::mt19937 rng(params.seed); + + if (params.prompt.empty()) { + params.prompt = "Once upon"; + } + + std::vector last_n_tokens(model.hparams.n_ctx); + std::fill(last_n_tokens.begin(), last_n_tokens.end(), 0); + + int n_past = 0; + + int64_t t_sample_us = 0; + int64_t t_predict_us = 0; + + std::vector logits; + + // tokenize the prompt + std::vector embd_inp = gpt2bpe_tokenize(vocab, params.prompt,false, false); + + params.n_predict = std::min(params.n_predict, model.hparams.n_ctx - (int) embd_inp.size()); + + printf("%s: number of tokens in prompt = %zu\n", __func__, embd_inp.size()); +// for (size_t i = 0; i < embd_inp.size(); i++) { +// printf("%s: token[%zu] = %6d, %s\n", __func__, i, embd_inp[i], vocab.id_to_token[embd_inp[i]].c_str()); +// } + + if( model.hparams.n_ctx < params.n_predict+embd_inp.size() ) { + params.n_predict = model.hparams.n_ctx-embd_inp.size(); + } + + printf("%s: n_predict = %d\n", __func__, params.n_predict); + printf("\n"); + + std::vector embd; + + // determine the required inference memory per token: + size_t mem_per_token = 0; + gpt_neox_eval(model, params.n_threads, 0, { 0, 1, 2, 3 }, logits, mem_per_token); + + for (size_t i = embd.size(); i < embd_inp.size() + params.n_predict; i++) { + // predict + if 
(embd.size() > 0) { + const int64_t t_start_us = ggml_time_us(); + + if (!gpt_neox_eval(model, params.n_threads, n_past, embd, logits, mem_per_token)) { + printf("Failed to predict\n"); + return 1; + } + + t_predict_us += ggml_time_us() - t_start_us; + } + + n_past += embd.size(); + embd.clear(); + + if (i >= embd_inp.size()) { + // sample next token + const int top_k = params.top_k; + const float top_p = params.top_p; + const float temp = params.temp; + const int repeat_last_n = params.repeat_last_n; + const float repeat_penalty = params.repeat_penalty; + + const int n_vocab = model.hparams.n_vocab; + + gpt2bpe_vocab::id id = 0; + + { + const int64_t t_start_sample_us = ggml_time_us(); + + id = sample_top_k_top_p_repeat(vocab, logits.data() + (logits.size() - n_vocab), last_n_tokens.data(), last_n_tokens.size(), top_k, top_p, temp, repeat_last_n, repeat_penalty, rng); + + last_n_tokens.erase(last_n_tokens.begin()); + last_n_tokens.push_back(id); + + t_sample_us += ggml_time_us() - t_start_sample_us; + } + + // add it to the context + embd.push_back(id); + } else { + // if here, it means we are still processing the input prompt + for (size_t k = i; k < embd_inp.size(); k++) { + embd.push_back(embd_inp[k]); + if (embd.size() > params.n_batch) { + break; + } + } + i += embd.size() - 1; + } + + // display text + for (auto id : embd) { + printf("%s", vocab.id_to_token[id].c_str() ); + } + fflush(stdout); + + // end of text token + if (vocab.special_eos_id != -1 && embd.back() == vocab.special_eos_id) { + break; + } + } + + // report timing + { + const int64_t t_main_end_us = ggml_time_us(); + + printf("\n\n"); + printf("%s: mem per token = %8zu bytes\n", __func__, mem_per_token); + printf("%s: load time = %8.2f ms\n", __func__, t_load_us/1000.0f); + printf("%s: sample time = %8.2f ms\n", __func__, t_sample_us/1000.0f); + printf("%s: predict time = %8.2f ms / %.2f ms per token\n", __func__, t_predict_us/1000.0f, t_predict_us/1000.0f/n_past); + printf("%s: total time = %8.2f ms\n", __func__, (t_main_end_us - t_main_start_us)/1000.0f); + } + + ggml_free(model.ctx); + + return 0; +} diff --git a/examples/jeopardy/graph.py b/examples/jeopardy/graph.py old mode 100644 new mode 100755 index 1b6c54bff..8bc0706b8 --- a/examples/jeopardy/graph.py +++ b/examples/jeopardy/graph.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 import matplotlib.pyplot as plt import os import csv diff --git a/examples/jeopardy/jeopardy.sh b/examples/jeopardy/jeopardy.sh old mode 100644 new mode 100755 diff --git a/examples/json-schema-to-grammar.py b/examples/json-schema-to-grammar.py old mode 100644 new mode 100755 index 2dccc118a..2a4cb65bc --- a/examples/json-schema-to-grammar.py +++ b/examples/json-schema-to-grammar.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 import argparse import json import re diff --git a/examples/llama-bench/CMakeLists.txt b/examples/llama-bench/CMakeLists.txt new file mode 100644 index 000000000..7e395afd0 --- /dev/null +++ b/examples/llama-bench/CMakeLists.txt @@ -0,0 +1,8 @@ +set(TARGET llama-bench) +add_executable(${TARGET} llama-bench.cpp) +install(TARGETS ${TARGET} RUNTIME) +target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) +target_compile_features(${TARGET} PRIVATE cxx_std_11) +if(TARGET BUILD_INFO) + add_dependencies(${TARGET} BUILD_INFO) +endif() diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp new file mode 100755 index 000000000..36057bfca --- /dev/null +++ b/examples/llama-bench/llama-bench.cpp @@ -0,0 +1,969 @@ +#include 
+#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "ggml.h" +#include "llama.h" +#include "common.h" +#include "build-info.h" +#ifdef GGML_USE_CUBLAS +#include "ggml-cuda.h" +#endif + +// utils +static uint64_t get_time_ns() { + using clock = std::chrono::high_resolution_clock; + return std::chrono::nanoseconds(clock::now().time_since_epoch()).count(); +} + +template +static std::string join(const std::vector & values, const std::string & delim) { + std::ostringstream str; + for (size_t i = 0; i < values.size(); i++) { + str << values[i]; + if (i < values.size() - 1) { + str << delim; + } + } + return str.str(); +} + +template +static std::vector split(const std::string & str, char delim) { + std::vector values; + std::istringstream str_stream(str); + std::string token; + while (std::getline(str_stream, token, delim)) { + T value; + std::istringstream token_stream(token); + token_stream >> value; + values.push_back(value); + } + return values; +} + +template +static T avg(const std::vector & v) { + if (v.empty()) { + return 0; + } + T sum = std::accumulate(v.begin(), v.end(), T(0)); + return sum / (T)v.size(); +} + +template +static T stdev(const std::vector & v) { + if (v.size() <= 1) { + return 0; + } + T mean = avg(v); + T sq_sum = std::inner_product(v.begin(), v.end(), v.begin(), T(0)); + T stdev = std::sqrt(sq_sum / (T)(v.size() - 1) - mean * mean * (T)v.size() / (T)(v.size() - 1)); + return stdev; +} + +static bool ggml_cpu_has_metal() { +#if defined(GGML_USE_METAL) + return true; +#else + return false; +#endif +} + +static std::string get_cpu_info() { + std::string id; +#ifdef __linux__ + FILE * f = fopen("/proc/cpuinfo", "r"); + if (f) { + char buf[1024]; + while (fgets(buf, sizeof(buf), f)) { + if (strncmp(buf, "model name", 10) == 0) { + char * p = strchr(buf, ':'); + if (p) { + p++; + while (std::isspace(*p)) { + p++; + } + while (std::isspace(p[strlen(p) - 1])) { + p[strlen(p) - 1] = '\0'; + } + id = p; + break; + } + } + } + } +#endif + // TODO: other platforms + return id; +} + +static std::string get_gpu_info() { + std::string id; +#ifdef GGML_USE_CUBLAS + int count = ggml_cuda_get_device_count(); + for (int i = 0; i < count; i++) { + char buf[128]; + ggml_cuda_get_device_description(i, buf, sizeof(buf)); + id += buf; + if (i < count - 1) { + id += "/"; + } + } +#endif + // TODO: other backends + return id; +} + +// command line params +enum output_formats {CSV, JSON, MARKDOWN, SQL}; + +struct cmd_params { + std::vector model; + std::vector n_prompt; + std::vector n_gen; + std::vector n_batch; + std::vector f32_kv; + std::vector n_threads; + std::vector n_gpu_layers; + std::vector main_gpu; + std::vector mul_mat_q; + std::vector low_vram; + std::vector> tensor_split; + int reps; + bool verbose; + output_formats output_format; +}; + +static const cmd_params cmd_params_defaults = { + /* model */ {"models/7B/ggml-model-q4_0.gguf"}, + /* n_prompt */ {512}, + /* n_gen */ {128}, + /* n_batch */ {512}, + /* f32_kv */ {false}, + /* n_threads */ {get_num_physical_cores()}, + /* n_gpu_layers */ {99}, + /* main_gpu */ {0}, + /* mul_mat_q */ {true}, + /* low_vram */ {false}, + /* tensor_split */ {{}}, + /* reps */ 5, + /* verbose */ false, + /* output_format */ MARKDOWN +}; + +static void print_usage(int /* argc */, char ** argv) { + fprintf(stdout, "usage: %s [options]\n", argv[0]); + fprintf(stdout, "\n"); + fprintf(stdout, "options:\n"); + fprintf(stdout, " -h, --help\n"); + 
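A note on the `stdev` helper above: it computes the Bessel-corrected sample standard deviation via the sum-of-squares identity. With `sq_sum` $= \sum_i x_i^2$ and $\bar{x}$ the mean over $N$ samples, the expression in the code is algebraically:

$$
s \;=\; \sqrt{\frac{\texttt{sq\_sum}}{N-1} \;-\; \bar{x}^{2}\,\frac{N}{N-1}}
\;=\; \sqrt{\frac{\sum_{i=1}^{N} x_i^{2} \;-\; N\bar{x}^{2}}{N-1}}
$$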
fprintf(stdout, " -m, --model (default: %s)\n", join(cmd_params_defaults.model, ",").c_str()); + fprintf(stdout, " -p, --n-prompt (default: %s)\n", join(cmd_params_defaults.n_prompt, ",").c_str()); + fprintf(stdout, " -n, --n-gen (default: %s)\n", join(cmd_params_defaults.n_gen, ",").c_str()); + fprintf(stdout, " -b, --batch-size (default: %s)\n", join(cmd_params_defaults.n_batch, ",").c_str()); + fprintf(stdout, " --memory-f32 <0|1> (default: %s)\n", join(cmd_params_defaults.f32_kv, ",").c_str()); + fprintf(stdout, " -t, --threads (default: %s)\n", join(cmd_params_defaults.n_threads, ",").c_str()); + fprintf(stdout, " -ngl N, --n-gpu-layers (default: %s)\n", join(cmd_params_defaults.n_gpu_layers, ",").c_str()); + fprintf(stdout, " -mg i, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); + fprintf(stdout, " -lv, --low-vram <0|1> (default: %s)\n", join(cmd_params_defaults.low_vram, ",").c_str()); + fprintf(stdout, " -mmq, --mul-mat-q <0|1> (default: %s)\n", join(cmd_params_defaults.mul_mat_q, ",").c_str()); + fprintf(stdout, " -ts, --tensor_split \n"); + fprintf(stdout, " -r, --repetitions (default: %d)\n", cmd_params_defaults.reps); + fprintf(stdout, " -o, --output (default: %s)\n", cmd_params_defaults.output_format == CSV ? "csv" : cmd_params_defaults.output_format == JSON ? "json" : cmd_params_defaults.output_format == MARKDOWN ? "md" : "sql"); + fprintf(stdout, " -v, --verbose (default: %s)\n", cmd_params_defaults.verbose ? "1" : "0"); + fprintf(stdout, "\n"); + fprintf(stdout, "Multiple values can be given for each parameter by separating them with ',' or by specifying the parameter multiple times.\n"); + +} + +static cmd_params parse_cmd_params(int argc, char ** argv) { + cmd_params params; + std::string arg; + bool invalid_param = false; + const std::string arg_prefix = "--"; + const char split_delim = ','; + + params.verbose = cmd_params_defaults.verbose; + params.output_format = cmd_params_defaults.output_format; + params.reps = cmd_params_defaults.reps; + + for (int i = 1; i < argc; i++) { + arg = argv[i]; + if (arg.compare(0, arg_prefix.size(), arg_prefix) == 0) { + std::replace(arg.begin(), arg.end(), '_', '-'); + } + + if (arg == "-h" || arg == "--help") { + print_usage(argc, argv); + exit(0); + } else if (arg == "-m" || arg == "--model") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.model.insert(params.model.end(), p.begin(), p.end()); + } else if (arg == "-p" || arg == "--n-prompt") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.n_prompt.insert(params.n_prompt.end(), p.begin(), p.end()); + } else if (arg == "-n" || arg == "--n-gen") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.n_gen.insert(params.n_gen.end(), p.begin(), p.end()); + } else if (arg == "-b" || arg == "--batch-size") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.n_batch.insert(params.n_batch.end(), p.begin(), p.end()); + } else if (arg == "--memory-f32") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.f32_kv.insert(params.f32_kv.end(), p.begin(), p.end()); + } else if (arg == "-t" || arg == "--threads") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.n_threads.insert(params.n_threads.end(), p.begin(), p.end()); + } 
else if (arg == "-ngl" || arg == "--n-gpu-layers") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.n_gpu_layers.insert(params.n_gpu_layers.end(), p.begin(), p.end()); + } else if (arg == "-mg" || arg == "--main-gpu") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.main_gpu = split(argv[i], split_delim); + } else if (arg == "-lv" || arg == "--low-vram") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.low_vram.insert(params.low_vram.end(), p.begin(), p.end()); + } else if (arg == "-mmq" || arg == "--mul-mat-q") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.mul_mat_q.insert(params.mul_mat_q.end(), p.begin(), p.end()); + } else if (arg == "-ts" || arg == "--tensor-split") { + if (++i >= argc) { + invalid_param = true; + break; + } + for (auto ts : split(argv[i], split_delim)) { + // split string by ; and / + const std::regex regex{R"([;/]+)"}; + std::sregex_token_iterator it{ts.begin(), ts.end(), regex, -1}; + std::vector split_arg{it, {}}; + GGML_ASSERT(split_arg.size() <= LLAMA_MAX_DEVICES); + + std::array tensor_split; + for (size_t i = 0; i < LLAMA_MAX_DEVICES; ++i) { + if (i < split_arg.size()) { + tensor_split[i] = std::stof(split_arg[i]); + } else { + tensor_split[i] = 0.0f; + } + } + params.tensor_split.push_back(tensor_split); + } + } else if (arg == "-r" || arg == "--repetitions") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.reps = std::stoi(argv[i]); + } else if (arg == "-o" || arg == "--output") { + if (++i >= argc) { + invalid_param = true; + break; + } + if (argv[i] == std::string("csv")) { + params.output_format = CSV; + } else if (argv[i] == std::string("json")) { + params.output_format = JSON; + } else if (argv[i] == std::string("md")) { + params.output_format = MARKDOWN; + } else if (argv[i] == std::string("sql")) { + params.output_format = SQL; + } else { + invalid_param = true; + break; + } + } else if (arg == "-v" || arg == "--verbose") { + params.verbose = true; + } else { + invalid_param = true; + break; + } + } + if (invalid_param) { + fprintf(stderr, "error: invalid parameter for argument: %s\n", arg.c_str()); + print_usage(argc, argv); + exit(1); + } + + // set defaults + if (params.model.empty()) { params.model = cmd_params_defaults.model; } + if (params.n_prompt.empty()) { params.n_prompt = cmd_params_defaults.n_prompt; } + if (params.n_gen.empty()) { params.n_gen = cmd_params_defaults.n_gen; } + if (params.n_batch.empty()) { params.n_batch = cmd_params_defaults.n_batch; } + if (params.f32_kv.empty()) { params.f32_kv = cmd_params_defaults.f32_kv; } + if (params.n_gpu_layers.empty()) { params.n_gpu_layers = cmd_params_defaults.n_gpu_layers; } + if (params.main_gpu.empty()) { params.main_gpu = cmd_params_defaults.main_gpu; } + if (params.mul_mat_q.empty()) { params.mul_mat_q = cmd_params_defaults.mul_mat_q; } + if (params.low_vram.empty()) { params.low_vram = cmd_params_defaults.low_vram; } + if (params.tensor_split.empty()) { params.tensor_split = cmd_params_defaults.tensor_split; } + if (params.n_threads.empty()) { params.n_threads = cmd_params_defaults.n_threads; } + + return params; +} + +struct cmd_params_instance { + std::string model; + int n_prompt; + int n_gen; + int n_batch; + bool f32_kv; + int n_threads; + int n_gpu_layers; + int main_gpu; + bool mul_mat_q; + bool low_vram; + std::array tensor_split; + + llama_context_params 
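A worked example may help for the `-ts`/`--tensor-split` parsing above: the argument is split on `;` or `/`, each piece is parsed with `std::stof`, and the result is zero-padded up to the device limit. An illustrative standalone version (assuming a fixed `MAX_DEVICES` in place of `LLAMA_MAX_DEVICES`):

```cpp
// Illustrative re-implementation of the "-ts" parsing shown above;
// "3/1" yields proportions {3.0, 1.0} with remaining slots zeroed.
#include <array>
#include <cstdio>
#include <regex>
#include <string>
#include <vector>

int main() {
    const size_t MAX_DEVICES = 4;
    const std::string ts = "3/1";

    const std::regex re{R"([;/]+)"};
    std::sregex_token_iterator it{ts.begin(), ts.end(), re, -1};
    std::vector<std::string> parts{it, {}};

    std::array<float, MAX_DEVICES> split{}; // zero-initialized
    for (size_t i = 0; i < MAX_DEVICES && i < parts.size(); ++i) {
        split[i] = std::stof(parts[i]);
    }
    for (float f : split) {
        printf("%.2f ", f); // prints: 3.00 1.00 0.00 0.00
    }
    printf("\n");
    return 0;
}
```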
to_llama_params() const { + llama_context_params lparams = llama_context_default_params(); + lparams.n_ctx = n_prompt + n_gen; + lparams.n_batch = n_batch; + lparams.f16_kv = !f32_kv; + lparams.n_gpu_layers = n_gpu_layers; + lparams.main_gpu = main_gpu; + lparams.mul_mat_q = mul_mat_q; + lparams.low_vram = low_vram; + lparams.tensor_split = tensor_split.data(); + + return lparams; + } +}; + +static std::vector get_cmd_params_instances_int(const cmd_params & params, int n_gen, int n_prompt) { + std::vector instances; + + for (const auto & m : params.model) + for (const auto & nb : params.n_batch) + for (const auto & fk : params.f32_kv) + for (const auto & nl : params.n_gpu_layers) + for (const auto & mg : params.main_gpu) + for (const auto & mmq : params.mul_mat_q) + for (const auto & lv : params.low_vram) + for (const auto & ts : params.tensor_split) + for (const auto & nt : params.n_threads) { + cmd_params_instance instance = { + /* .model = */ m, + /* .n_prompt = */ n_prompt, + /* .n_gen = */ n_gen, + /* .n_batch = */ nb, + /* .f32_kv = */ fk, + /* .n_threads = */ nt, + /* .n_gpu_layers = */ nl, + /* .main_gpu = */ mg, + /* .mul_mat_q = */ mmq, + /* .low_vram = */ lv, + /* .tensor_split = */ ts, + }; + instances.push_back(instance); + } + return instances; +} + +static std::vector get_cmd_params_instances(const cmd_params & params) { + std::vector instances; + + for (const auto & n_prompt : params.n_prompt) { + if (n_prompt == 0) { + continue; + } + auto instances_prompt = get_cmd_params_instances_int(params, 0, n_prompt); + instances.insert(instances.end(), instances_prompt.begin(), instances_prompt.end()); + } + + for (const auto & n_gen : params.n_gen) { + if (n_gen == 0) { + continue; + } + auto instances_gen = get_cmd_params_instances_int(params, n_gen, 0); + instances.insert(instances.end(), instances_gen.begin(), instances_gen.end()); + } + + return instances; +} + +struct test { + static const std::string build_commit; + static const int build_number; + static const bool cuda; + static const bool opencl; + static const bool metal; + static const bool gpu_blas; + static const bool blas; + static const std::string cpu_info; + static const std::string gpu_info; + std::string model_filename; + std::string model_type; + int n_batch; + int n_threads; + bool f32_kv; + int n_gpu_layers; + int main_gpu; + bool mul_mat_q; + bool low_vram; + std::array tensor_split; + int n_prompt; + int n_gen; + std::string test_time; + std::vector samples_ns; + + test(const cmd_params_instance & inst, const llama_model * lmodel, const llama_context * ctx) { + model_filename = inst.model; + char buf[128]; + llama_model_type(lmodel, buf, sizeof(buf)); + model_type = buf; + n_batch = inst.n_batch; + n_threads = inst.n_threads; + f32_kv = inst.f32_kv; + n_gpu_layers = inst.n_gpu_layers; + main_gpu = inst.main_gpu; + mul_mat_q = inst.mul_mat_q; + low_vram = inst.low_vram; + tensor_split = inst.tensor_split; + n_prompt = inst.n_prompt; + n_gen = inst.n_gen; + // RFC 3339 date-time format + time_t t = time(NULL); + std::strftime(buf, sizeof(buf), "%FT%TZ", gmtime(&t)); + test_time = buf; + + (void) ctx; + } + + uint64_t avg_ns() const { + return ::avg(samples_ns); + } + + uint64_t stdev_ns() const { + return ::stdev(samples_ns); + } + + std::vector get_ts() const { + int n_tokens = n_prompt + n_gen; + std::vector ts; + std::transform(samples_ns.begin(), samples_ns.end(), std::back_inserter(ts), [n_tokens](uint64_t t) { return 1e9 * n_tokens / t; }); + return ts; + } + + double avg_ts() const { + return 
::avg(get_ts()); + } + + double stdev_ts() const { + return ::stdev(get_ts()); + } + + static std::string get_backend() { + if (cuda) { + return "CUDA"; + } + if (opencl) { + return "OpenCL"; + } + if (metal) { + return "Metal"; + } + if (gpu_blas) { + return "GPU BLAS"; + } + if (blas) { + return "BLAS"; + } + return "CPU"; + } + + static const std::vector & get_fields() { + static const std::vector fields = { + "build_commit", "build_number", + "cuda", "opencl", "metal", "gpu_blas", "blas", + "cpu_info", "gpu_info", + "model_filename", "model_type", + "n_batch", "n_threads", "f16_kv", + "n_gpu_layers", "main_gpu", "mul_mat_q", "low_vram", "tensor_split", + "n_prompt", "n_gen", "test_time", + "avg_ns", "stddev_ns", + "avg_ts", "stddev_ts" + }; + return fields; + } + + enum field_type {STRING, BOOL, INT, FLOAT}; + + static field_type get_field_type(const std::string & field) { + if (field == "build_number" || field == "n_batch" || field == "n_threads" || + field == "n_gpu_layers" || field == "main_gpu" || + field == "n_prompt" || field == "n_gen" || + field == "avg_ns" || field == "stddev_ns") { + return INT; + } + if (field == "cuda" || field == "opencl" || field == "metal" || field == "gpu_blas" || field == "blas" || + field == "f16_kv" || field == "mul_mat_q" || field == "low_vram") { + return BOOL; + } + if (field == "avg_ts" || field == "stddev_ts") { + return FLOAT; + } + return STRING; + } + + std::vector get_values() const { + std::string tensor_split_str; + int max_nonzero = 0; + for (int i = 0; i < LLAMA_MAX_DEVICES; i++) { + if (tensor_split[i] > 0) { + max_nonzero = i; + } + } + for (int i = 0; i <= max_nonzero; i++) { + char buf[32]; + snprintf(buf, sizeof(buf), "%.2f", tensor_split[i]); + tensor_split_str += buf; + if (i < max_nonzero) { + tensor_split_str += "/"; + } + } + std::vector values = { + build_commit, std::to_string(build_number), + std::to_string(cuda), std::to_string(opencl), std::to_string(metal), std::to_string(gpu_blas), std::to_string(blas), + cpu_info, gpu_info, + model_filename, model_type, + std::to_string(n_batch), std::to_string(n_threads), std::to_string(!f32_kv), + std::to_string(n_gpu_layers), std::to_string(main_gpu), std::to_string(mul_mat_q), std::to_string(low_vram), tensor_split_str, + std::to_string(n_prompt), std::to_string(n_gen), test_time, + std::to_string(avg_ns()), std::to_string(stdev_ns()), + std::to_string(avg_ts()), std::to_string(stdev_ts()) + }; + return values; + } + + std::map get_map() const { + std::map map; + auto fields = get_fields(); + auto values = get_values(); + std::transform(fields.begin(), fields.end(), values.begin(), + std::inserter(map, map.end()), std::make_pair); + return map; + } +}; + +const std::string test::build_commit = BUILD_COMMIT; +const int test::build_number = BUILD_NUMBER; +const bool test::cuda = !!ggml_cpu_has_cublas(); +const bool test::opencl = !!ggml_cpu_has_clblast(); +const bool test::metal = !!ggml_cpu_has_metal(); +const bool test::gpu_blas = !!ggml_cpu_has_gpublas(); +const bool test::blas = !!ggml_cpu_has_blas(); +const std::string test::cpu_info = get_cpu_info(); +const std::string test::gpu_info = get_gpu_info(); + +struct printer { + virtual ~printer() {} + + FILE * fout; + virtual void print_header(const cmd_params & params) { (void) params; }; + virtual void print_test(const test & t) = 0; + virtual void print_footer() { }; +}; + +struct csv_printer : public printer { + static std::string escape_csv(const std::string & field) { + std::string escaped = "\""; + for (auto c : field) { + if 
(c == '"') { + escaped += "\""; + } + escaped += c; + } + escaped += "\""; + return escaped; + } + + void print_header(const cmd_params & params) override { + std::vector fields = test::get_fields(); + fprintf(fout, "%s\n", join(fields, ",").c_str()); + (void) params; + } + + void print_test(const test & t) override { + std::vector values = t.get_values(); + std::transform(values.begin(), values.end(), values.begin(), escape_csv); + fprintf(fout, "%s\n", join(values, ",").c_str()); + } +}; + +struct json_printer : public printer { + bool first = true; + + static std::string escape_json(const std::string & value) { + std::string escaped; + for (auto c : value) { + if (c == '"') { + escaped += "\\\""; + } else if (c == '\\') { + escaped += "\\\\"; + } else if (c <= 0x1f) { + char buf[8]; + snprintf(buf, sizeof(buf), "\\u%04x", c); + escaped += buf; + } else { + escaped += c; + } + } + return escaped; + } + + static std::string format_value(const std::string & field, const std::string & value) { + switch (test::get_field_type(field)) { + case test::STRING: + return "\"" + escape_json(value) + "\""; + case test::BOOL: + return value == "0" ? "false" : "true"; + default: + return value; + } + } + + void print_header(const cmd_params & params) override { + fprintf(fout, "[\n"); + (void) params; + } + + void print_fields(const std::vector & fields, const std::vector & values) { + assert(fields.size() == values.size()); + for (size_t i = 0; i < fields.size(); i++) { + fprintf(fout, " \"%s\": %s,\n", fields.at(i).c_str(), format_value(fields.at(i), values.at(i)).c_str()); + } + } + + void print_test(const test & t) override { + if (first) { + first = false; + } else { + fprintf(fout, ",\n"); + } + fprintf(fout, " {\n"); + print_fields(test::get_fields(), t.get_values()); + fprintf(fout, " \"samples_ns\": [ %s ],\n", join(t.samples_ns, ", ").c_str()); + fprintf(fout, " \"samples_ts\": [ %s ]\n", join(t.get_ts(), ", ").c_str()); + fprintf(fout, " }"); + fflush(fout); + } + + void print_footer() override { + fprintf(fout, "\n]\n"); + } +}; + +struct markdown_printer : public printer { + std::vector fields; + + static int get_field_width(const std::string & field) { + if (field == "model") { + return -30; + } + if (field == "t/s") { + return 15; + } + int width = std::max((int)field.length(), 10); + + if (test::get_field_type(field) == test::STRING) { + return -width; + } + return width; + } + + void print_header(const cmd_params & params) override { + // select fields to print + fields = { "model", "backend" }; + bool is_cpu_backend = test::get_backend() == "CPU" || test::get_backend() == "BLAS"; + if (!is_cpu_backend) { + fields.push_back("n_gpu_layers"); + } + if (params.n_threads.size() > 1 || params.n_threads != cmd_params_defaults.n_threads || is_cpu_backend) { + fields.push_back("n_threads"); + } + if (params.n_batch.size() > 1 || params.n_batch != cmd_params_defaults.n_batch) { + fields.push_back("n_batch"); + } + if (params.f32_kv.size() > 1 || params.f32_kv != cmd_params_defaults.f32_kv) { + fields.push_back("f16_kv"); + } + if (params.main_gpu.size() > 1 || params.main_gpu != cmd_params_defaults.main_gpu) { + fields.push_back("main_gpu"); + } + if (params.mul_mat_q.size() > 1 || params.mul_mat_q != cmd_params_defaults.mul_mat_q) { + fields.push_back("mul_mat_q"); + } + if (params.low_vram.size() > 1 || params.low_vram != cmd_params_defaults.low_vram) { + fields.push_back("low_vram"); + } + if (params.tensor_split.size() > 1 || params.tensor_split != cmd_params_defaults.tensor_split) { + 
fields.push_back("tensor_split"); + } + fields.push_back("test"); + fields.push_back("t/s"); + + fprintf(fout, "|"); + for (const auto & field : fields) { + fprintf(fout, " %*s |", get_field_width(field), field.c_str()); + } + fprintf(fout, "\n"); + fprintf(fout, "|"); + for (const auto & field : fields) { + int width = get_field_width(field); + fprintf(fout, " %s%s |", std::string(std::abs(width) - 1, '-').c_str(), width > 0 ? ":" : "-"); + } + fprintf(fout, "\n"); + } + + void print_test(const test & t) override { + std::map vmap = t.get_map(); + + fprintf(fout, "|"); + for (const auto & field : fields) { + std::string value; + if (field == "model") { + value = t.model_type; + } else if (field == "backend") { + value = test::get_backend(); + } else if (field == "test") { + char buf[128]; + if (t.n_prompt > 0 && t.n_gen == 0) { + snprintf(buf, sizeof(buf), "pp %d", t.n_prompt); + } else if (t.n_gen > 0 && t.n_prompt == 0) { + snprintf(buf, sizeof(buf), "tg %d", t.n_gen); + } else { + assert(false); + exit(1); + } + value = buf; + } else if (field == "t/s") { + char buf[128]; + snprintf(buf, sizeof(buf), "%.2f ± %.2f", t.avg_ts(), t.stdev_ts()); + value = buf; + } else if (vmap.find(field) != vmap.end()) { + value = vmap.at(field); + } else { + assert(false); + exit(1); + } + + int width = get_field_width(field); + if (field == "t/s") { + // HACK: the utf-8 character is 2 bytes + width += 1; + } + fprintf(fout, " %*s |", width, value.c_str()); + } + fprintf(fout, "\n"); + } + + void print_footer() override { + fprintf(fout, "\nbuild: %s (%d)\n", test::build_commit.c_str(), test::build_number); + } +}; + +struct sql_printer : public printer { + static std::string get_sql_field_type(const std::string & field) { + switch (test::get_field_type(field)) { + case test::STRING: + return "TEXT"; + case test::BOOL: + case test::INT: + return "INTEGER"; + case test::FLOAT: + return "REAL"; + default: + assert(false); + exit(1); + } + } + + void print_header(const cmd_params & params) override { + std::vector fields = test::get_fields(); + fprintf(fout, "CREATE TABLE IF NOT EXISTS test (\n"); + for (size_t i = 0; i < fields.size(); i++) { + fprintf(fout, " %s %s%s\n", fields.at(i).c_str(), get_sql_field_type(fields.at(i)).c_str(), i < fields.size() - 1 ? "," : ""); + } + fprintf(fout, ");\n"); + fprintf(fout, "\n"); + (void) params; + } + + void print_test(const test & t) override { + fprintf(fout, "INSERT INTO test (%s) ", join(test::get_fields(), ", ").c_str()); + fprintf(fout, "VALUES ("); + std::vector values = t.get_values(); + for (size_t i = 0; i < values.size(); i++) { + fprintf(fout, "'%s'%s", values.at(i).c_str(), i < values.size() - 1 ? 
", " : ""); + } + fprintf(fout, ");\n"); + } +}; + +static void test_prompt(llama_context * ctx, int n_prompt, int n_past, int n_batch, int n_threads) { + std::vector tokens(n_batch, llama_token_bos(ctx)); + int n_processed = 0; + while (n_processed < n_prompt) { + int n_tokens = std::min(n_prompt - n_processed, n_batch); + llama_eval(ctx, tokens.data(), n_tokens, n_past + n_processed, n_threads); + n_processed += n_tokens; + } +} + +static void test_gen(llama_context * ctx, int n_gen, int n_past, int n_threads) { + llama_token token = llama_token_bos(ctx); + for (int i = 0; i < n_gen; i++) { + llama_eval(ctx, &token, 1, n_past + i, n_threads); + } +} + +static void llama_null_log_callback(enum llama_log_level level, const char * text, void * user_data) { + (void) level; + (void) text; + (void) user_data; +} + +int main(int argc, char ** argv) { +#if !defined(NDEBUG) + fprintf(stderr, "warning: asserts enabled, performance may be affected\n"); +#endif + +#if (defined(_MSC_VER) && defined(_DEBUG)) || (!defined(_MSC_VER) && !defined(__OPTIMIZE__)) + fprintf(stderr, "warning: debug build, performance may be affected\n"); +#endif + +#if defined(__SANITIZE_ADDRESS__) || defined(__SANITIZE_THREAD__) + fprintf(stderr, "warning: sanitizer enabled, performance may be affected\n"); +#endif + + cmd_params params = parse_cmd_params(argc, argv); + + // initialize llama.cpp + if (!params.verbose) { + llama_log_set(llama_null_log_callback, NULL); + } + bool numa = false; + llama_backend_init(numa); + + // initialize printer + std::unique_ptr p; + switch (params.output_format) { + case CSV: + p.reset(new csv_printer()); + break; + case JSON: + p.reset(new json_printer()); + break; + case MARKDOWN: + p.reset(new markdown_printer()); + break; + case SQL: + p.reset(new sql_printer()); + break; + default: + assert(false); + exit(1); + } + p->fout = stdout; + p->print_header(params); + + std::vector params_instances = get_cmd_params_instances(params); + + for (const auto & inst : params_instances) { + // TODO: keep the model between tests when possible + llama_context_params lparams = inst.to_llama_params(); + + llama_model * lmodel = llama_load_model_from_file(inst.model.c_str(), lparams); + if (lmodel == NULL) { + fprintf(stderr, "%s: error: failed to load model '%s'\n", __func__, inst.model.c_str()); + return 1; + } + + llama_context * ctx = llama_new_context_with_model(lmodel, lparams); + if (ctx == NULL) { + fprintf(stderr, "%s: error: failed to create context with model '%s'\n", __func__, inst.model.c_str()); + llama_free_model(lmodel); + return 1; + } + + test t(inst, lmodel, ctx); + + // warmup run + test_gen(ctx, 1, 0, t.n_threads); + + for (int i = 0; i < params.reps; i++) { + uint64_t t_start = get_time_ns(); + if (t.n_prompt > 0) { + test_prompt(ctx, t.n_prompt, 0, t.n_batch, t.n_threads); + } + if (t.n_gen > 0) { + test_gen(ctx, t.n_gen, t.n_prompt, t.n_threads); + } + uint64_t t_ns = get_time_ns() - t_start; + t.samples_ns.push_back(t_ns); + } + + p->print_test(t); + + llama_print_timings(ctx); + + llama_free(ctx); + llama_free_model(lmodel); + } + + p->print_footer(); + + llama_backend_free(); + + return 0; +} diff --git a/examples/llama.vim b/examples/llama.vim new file mode 100644 index 000000000..f03fadfb7 --- /dev/null +++ b/examples/llama.vim @@ -0,0 +1,132 @@ +" Requires an already running llama.cpp server +" To install either copy or symlink to ~/.vim/autoload/llama.vim +" Then start with either :call llama#doLlamaGen(), +" or add a keybind to your vimrc such as +" nnoremap Z :call 
llama#doLlamaGen() + " Similarly, you could add an insert mode keybind with + " inoremap call llama#doLlamaGen() + " + " g:llama_api_url and g:llama_overrides can be configured in your .vimrc + " let g:llama_api_url = "192.168.1.10:8080" + " llama_overrides can also be set through buffer/window scopes. For instance + " autocmd filetype python let b:llama_overrides = {"temp": 0.2} + " Could be added to your .vimrc to automatically set a lower temperature when + " editing a python script + " Additionally, an override dict can be stored at the top of a file + " !*{"stop": ["User:"]} + " Could be added to the start of your chatlog.txt to set the stopping token + " These parameter dicts are merged together from lowest to highest priority: + " server default -> g:llama_overrides -> w:llama_overrides -> + " b:llama_overrides -> in file (!*) overrides + " + " Sublists (like logit_bias and stop) are overridden, not merged + " Example override: + " !*{"logit_bias": [[13, -5], [2, false]], "temperature": 1, "top_k": 5, "top_p": 0.5, "n_predict": 256, "repeat_last_n": 256, "repeat_penalty": 1.17647} +if !exists("g:llama_api_url") + let g:llama_api_url= "127.0.0.1:8080" +endif +if !exists("g:llama_overrides") + let g:llama_overrides = {} +endif +const s:querydata = {"n_predict": 256, "stop": [ "\n" ], "stream": v:true } +const s:curlcommand = ['curl','--data-raw', "{\"prompt\":\"### System:\"}", '--silent', '--no-buffer', '--request', 'POST', '--url', g:llama_api_url .. '/completion', '--header', "Content-Type: application/json"] +let s:linedict = {} + +func s:callbackHandler(bufn, channel, msg) + if len(a:msg) < 3 + return + elseif a:msg[0] == "d" + let l:msg = a:msg[6:-1] + else + let l:msg = a:msg + endif + let l:decoded_msg = json_decode(l:msg) + let l:newtext = split(l:decoded_msg['content'], "\n", 1) + if len(l:newtext) > 0 + call setbufline(a:bufn, s:linedict[a:bufn], getbufline(a:bufn, s:linedict[a:bufn])[0] .. newtext[0]) + else + echo "nothing genned" + endif + if len(newtext) > 1 + let l:failed = appendbufline(a:bufn, s:linedict[a:bufn], newtext[1:-1]) + let s:linedict[a:bufn] = s:linedict[a:bufn] + len(newtext)-1 + endif + if has_key(l:decoded_msg, "stop") && l:decoded_msg.stop + echo "Finished generation" + endif +endfunction + +func llama#doLlamaGen() + if exists("b:job") + if job_status(b:job) == "run" + call job_stop(b:job) + return + endif + endif + + let l:cbuffer = bufnr("%") + let s:linedict[l:cbuffer] = line('$') + let l:buflines = getbufline(l:cbuffer, 1, 1000) + let l:querydata = copy(s:querydata) + call extend(l:querydata, g:llama_overrides) + if exists("w:llama_overrides") + call extend(l:querydata, w:llama_overrides) + endif + if exists("b:llama_overrides") + call extend(l:querydata, b:llama_overrides) + endif + if l:buflines[0][0:1] == '!*' + let l:userdata = json_decode(l:buflines[0][2:-1]) + call extend(l:querydata, l:userdata) + let l:buflines = l:buflines[1:-1] + endif + let l:querydata.prompt = join(l:buflines, "\n") + let l:curlcommand = copy(s:curlcommand) + let l:curlcommand[2] = json_encode(l:querydata) + let b:job = job_start(l:curlcommand, {"callback": function("s:callbackHandler", [l:cbuffer])}) +endfunction + +" Echoes the tokenization of the provided string, or cursor to end of word +" Onus is placed on the user to include the preceding space +func llama#tokenizeWord(...)
+ if (a:0 > 0) + let l:input = a:1 + else + exe "normal \"*ye" + let l:input = @* + endif + let l:querydata = {"content": l:input} + let l:curlcommand = copy(s:curlcommand) + let l:curlcommand[2] = json_encode(l:querydata) + let l:curlcommand[8] = g:llama_api_url .. "/tokenize" + let s:token_job = job_start(l:curlcommand, {"callback": function("s:tokenizeWordCallback", [l:input])}) +endfunction + +func s:tokenizeWordCallback(plaintext, channel, msg) + echo '"' .. a:plaintext ..'" - ' .. string(json_decode(a:msg).tokens) +endfunction + + +" Echoes the token count of the entire buffer (or provided string) +" Example usage :echo llama#tokenCount() +func llama#tokenCount(...) + if (a:0 > 0) + let l:buflines = a:1 + else + let l:buflines = getline(1,1000) + if l:buflines[0][0:1] == '!*' + let l:buflines = l:buflines[1:-1] + endif + let l:buflines = join(l:buflines, "\n") + endif + let l:querydata = {"content": l:buflines} + let l:curlcommand = copy(s:curlcommand) + let l:curlcommand[2] = json_encode(l:querydata) + let l:curlcommand[8] = g:llama_api_url .. "/tokenize" + let s:token_job = job_start(l:curlcommand, {"callback": "s:tokenCountCallback"}) +endfunction + +func s:tokenCountCallback(channel, msg) + let resp = json_decode(a:msg) + echo len(resp.tokens) +endfunction diff --git a/examples/llm.vim b/examples/llm.vim index efecad0cd..594a28549 100644 --- a/examples/llm.vim +++ b/examples/llm.vim @@ -1,3 +1,5 @@ +" Basic plugin example + function! Llm() let url = "http://127.0.0.1:8080/completion" @@ -16,8 +18,10 @@ function! Llm() " Extract the content field from the response let content = json_decode(response).content + let split_newlines = split(content, '\n', 1) + " Insert the content at the cursor position - call setline(line('.'), getline('.') . content) + call setline(line('.'), [ getline('.') . split_newlines[0] ] + split_newlines[1:]) endfunction command! Llm call Llm() diff --git a/examples/main/README.md b/examples/main/README.md index 014112e5d..d555afdcc 100644 --- a/examples/main/README.md +++ b/examples/main/README.md @@ -140,6 +140,12 @@ The `--ctx-size` option allows you to set the size of the prompt context used by - `-c N, --ctx-size N`: Set the size of the prompt context (default: 512). The LLaMA models were built with a context of 2048, which will yield the best results on longer input/inference. However, increasing the context size beyond 2048 may lead to unpredictable results. +### Extended Context Size + +Some fine-tuned models have extended the context length by scaling RoPE. For example, if the original pre-trained model has a context length (max sequence length) of 4096 (4k) and the fine-tuned model has 32k, that is a scaling factor of 8. It should work by setting the above `--ctx-size` to 32768 (32k) and `--rope-scale` to 8. + +- `--rope-scale N`: where N is the linear scaling factor used by the fine-tuned model. + ### Keep Prompt The `--keep` option allows users to retain the original prompt when the model runs out of context, ensuring a connection to the initial instruction or conversation topic is maintained. @@ -154,9 +160,13 @@ The following options allow you to control the text generation process and fine- ### Number of Tokens to Predict -- `-n N, --n-predict N`: Set the number of tokens to predict when generating text (default: 128, -1 = infinity).
+- `-n N, --n-predict N`: Set the number of tokens to predict when generating text (default: 128, -1 = infinity, -2 = until context filled) -The `--n-predict` option controls the number of tokens the model generates in response to the input prompt. By adjusting this value, you can influence the length of the generated text. A higher value will result in longer text, while a lower value will produce shorter text. A value of -1 will cause text to be generated without limit. +The `--n-predict` option controls the number of tokens the model generates in response to the input prompt. By adjusting this value, you can influence the length of the generated text. A higher value will result in longer text, while a lower value will produce shorter text. + +A value of -1 will enable infinite text generation, even though we have a finite context window. When the context window is full, some of the earlier tokens (half of the tokens after `--n-keep`) will be discarded. The context must then be re-evaluated before generation can resume. On large models and/or large context windows, this will result in a significant pause in output (a sketch of this bookkeeping follows after this section). + +If the pause is undesirable, a value of -2 will stop generation immediately when the context is filled. It is important to note that the generated text may be shorter than the specified number of tokens if an End-of-Sequence (EOS) token or a reverse prompt is encountered. In interactive mode, text generation will pause and control will be returned to the user. In non-interactive mode, the program will end. In both cases, the text generation may stop before reaching the specified `n-predict` value. If you want the model to keep going without ever producing End-of-Sequence on its own, you can use the `--ignore-eos` parameter. @@ -278,6 +288,10 @@ These options help improve the performance and memory usage of the LLaMA models. - `--prompt-cache FNAME`: Specify a file to cache the model state after the initial prompt. This can significantly speed up the startup time when you're using longer prompts. The file is created during the first run and is reused and updated in subsequent runs. **Note**: Restoring a cached prompt does not imply restoring the exact state of the session at the point it was saved. So even when specifying a specific seed, you are not guaranteed to get the same sequence of tokens as the original generation. +### Grammars + +- `--grammar GRAMMAR`, `--grammar-file FILE`: Specify a grammar (defined inline or in a file) to constrain model output to a specific format. For example, you could force the model to output JSON or to speak only in emojis. See the [GBNF guide](../../grammars/README.md) for details on the syntax. + ### Quantization For information about 4-bit quantization, which can significantly improve performance and reduce memory usage, please refer to llama.cpp's primary [README](../../README.md#prepare-data--run).
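To make the context-swap arithmetic for `--n-predict -1` concrete, here is a simplified sketch (ours, not the exact `main.cpp` logic): when the window fills, the first `n_keep` tokens stay, and half of the remaining `n_left = n_past - n_keep` tokens are re-evaluated before generation resumes:

```cpp
// Simplified model of the infinite-generation context swap: keep n_keep
// tokens, reuse half of the rest, leaving room before re-evaluation.
#include <algorithm>
#include <cstdio>

int main() {
    const int n_ctx  = 2048; // context window
    const int n_keep = 128;  // tokens pinned from the original prompt
    int n_past = 2048;       // window is full

    const int n_left = n_past - n_keep;     // 1920 tokens eligible to drop
    n_past = std::max(1, n_keep);           // restart after the kept prefix
    const int n_reuse = n_left / 2;         // 960 recent tokens re-evaluated

    printf("keep %d, re-evaluate %d, free %d\n",
           n_keep, n_reuse, n_ctx - n_keep - n_reuse);
    return 0;
}
```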
diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 56ada7e69..1393f0b08 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -43,7 +43,7 @@ static bool is_interacting = false; void sigint_handler(int signo) { if (signo == SIGINT) { if (!is_interacting) { - is_interacting=true; + is_interacting = true; } else { console::cleanup(); printf("\n"); @@ -143,7 +143,7 @@ int main(int argc, char ** argv) { { fprintf(stderr, "%s: testing memory usage for n_batch = %d, n_ctx = %d\n", __func__, params.n_batch, params.n_ctx); - const std::vector tmp(params.n_batch, llama_token_bos()); + const std::vector tmp(params.n_batch, llama_token_bos(ctx)); llama_eval(ctx, tmp.data(), tmp.size(), params.n_ctx, params.n_threads); } @@ -189,27 +189,30 @@ int main(int argc, char ** argv) { } } + const bool is_spm = llama_vocab_type(ctx) == LLAMA_VOCAB_TYPE_SPM; + // tokenize the prompt std::vector embd_inp; - - // Add a space in front of the first character to match OG llama tokenizer behavior - params.prompt.insert(0, 1, ' '); - if (params.interactive_first || params.instruct || !params.prompt.empty() || session_tokens.empty()) { - embd_inp = ::llama_tokenize(ctx, params.prompt, true); + embd_inp = ::llama_tokenize(ctx, params.prompt, is_spm); } else { embd_inp = session_tokens; } + // Should not run without any tokens + if (embd_inp.empty()) { + embd_inp.push_back(llama_token_bos(ctx)); + } + // Tokenize negative prompt std::vector guidance_inp; int guidance_offset = 0; int original_prompt_len = 0; if (ctx_guidance) { params.cfg_negative_prompt.insert(0, 1, ' '); - guidance_inp = ::llama_tokenize(ctx_guidance, params.cfg_negative_prompt, true); + guidance_inp = ::llama_tokenize(ctx_guidance, params.cfg_negative_prompt, is_spm); - std::vector original_inp = ::llama_tokenize(ctx, params.prompt, true); + std::vector original_inp = ::llama_tokenize(ctx, params.prompt, is_spm); original_prompt_len = original_inp.size(); guidance_offset = (int)guidance_inp.size() - original_prompt_len; } @@ -256,8 +259,8 @@ int main(int argc, char ** argv) { } // prefix & suffix for instruct mode - const auto inp_pfx = ::llama_tokenize(ctx, "\n\n### Instruction:\n\n", true); - const auto inp_sfx = ::llama_tokenize(ctx, "\n\n### Response:\n\n", false); + const auto inp_pfx = ::llama_tokenize(ctx, "\n\n### Instruction:\n\n", is_spm); + const auto inp_sfx = ::llama_tokenize(ctx, "\n\n### Response:\n\n", false); // in instruct mode, we inject a prefix and a suffix to each input by the user if (params.instruct) { @@ -270,15 +273,12 @@ int main(int argc, char ** argv) { params.interactive = true; } - // determine newline token - auto llama_token_newline = ::llama_tokenize(ctx, "\n", false); - if (params.verbose_prompt) { fprintf(stderr, "\n"); fprintf(stderr, "%s: prompt: '%s'\n", __func__, params.prompt.c_str()); fprintf(stderr, "%s: number of tokens in prompt = %zu\n", __func__, embd_inp.size()); for (int i = 0; i < (int) embd_inp.size(); i++) { - fprintf(stderr, "%6d -> '%s'\n", embd_inp[i], llama_token_to_str(ctx, embd_inp[i])); + fprintf(stderr, "%6d -> '%s'\n", embd_inp[i], llama_token_to_str(ctx, embd_inp[i]).c_str()); } if (ctx_guidance) { @@ -286,14 +286,14 @@ int main(int argc, char ** argv) { fprintf(stderr, "%s: negative prompt: '%s'\n", __func__, params.cfg_negative_prompt.c_str()); fprintf(stderr, "%s: number of tokens in negative prompt = %zu\n", __func__, guidance_inp.size()); for (int i = 0; i < (int) guidance_inp.size(); i++) { - fprintf(stderr, "%6d -> '%s'\n", guidance_inp[i], 
llama_token_to_str(ctx, guidance_inp[i])); + fprintf(stderr, "%6d -> '%s'\n", guidance_inp[i], llama_token_to_str(ctx, guidance_inp[i]).c_str()); } } if (params.n_keep > 0) { fprintf(stderr, "%s: static prompt based on n_keep: '", __func__); for (int i = 0; i < params.n_keep; i++) { - fprintf(stderr, "%s", llama_token_to_str(ctx, embd_inp[i])); + fprintf(stderr, "%s", llama_token_to_str(ctx, embd_inp[i]).c_str()); } fprintf(stderr, "'\n"); } @@ -311,7 +311,7 @@ int main(int argc, char ** argv) { auto console_ctrl_handler = +[](DWORD ctrl_type) -> BOOL { return (ctrl_type == CTRL_C_EVENT) ? (sigint_handler(SIGINT), true) : false; }; - SetConsoleCtrlHandler(static_cast(console_ctrl_handler), true); + SetConsoleCtrlHandler(reinterpret_cast(console_ctrl_handler), true); #endif fprintf(stderr, "%s: interactive mode on.\n", __func__); @@ -352,10 +352,9 @@ int main(int argc, char ** argv) { fprintf(stderr, "\n"); { - auto it = params.logit_bias.find(llama_token_eos()); + auto it = params.logit_bias.find(llama_token_eos(ctx)); if (it != params.logit_bias.end() && it->second == -INFINITY) { - fprintf(stderr, - "%s: warning: EOS token is disabled, which will cause most grammars to fail\n", __func__); + fprintf(stderr, "%s: warning: EOS token is disabled, which will cause most grammars to fail\n", __func__); } } @@ -405,7 +404,7 @@ int main(int argc, char ** argv) { // do one empty run to warm up the model { - const std::vector tmp = { llama_token_bos(), }; + const std::vector tmp = { llama_token_bos(ctx), }; llama_eval(ctx, tmp.data(), tmp.size(), 0, params.n_threads); llama_reset_timings(ctx); } @@ -431,8 +430,12 @@ int main(int argc, char ** argv) { // - take the n_keep first tokens from the original prompt (via n_past) // - take half of the last (n_ctx - n_keep) tokens and recompute the logits in batches if (n_past + (int) embd.size() + std::max(0, guidance_offset) > n_ctx) { - const int n_left = n_past - params.n_keep; + if (params.n_predict == -2) { + fprintf(stderr, "\n\n%s: context full, stopping generation\n", __func__); + break; + } + const int n_left = n_past - params.n_keep; // always keep the first token - BOS n_past = std::max(1, params.n_keep); n_past_guidance = std::max(1, params.n_keep + guidance_offset); @@ -585,7 +588,7 @@ int main(int argc, char ** argv) { } // Apply penalties - float nl_logit = logits[llama_token_nl()]; + float nl_logit = logits[llama_token_nl(ctx)]; auto last_n_repeat = std::min(std::min((int)last_n_tokens.size(), repeat_last_n), n_ctx); llama_sample_repetition_penalty(ctx, &candidates_p, last_n_tokens.data() + last_n_tokens.size() - last_n_repeat, @@ -594,7 +597,7 @@ int main(int argc, char ** argv) { last_n_tokens.data() + last_n_tokens.size() - last_n_repeat, last_n_repeat, alpha_frequency, alpha_presence); if (!penalize_nl) { - logits[llama_token_nl()] = nl_logit; + logits[llama_token_nl(ctx)] = nl_logit; } if (grammar != NULL) { @@ -658,7 +661,7 @@ int main(int argc, char ** argv) { // display text if (input_echo) { for (auto id : embd) { - printf("%s", llama_token_to_str(ctx, id)); + printf("%s", llama_token_to_str(ctx, id).c_str()); } fflush(stdout); } @@ -700,7 +703,7 @@ int main(int argc, char ** argv) { } // deal with end of text token in interactive mode - if (last_n_tokens.back() == llama_token_eos()) { + if (last_n_tokens.back() == llama_token_eos(ctx)) { if (params.interactive) { if (params.antiprompt.size() != 0) { // tokenize and inject first reverse prompt @@ -724,7 +727,7 @@ int main(int argc, char ** argv) { } if (params.input_prefix_bos) { - 
embd_inp.push_back(llama_token_bos()); + embd_inp.push_back(llama_token_bos(ctx)); } std::string buffer; @@ -778,8 +781,7 @@ int main(int argc, char ** argv) { if (grammar != NULL) { llama_grammar_free(grammar); - std::vector grammar_rules( - parsed_grammar.c_rules()); + std::vector grammar_rules( parsed_grammar.c_rules()); grammar = llama_grammar_init( grammar_rules.data(), grammar_rules.size(), parsed_grammar.symbol_ids.at("root")); @@ -790,7 +792,7 @@ int main(int argc, char ** argv) { } // end of text token - if (!embd.empty() && embd.back() == llama_token_eos() && !(params.instruct || params.interactive)) { + if (!embd.empty() && embd.back() == llama_token_eos(ctx) && !(params.instruct || params.interactive)) { fprintf(stderr, " [end of text]\n"); break; } diff --git a/examples/make-ggml.py b/examples/make-ggml.py old mode 100644 new mode 100755 index f63d9fc22..6a34eeac5 --- a/examples/make-ggml.py +++ b/examples/make-ggml.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 """ This script converts Hugging Face llama models to GGML and quantizes them. diff --git a/examples/metal/metal.cpp b/examples/metal/metal.cpp index 7438defde..c05a4fa93 100644 --- a/examples/metal/metal.cpp +++ b/examples/metal/metal.cpp @@ -2,7 +2,7 @@ // // - First, export a LLaMA graph: // -// $ ./bin/main -m ../models/7B/ggml-model-q4_0.bin --export +// $ ./bin/main -m ../models/7B/ggml-model-q4_0.gguf --export // // - Run this tool to evaluate the exported graph: // diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 62433e983..a7bd9db2a 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -5,6 +5,7 @@ #include #include #include +#include #if defined(_MSC_VER) #pragma warning(disable: 4244 4267) // possible loss of data @@ -26,12 +27,136 @@ std::vector softmax(const std::vector& logits) { return probs; } -void perplexity(llama_context * ctx, const gpt_params & params) { +void perplexity_v2(llama_context * ctx, const gpt_params & params) { // Download: https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip?ref=salesforce-research // Run `./perplexity -m models/7B/ggml-model-q4_0.bin -f wiki.test.raw` // Output: `perplexity: 13.5106 [114/114]` // BOS tokens will be added for each chunk before eval - auto tokens = ::llama_tokenize(ctx, params.prompt, true); + + if (params.ppl_stride <= 0) { + fprintf(stderr, "%s: stride is %d but must be greater than zero!\n",__func__,params.ppl_stride); + return; + } + + const bool is_spm = llama_vocab_type(ctx) == LLAMA_VOCAB_TYPE_SPM; + const bool add_bos = is_spm; + + fprintf(stderr, "%s: tokenizing the input ..\n", __func__); + + auto tokens = ::llama_tokenize(ctx, params.prompt, add_bos); + + const int calc_chunk = params.n_ctx; + + fprintf(stderr, "%s: have %zu tokens. Calculation chunk = %d\n", __func__, tokens.size(), calc_chunk); + + if (int(tokens.size()) <= calc_chunk) { + fprintf(stderr, "%s: there are only %zu tokens, this is not enough for a context size of %d and stride %d\n",__func__, + tokens.size(), params.n_ctx, params.ppl_stride); + return; + } + + const int n_chunk_max = (tokens.size() - calc_chunk + params.ppl_stride - 1) / params.ppl_stride; + + const int n_chunk = params.n_chunks < 0 ? 
n_chunk_max : std::min(params.n_chunks, n_chunk_max); + const int n_vocab = llama_n_vocab(ctx); + const int n_batch = params.n_batch; + + int count = 0; + double nll = 0.0; + + fprintf(stderr, "%s: calculating perplexity over %d chunks, batch_size=%d\n", __func__, n_chunk, n_batch); + + for (int i = 0; i < n_chunk; ++i) { + const int start = i * params.ppl_stride; + const int end = start + calc_chunk; + + const int num_batches = (calc_chunk + n_batch - 1) / n_batch; + //fprintf(stderr, "%s: evaluating %d...%d using %d batches\n", __func__, start, end, num_batches); + + std::vector logits; + + const auto t_start = std::chrono::high_resolution_clock::now(); + + for (int j = 0; j < num_batches; ++j) { + const int batch_start = start + j * n_batch; + const int batch_size = std::min(end - batch_start, n_batch); + + //fprintf(stderr, " Batch %d: starts at %d, size is %d, n_past is %d\n",j,batch_start,batch_size,j * n_batch); + if (llama_eval(ctx, tokens.data() + batch_start, batch_size, j * n_batch, params.n_threads)) { + //fprintf(stderr, "%s : failed to eval\n", __func__); + return; + } + + // save original token and restore it after eval + const auto token_org = tokens[batch_start]; + + // add BOS token for the first batch of each chunk + if (add_bos && j == 0) { + tokens[batch_start] = llama_token_bos(ctx); + } + + const auto batch_logits = llama_get_logits(ctx); + logits.insert(logits.end(), batch_logits, batch_logits + batch_size * n_vocab); + + if (j == 0) { + tokens[batch_start] = token_org; + } + } + + const auto t_end = std::chrono::high_resolution_clock::now(); + + if (i == 0) { + const float t_total = std::chrono::duration(t_end - t_start).count(); + fprintf(stderr, "%s: %.2f seconds per pass - ETA ", __func__, t_total); + int total_seconds = (int)(t_total * n_chunk); + if (total_seconds >= 60*60) { + fprintf(stderr, "%d hours ", total_seconds / (60*60)); + total_seconds = total_seconds % (60*60); + } + fprintf(stderr, "%.2f minutes\n", total_seconds / 60.0); + } + + //fprintf(stderr, "%s: using tokens %d...%d\n",__func__,params.n_ctx - params.ppl_stride + start, params.n_ctx + start); + for (int j = params.n_ctx - params.ppl_stride - 1; j < params.n_ctx - 1; ++j) { + + // Calculate probability of next token, given the previous ones. 
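For reference, the quantity accumulated in this loop is the negative log-likelihood; the perplexity reported per chunk is its exponentiated average over the `count` tokens scored so far:

$$
\mathrm{PPL} \;=\; \exp\!\Big(-\frac{1}{N}\sum_{i=1}^{N}\log p(x_i \mid x_{<i})\Big)
$$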
+ const std::vector tok_logits( + logits.begin() + (j + 0) * n_vocab, + logits.begin() + (j + 1) * n_vocab); + + const float prob = softmax(tok_logits)[tokens[start + j + 1]]; + + nll += -std::log(prob); + ++count; + } + // perplexity is e^(average negative log-likelihood) + if (params.ppl_output_type == 0) { + printf("[%d]%.4lf,", i + 1, std::exp(nll / count)); + } else { + printf("%8d %.4lf\n", i*params.ppl_stride, std::exp(nll / count)); + } + fflush(stdout); + } + printf("\n"); +} + +void perplexity(llama_context * ctx, const gpt_params & params) { + if (params.ppl_stride > 0) { + perplexity_v2(ctx, params); + return; + } + + // Download: https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip?ref=salesforce-research + // Run `./perplexity -m models/7B/ggml-model-q4_0.bin -f wiki.test.raw` + // Output: `perplexity: 13.5106 [114/114]` + // BOS tokens will be added for each chunk before eval + + const bool is_spm = llama_vocab_type(ctx) == LLAMA_VOCAB_TYPE_SPM; + const bool add_bos = is_spm; + + fprintf(stderr, "%s: tokenizing the input ..\n", __func__); + + auto tokens = ::llama_tokenize(ctx, params.prompt, add_bos); const int n_chunk_max = tokens.size() / params.n_ctx; @@ -62,8 +187,8 @@ void perplexity(llama_context * ctx, const gpt_params & params) { const auto token_org = tokens[batch_start]; // add BOS token for the first batch of each chunk - if (j == 0) { - tokens[batch_start] = llama_token_bos(); + if (add_bos && j == 0) { + tokens[batch_start] = llama_token_bos(ctx); } if (llama_eval(ctx, tokens.data() + batch_start, batch_size, j * n_batch, params.n_threads)) { @@ -88,7 +213,7 @@ void perplexity(llama_context * ctx, const gpt_params & params) { fprintf(stderr, "%d hours ", total_seconds / (60*60)); total_seconds = total_seconds % (60*60); } - fprintf(stderr, "%d minutes\n", total_seconds / 60); + fprintf(stderr, "%.2f minutes\n", total_seconds / 60.0); } // We get the logits for all the tokens in the context window (params.n_ctx) @@ -115,12 +240,37 @@ void perplexity(llama_context * ctx, const gpt_params & params) { ++count; } // perplexity is e^(average negative log-likelihood) - printf("[%d]%.4lf,", i + 1, std::exp(nll / count)); + if (params.ppl_output_type == 0) { + printf("[%d]%.4lf,", i + 1, std::exp(nll / count)); + } else { + printf("%8d %.4lf\n", i*params.n_ctx, std::exp(nll / count)); + } fflush(stdout); } printf("\n"); } +std::vector hellaswag_evaluate_tokens(llama_context * ctx, const std::vector& tokens, int n_past, int n_batch, + int n_vocab, int n_thread) { + std::vector result; + result.reserve(tokens.size() * n_vocab); + size_t n_chunk = (tokens.size() + n_batch - 1)/n_batch; + for (size_t i_chunk = 0; i_chunk < n_chunk; ++i_chunk) { + size_t n_tokens = tokens.size() - i_chunk * n_batch; + n_tokens = std::min(n_tokens, size_t(n_batch)); + if (llama_eval(ctx, tokens.data() + i_chunk * n_batch, n_tokens, n_past, n_thread)) { + fprintf(stderr, "%s : failed to eval\n", __func__); + return {}; + } + + const auto logits = llama_get_logits(ctx); + result.insert(result.end(), logits, logits + n_tokens * n_vocab); + + n_past += n_tokens; + } + return result; +} + void hellaswag_score(llama_context * ctx, const gpt_params & params) { // Calculates hellaswag score (acc_norm) from prompt // @@ -155,8 +305,10 @@ void hellaswag_score(llama_context * ctx, const gpt_params & params) { size_t hs_task_count = prompt_lines.size()/6; fprintf(stderr, "%s : loaded %zu tasks from prompt.\n", __func__, hs_task_count); + const bool is_spm = llama_vocab_type(ctx) 
== LLAMA_VOCAB_TYPE_SPM; + // This is needed as usual for LLaMA models - bool prepend_bos = true; + const bool add_bos = is_spm; // Number of tasks to use when computing the score if ( params.hellaswag_tasks < hs_task_count ) { @@ -209,50 +361,92 @@ void hellaswag_score(llama_context * ctx, const gpt_params & params) { double acc = 0.0f; const int n_vocab = llama_n_vocab(ctx); - for (size_t task_idx = 0; task_idx < hs_task_count; task_idx++) { + std::vector tok_logits(n_vocab); + for (size_t task_idx = 0; task_idx < hs_task_count; task_idx++) { // Tokenize the context to count tokens - std::vector context_embd = ::llama_tokenize(ctx, hs_data[task_idx].context, prepend_bos); + std::vector context_embd = ::llama_tokenize(ctx, hs_data[task_idx].context, add_bos); size_t context_size = context_embd.size(); - for (size_t ending_idx=0;ending_idx<4;ending_idx++) { + // Do the 1st ending + // In this case we include the context when evaluating + auto query_embd = ::llama_tokenize(ctx, hs_data[task_idx].context + hs_data[task_idx].ending[0], add_bos); + auto query_size = query_embd.size(); + //printf("First query: %d\n",(int)query_size); + + // Stop if query wont fit the ctx window + if (query_size > (size_t)params.n_ctx) { + fprintf(stderr, "%s : number of tokens in query %zu > n_ctxl\n", __func__, query_size); + return; + } + + // Speedup small evaluations by evaluating atleast 32 tokens + if (query_size < 32) { + query_embd.resize(32); + } + + auto logits = hellaswag_evaluate_tokens(ctx, query_embd, 0, params.n_batch, n_vocab, params.n_threads); + if (logits.empty()) { + fprintf(stderr, "%s : failed to eval\n", __func__); + return; + } + + std::memcpy(tok_logits.data(), logits.data() + (context_size-1)*n_vocab, n_vocab*sizeof(float)); + const auto first_probs = softmax(tok_logits); + + hs_data[task_idx].ending_logprob_count[0] = 1; + hs_data[task_idx].ending_logprob[0] = std::log(first_probs[query_embd[context_size]]); + + // Calculate the logprobs over the ending + for (size_t j = context_size; j < query_size - 1; j++) { + + std::memcpy(tok_logits.data(), logits.data() + j*n_vocab, n_vocab*sizeof(float)); + + const float prob = softmax(tok_logits)[query_embd[j + 1]]; + + hs_data[task_idx].ending_logprob[0] += std::log(prob); + hs_data[task_idx].ending_logprob_count[0]++; + } + + // Calculate the mean token logprob for acc_norm + hs_data[task_idx].ending_logprob[0] /= hs_data[task_idx].ending_logprob_count[0]; + + // Do the remaining endings + // For these, we use the bare ending with n_past = context_size + // + for (size_t ending_idx = 1; ending_idx < 4; ending_idx++) { // Tokenize the query - std::vector query_embd = ::llama_tokenize(ctx, hs_data[task_idx].context + hs_data[task_idx].ending[ending_idx], prepend_bos); - size_t query_size = query_embd.size(); + query_embd = ::llama_tokenize(ctx, hs_data[task_idx].ending[ending_idx], false); + query_size = query_embd.size(); // Stop if query wont fit the ctx window - if (query_size > (size_t)params.n_ctx) { + if (context_size + query_size > (size_t)params.n_ctx) { fprintf(stderr, "%s : number of tokens in query %zu > n_ctxl\n", __func__, query_size); return; } // Speedup small evaluations by evaluating atleast 32 tokens - if (query_size < 32) { - query_embd.resize(32); - } + // No, resizing to 32 is actually slightly slower (at least on CUDA) + //if (query_size < 32) { + // query_embd.resize(32); + //} // Evaluate the query - if (llama_eval(ctx, query_embd.data(), query_embd.size(), 0, params.n_threads)) { + logits = 
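The HellaSwag selection here is length-normalized (hence the `acc_norm` in the comment above): each candidate ending is scored by its mean per-token log-probability, and the highest-scoring ending is taken as the prediction. Assuming ending $e$ contributes $T_e$ scored tokens:

$$
\mathrm{score}(e) \;=\; \frac{1}{T_e}\sum_{j=1}^{T_e}\log p\big(e_j \mid \text{context},\, e_{<j}\big),
\qquad
\hat{e} \;=\; \operatorname*{arg\,max}_{e\in\{1,\dots,4\}} \mathrm{score}(e)
$$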
hellaswag_evaluate_tokens(ctx, query_embd, context_size, params.n_batch, n_vocab, params.n_threads); + if (logits.empty()) { fprintf(stderr, "%s : failed to eval\n", __func__); return; } - const auto query_logits = llama_get_logits(ctx); - std::vector logits; - logits.insert(logits.end(), query_logits, query_logits + query_size * n_vocab); - - hs_data[task_idx].ending_logprob_count[ending_idx] = 0; - hs_data[task_idx].ending_logprob[ending_idx] = 0.0f; + hs_data[task_idx].ending_logprob_count[ending_idx] = 1; + hs_data[task_idx].ending_logprob[ending_idx] = std::log(first_probs[query_embd[0]]); // Calculate the logprobs over the ending - for (size_t j = context_size-1; j < query_size - 1; j++) { - // Calculate probability of next token, given the previous ones. - const std::vector tok_logits( - logits.begin() + (j + 0) * n_vocab, - logits.begin() + (j + 1) * n_vocab); + for (size_t j = 0; j < query_size - 1; j++) { + std::memcpy(tok_logits.data(), logits.data() + j*n_vocab, n_vocab*sizeof(float)); - const float prob = softmax(tok_logits)[query_embd[ j + 1]]; + const float prob = softmax(tok_logits)[query_embd[j + 1]]; hs_data[task_idx].ending_logprob[ending_idx] += std::log(prob); hs_data[task_idx].ending_logprob_count[ending_idx]++; @@ -267,9 +461,9 @@ void hellaswag_score(llama_context * ctx, const gpt_params & params) { } // Find the ending with maximum logprob - size_t ending_logprob_max_idx = -1; - double ending_logprob_max_val = -INFINITY; - for (size_t j=0; j < 4; j++) { + size_t ending_logprob_max_idx = 0; + double ending_logprob_max_val = hs_data[task_idx].ending_logprob[0]; + for (size_t j = 1; j < 4; j++) { if (hs_data[task_idx].ending_logprob[j] > ending_logprob_max_val) { ending_logprob_max_idx = j; ending_logprob_max_val = hs_data[task_idx].ending_logprob[j]; @@ -304,6 +498,12 @@ int main(int argc, char ** argv) { params.perplexity = true; params.n_batch = std::min(params.n_batch, params.n_ctx); + if (params.ppl_stride > 0) { + fprintf(stderr, "Will perform strided perplexity calculation -> adjusting context size from %d to %d\n", + params.n_ctx, params.n_ctx + params.ppl_stride/2); + params.n_ctx += params.ppl_stride/2; + } + if (params.n_ctx > 2048) { fprintf(stderr, "%s: warning: model might not support context sizes greater than 2048 tokens (%d specified);" "expect poor results\n", __func__, params.n_ctx); diff --git a/examples/quantize-stats/quantize-stats.cpp b/examples/quantize-stats/quantize-stats.cpp index 6aa06ec8f..06ce18f09 100644 --- a/examples/quantize-stats/quantize-stats.cpp +++ b/examples/quantize-stats/quantize-stats.cpp @@ -24,7 +24,7 @@ #endif struct quantize_stats_params { - std::string model = "models/7B/ggml-model-f16.bin"; + std::string model = "models/7B/ggml-model-f16.gguf"; bool verbose = false; bool per_layer_stats = false; bool print_histogram = false; diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index 744f549c5..d172f645a 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -14,25 +14,25 @@ struct quant_option { }; static const std::vector QUANT_OPTIONS = { - { "Q4_0", LLAMA_FTYPE_MOSTLY_Q4_0, " 3.50G, +0.2499 ppl @ 7B", }, - { "Q4_1", LLAMA_FTYPE_MOSTLY_Q4_1, " 3.90G, +0.1846 ppl @ 7B", }, - { "Q5_0", LLAMA_FTYPE_MOSTLY_Q5_0, " 4.30G, +0.0796 ppl @ 7B", }, - { "Q5_1", LLAMA_FTYPE_MOSTLY_Q5_1, " 4.70G, +0.0415 ppl @ 7B", }, + { "Q4_0", LLAMA_FTYPE_MOSTLY_Q4_0, " 3.56G, +0.2166 ppl @ LLaMA-v1-7B", }, + { "Q4_1", LLAMA_FTYPE_MOSTLY_Q4_1, " 3.90G, +0.1585 ppl @ LLaMA-v1-7B", }, + { 
"Q5_0", LLAMA_FTYPE_MOSTLY_Q5_0, " 4.33G, +0.0683 ppl @ LLaMA-v1-7B", }, + { "Q5_1", LLAMA_FTYPE_MOSTLY_Q5_1, " 4.70G, +0.0349 ppl @ LLaMA-v1-7B", }, #ifdef GGML_USE_K_QUANTS - { "Q2_K", LLAMA_FTYPE_MOSTLY_Q2_K, " 2.67G, +0.8698 ppl @ 7B", }, + { "Q2_K", LLAMA_FTYPE_MOSTLY_Q2_K, " 2.63G, +0.6717 ppl @ LLaMA-v1-7B", }, { "Q3_K", LLAMA_FTYPE_MOSTLY_Q3_K_M, "alias for Q3_K_M" }, - { "Q3_K_S", LLAMA_FTYPE_MOSTLY_Q3_K_S, " 2.75G, +0.5505 ppl @ 7B", }, - { "Q3_K_M", LLAMA_FTYPE_MOSTLY_Q3_K_M, " 3.06G, +0.2437 ppl @ 7B", }, - { "Q3_K_L", LLAMA_FTYPE_MOSTLY_Q3_K_L, " 3.35G, +0.1803 ppl @ 7B", }, + { "Q3_K_S", LLAMA_FTYPE_MOSTLY_Q3_K_S, " 2.75G, +0.5551 ppl @ LLaMA-v1-7B", }, + { "Q3_K_M", LLAMA_FTYPE_MOSTLY_Q3_K_M, " 3.07G, +0.2496 ppl @ LLaMA-v1-7B", }, + { "Q3_K_L", LLAMA_FTYPE_MOSTLY_Q3_K_L, " 3.35G, +0.1764 ppl @ LLaMA-v1-7B", }, { "Q4_K", LLAMA_FTYPE_MOSTLY_Q4_K_M, "alias for Q4_K_M", }, - { "Q4_K_S", LLAMA_FTYPE_MOSTLY_Q4_K_S, " 3.56G, +0.1149 ppl @ 7B", }, - { "Q4_K_M", LLAMA_FTYPE_MOSTLY_Q4_K_M, " 3.80G, +0.0535 ppl @ 7B", }, + { "Q4_K_S", LLAMA_FTYPE_MOSTLY_Q4_K_S, " 3.59G, +0.0992 ppl @ LLaMA-v1-7B", }, + { "Q4_K_M", LLAMA_FTYPE_MOSTLY_Q4_K_M, " 3.80G, +0.0532 ppl @ LLaMA-v1-7B", }, { "Q5_K", LLAMA_FTYPE_MOSTLY_Q5_K_M, "alias for Q5_K_M", }, - { "Q5_K_S", LLAMA_FTYPE_MOSTLY_Q5_K_S, " 4.33G, +0.0353 ppl @ 7B", }, - { "Q5_K_M", LLAMA_FTYPE_MOSTLY_Q5_K_M, " 4.45G, +0.0142 ppl @ 7B", }, - { "Q6_K", LLAMA_FTYPE_MOSTLY_Q6_K, " 5.15G, +0.0044 ppl @ 7B", }, + { "Q5_K_S", LLAMA_FTYPE_MOSTLY_Q5_K_S, " 4.33G, +0.0400 ppl @ LLaMA-v1-7B", }, + { "Q5_K_M", LLAMA_FTYPE_MOSTLY_Q5_K_M, " 4.45G, +0.0122 ppl @ LLaMA-v1-7B", }, + { "Q6_K", LLAMA_FTYPE_MOSTLY_Q6_K, " 5.15G, -0.0008 ppl @ LLaMA-v1-7B", }, #endif - { "Q8_0", LLAMA_FTYPE_MOSTLY_Q8_0, " 6.70G, +0.0004 ppl @ 7B", }, + { "Q8_0", LLAMA_FTYPE_MOSTLY_Q8_0, " 6.70G, +0.0004 ppl @ LLaMA-v1-7B", }, { "F16", LLAMA_FTYPE_MOSTLY_F16, "13.00G @ 7B", }, { "F32", LLAMA_FTYPE_ALL_F32, "26.00G @ 7B", }, }; @@ -68,10 +68,10 @@ bool try_parse_ftype(const std::string & ftype_str_in, llama_ftype & ftype, std: } // usage: -// ./quantize [--allow-requantize] [--leave-output-tensor] models/llama/ggml-model.bin [models/llama/ggml-model-quant.bin] type [nthreads] +// ./quantize [--allow-requantize] [--leave-output-tensor] models/llama/ggml-model.gguf [models/llama/ggml-model-quant.gguf] type [nthreads] // void usage(const char * executable) { - fprintf(stderr, "usage: %s [--help] [--allow-requantize] [--leave-output-tensor] model-f32.bin [model-quant.bin] type [nthreads]\n\n", executable); + fprintf(stderr, "usage: %s [--help] [--allow-requantize] [--leave-output-tensor] model-f32.gguf [model-quant.gguf] type [nthreads]\n\n", executable); fprintf(stderr, " --allow-requantize: Allows requantizing tensors that have already been quantized. Warning: This can severely reduce quality compared to quantizing from 16bit or 32bit\n"); fprintf(stderr, " --leave-output-tensor: Will leave output.weight un(re)quantized. 
diff --git a/examples/reason-act.sh b/examples/reason-act.sh index e7fe655db..046c48db5 100755 --- a/examples/reason-act.sh +++ b/examples/reason-act.sh @@ -1,4 +1,3 @@ - #!/bin/bash cd `dirname $0`
diff --git a/examples/save-load-state/save-load-state.cpp b/examples/save-load-state/save-load-state.cpp index 61c71c358..3db61b754 100644 --- a/examples/save-load-state/save-load-state.cpp +++ b/examples/save-load-state/save-load-state.cpp @@ -26,7 +26,6 @@ int main(int argc, char ** argv) { auto lparams = llama_context_default_params(); lparams.n_ctx = params.n_ctx; - lparams.n_gqa = params.n_gqa; lparams.seed = params.seed; lparams.f16_kv = params.memory_f16; lparams.use_mmap = params.use_mmap; @@ -45,9 +44,8 @@ int main(int argc, char ** argv) { llama_free_model(model); return 1; } - auto tokens = std::vector<llama_token>(params.n_ctx); - auto n_prompt_tokens = llama_tokenize(ctx, params.prompt.c_str(), tokens.data(), int(tokens.size()), true); - + auto tokens = llama_tokenize(ctx, params.prompt.c_str(), true); + auto n_prompt_tokens = tokens.size(); if (n_prompt_tokens < 1) { fprintf(stderr, "%s : failed to tokenize prompt\n", __func__); llama_free(ctx); @@ -92,7 +90,7 @@ int main(int argc, char ** argv) { auto next_token_str = llama_token_to_str(ctx, next_token); last_n_tokens_data.push_back(next_token); - printf("%s", next_token_str); + printf("%s", next_token_str.c_str()); if (llama_eval(ctx, &next_token, 1, n_past, params.n_threads)) { fprintf(stderr, "\n%s : failed to evaluate\n", __func__); llama_free(ctx); @@ -152,7 +150,7 @@ int main(int argc, char ** argv) { auto next_token_str = llama_token_to_str(ctx2, next_token); last_n_tokens_data.push_back(next_token); - printf("%s", next_token_str); + printf("%s", next_token_str.c_str()); if (llama_eval(ctx2, &next_token, 1, n_past, params.n_threads)) { fprintf(stderr, "\n%s : failed to evaluate\n", __func__); llama_free(ctx2);
diff --git a/examples/server-llama2-13B.sh b/examples/server-llama2-13B.sh old mode 100644 new mode 100755
diff --git a/examples/server/README.md b/examples/server/README.md index aee31ae42..77997f98d 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -5,7 +5,7 @@ This example demonstrates a simple HTTP API server and a simple web front end to Command line options: - `--threads N`, `-t N`: Set the number of threads to use during computation. -- `-m FNAME`, `--model FNAME`: Specify the path to the LLaMA model file (e.g., `models/7B/ggml-model.bin`). +- `-m FNAME`, `--model FNAME`: Specify the path to the LLaMA model file (e.g., `models/7B/ggml-model.gguf`). - `-m ALIAS`, `--alias ALIAS`: Set an alias for the model. The alias will be returned in API responses. - `-c N`, `--ctx-size N`: Set the size of the prompt context. The default is 512, but LLaMA models were built with a context of 2048, which will provide better results for longer input/inference. The size may differ in other models, for example, baichuan models were built with a context of 4096.
- `-ngl N`, `--n-gpu-layers N`: When compiled with appropriate support (currently CLBlast or cuBLAS), this option allows offloading some layers to the GPU for computation. Generally results in increased performance. @@ -16,6 +16,7 @@ Command line options: - `--memory-f32`: Use 32-bit floats instead of 16-bit floats for memory key+value. Not recommended. - `--mlock`: Lock the model in memory, preventing it from being swapped out when memory-mapped. - `--no-mmap`: Do not memory-map the model. By default, models are mapped into memory, which allows the system to load only the necessary parts of the model as needed. +- `--numa`: Attempt optimizations that help on some NUMA systems. - `--lora FNAME`: Apply a LoRA (Low-Rank Adaptation) adapter to the model (implies --no-mmap). This allows you to adapt the pretrained model to specific tasks or domains. - `--lora-base FNAME`: Optional model to use as a base for the layers modified by the LoRA adapter. This flag is used in conjunction with the `--lora` flag, and specifies the base model for the adaptation. - `-to N`, `--timeout N`: Server read/write timeout in seconds. Default `600`. @@ -47,15 +48,14 @@ To get started right away, run the following command, making sure to use the cor ### Unix-based systems (Linux, macOS, etc.): ```bash -./server -m models/7B/ggml-model.bin -c 2048 +./server -m models/7B/ggml-model.gguf -c 2048 ``` ### Windows: ```powershell -server.exe -m models\7B\ggml-model.bin -c 2048 +server.exe -m models\7B\ggml-model.gguf -c 2048 ``` - The above command will start a server that by default listens on `127.0.0.1:8080`. You can consume the endpoints with Postman or NodeJS with axios library. You can visit the web front end at the same url. @@ -126,7 +126,7 @@ node . `stream`: It allows receiving each predicted token in real-time instead of waiting for the completion to finish. To enable this, set to `true`. - `prompt`: Provide a prompt. Internally, the prompt is compared, and it detects if a part has already been evaluated, and the remaining part will be evaluate. A space is inserted in the front like main.cpp does. + `prompt`: Provide a prompt as a string, or as an array of strings and numbers representing tokens. Internally, the prompt is compared, and it detects if a part has already been evaluated, and the remaining part will be evaluated. If the prompt is a string, or an array with the first element given as a string, a space is inserted in the front like main.cpp does. `stop`: Specify a JSON array of stopping strings. These words will not be included in the completion, so make sure to add them to the prompt for the next iteration (default: []). @@ -151,6 +151,8 @@ node . `mirostat_eta`: Set the Mirostat learning rate, parameter eta (default: 0.1). + `grammar`: Set grammar for grammar-based sampling (default: no grammar) + `seed`: Set the random number generator (RNG) seed (default: -1, -1 = random seed). `ignore_eos`: Ignore end of stream token and continue generating (default: false).
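Putting the documented fields together, a request could look like the sketch below. The `/completion` path is an assumption based on the server example's API, the address follows the README's default of `127.0.0.1:8080`, and the prompt and grammar strings are placeholders:

```bash
# Ask the server for a completion constrained by a GBNF grammar; only fields
# documented above (prompt, stop, grammar, stream) are used.
curl --request POST \
    --url http://127.0.0.1:8080/completion \
    --header "Content-Type: application/json" \
    --data '{"prompt": "Building a website can be done in 10 simple steps:", "stop": ["\n"], "grammar": "root ::= [a-z ]+", "stream": false}'
```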
diff --git a/examples/server/api_like_OAI.py b/examples/server/api_like_OAI.py index aa325a03e..ed19237b0 100755 --- a/examples/server/api_like_OAI.py +++ b/examples/server/api_like_OAI.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 import argparse from flask import Flask, jsonify, request, Response import urllib.parse diff --git a/examples/server/chat-llama2.sh b/examples/server/chat-llama2.sh old mode 100644 new mode 100755 diff --git a/examples/server/chat.mjs b/examples/server/chat.mjs index 8269e2592..87f4d2926 100644 --- a/examples/server/chat.mjs +++ b/examples/server/chat.mjs @@ -1,5 +1,34 @@ import * as readline from 'node:readline' import { stdin, stdout } from 'node:process' +import { readFileSync } from 'node:fs' +import { SchemaConverter } from './public/json-schema-to-grammar.mjs' + +const args = process.argv.slice(2); +const grammarJsonSchemaFile = args.find( + (_, index) => args[index - 1] === "--grammar-json-schema" +); +const grammarFile = args.find((_, index) => args[index - 1] === "--grammar"); + +// Example usage: function,arguments +const grammarJsonSchemaPropOrder = args.find( + (_, index) => args[index - 1] === "--grammar-json-schema-prop-order" +); +const propOrder = grammarJsonSchemaPropOrder + ? grammarJsonSchemaPropOrder + .split(",") + .reduce((acc, cur, index) => ({ ...acc, [cur]: index }), {}) + : {}; + +let grammar = null +if (grammarJsonSchemaFile) { + const schema = JSON.parse(readFileSync(grammarJsonSchemaFile, 'utf-8')) + const converter = new SchemaConverter(propOrder) + converter.visit(schema, '') + grammar = converter.formatGrammar() +} +if (grammarFile) { + grammar = readFileSync(grammarFile, 'utf-8') +} const API_URL = 'http://127.0.0.1:8080' @@ -48,6 +77,7 @@ async function chat_completion(question) { n_keep: n_keep, n_predict: 256, stop: ["\n### Human:"], // stop completion after generating this + grammar, stream: true, }) }) diff --git a/examples/server/chat.sh b/examples/server/chat.sh old mode 100644 new mode 100755 diff --git a/examples/server/deps.sh b/examples/server/deps.sh index 1e9fe964b..ea23e6450 100755 --- a/examples/server/deps.sh +++ b/examples/server/deps.sh @@ -11,8 +11,10 @@ echo >> $PUBLIC/index.js # add newline FILES=$(ls $PUBLIC) +cd $PUBLIC for FILE in $FILES; do - func=$(echo $FILE | tr '.' 
'_') - echo "generate $FILE.hpp ($func)" - xxd -n $func -i $PUBLIC/$FILE > $DIR/$FILE.hpp + echo "generate $FILE.hpp" + + # use simple flag for old version of xxd + xxd -i $FILE > $DIR/$FILE.hpp done diff --git a/examples/server/index.html.hpp b/examples/server/index.html.hpp index bd0b47870..76851d9c6 100644 --- a/examples/server/index.html.hpp +++ b/examples/server/index.html.hpp @@ -207,81 +207,469 @@ unsigned char index_html[] = { 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x6a, 0x73, 0x27, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x73, 0x69, - 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3a, 0x20, 0x22, 0x54, 0x68, - 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x76, - 0x65, 0x72, 0x73, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x65, 0x74, - 0x77, 0x65, 0x65, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x61, 0x6e, - 0x64, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x61, 0x20, 0x66, - 0x72, 0x69, 0x65, 0x6e, 0x64, 0x6c, 0x79, 0x20, 0x63, 0x68, 0x61, 0x74, - 0x62, 0x6f, 0x74, 0x2e, 0x20, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x64, - 0x20, 0x69, 0x6e, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x6d, - 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x2e, 0x22, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x3a, 0x20, 0x22, 0x7b, 0x7b, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x7d, 0x7d, 0x5c, 0x6e, 0x5c, 0x6e, 0x7b, 0x7b, 0x68, 0x69, 0x73, 0x74, - 0x6f, 0x72, 0x79, 0x7d, 0x7d, 0x5c, 0x6e, 0x7b, 0x7b, 0x63, 0x68, 0x61, - 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x22, 0x7b, 0x7b, 0x6e, 0x61, 0x6d, - 0x65, 0x7d, 0x7d, 0x3a, 0x20, 0x7b, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x7d, 0x7d, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3a, - 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x79, 0x70, 0x65, 0x3a, 0x20, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x68, 0x61, 0x72, 0x3a, - 0x20, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x22, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x3a, 0x20, 0x22, 0x55, - 0x73, 0x65, 0x72, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x3a, 0x20, 0x34, - 0x30, 0x30, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x3a, 0x20, 0x30, - 0x2e, 0x37, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x3a, - 0x20, 0x32, 0x35, 0x36, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, - 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x70, 0x65, 0x6e, - 0x61, 0x6c, 0x74, 0x79, 0x2c, 0x20, 0x2d, 0x31, 0x20, 0x3d, 0x20, 0x63, - 0x6f, 0x6e, 
0x74, 0x65, 0x78, 0x74, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, - 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x31, 0x2e, - 0x31, 0x38, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, - 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x3a, 0x20, 0x34, - 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x3c, 0x3d, 0x20, 0x30, 0x20, 0x74, - 0x6f, 0x20, 0x75, 0x73, 0x65, 0x20, 0x76, 0x6f, 0x63, 0x61, 0x62, 0x20, - 0x73, 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x6f, 0x70, 0x5f, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x2c, 0x20, 0x2f, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x7b, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x43, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, + 0x20, 0x27, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x2d, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x2d, 0x74, 0x6f, 0x2d, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x2e, 0x6d, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x6d, + 0x70, 0x74, 0x3a, 0x20, 0x22, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, + 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, 0x6e, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x2c, 0x20, 0x61, 0x20, 0x66, 0x72, 0x69, 0x65, 0x6e, 0x64, + 0x6c, 0x79, 0x20, 0x63, 0x68, 0x61, 0x74, 0x62, 0x6f, 0x74, 0x2e, 0x20, + 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x64, 0x20, 0x69, 0x6e, 0x20, 0x73, + 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x6d, 0x61, 0x72, 0x6b, 0x64, 0x6f, + 0x77, 0x6e, 0x2e, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x22, 0x7b, + 0x7b, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, 0x7d, 0x5c, 0x6e, 0x5c, + 0x6e, 0x7b, 0x7b, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x7d, 0x7d, + 0x5c, 0x6e, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x69, 0x73, 0x74, + 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, + 0x20, 0x22, 0x7b, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x7d, 0x3a, 0x20, + 0x7b, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x7d, 0x22, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3a, 0x20, + 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x68, 0x61, 0x72, 0x3a, 0x20, 0x22, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, + 0x73, 0x65, 0x72, 0x3a, 0x20, 0x22, 0x55, 0x73, 0x65, 0x72, 0x22, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x65, + 0x64, 0x69, 0x63, 0x74, 0x3a, 0x20, 0x34, 0x30, 0x30, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 
0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x3a, 0x20, 0x30, 0x2e, 0x37, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, + 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x3a, 0x20, 0x32, 0x35, 0x36, 0x2c, + 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, + 0x62, 0x6c, 0x65, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x2c, + 0x20, 0x2d, 0x31, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, + 0x74, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, + 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x31, 0x2e, 0x31, 0x38, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x66, 0x73, 0x5f, 0x7a, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, - 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, - 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x3a, 0x20, 0x31, 0x2e, - 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, - 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, - 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, - 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x3a, 0x20, - 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2f, 0x31, 0x2f, 0x32, 0x0a, + 0x6f, 0x70, 0x5f, 0x6b, 0x3a, 0x20, 0x34, 0x30, 0x2c, 0x20, 0x2f, 0x2f, + 0x20, 0x3c, 0x3d, 0x20, 0x30, 0x20, 0x74, 0x6f, 0x20, 0x75, 0x73, 0x65, + 0x20, 0x76, 0x6f, 0x63, 0x61, 0x62, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, + 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x3a, + 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, + 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, + 0x6c, 0x5f, 0x70, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, + 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, + 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, + 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, + 0x74, 0x79, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, + 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, + 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x72, 0x65, + 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, + 0x74, 0x79, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, + 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, + 0x65, 0x64, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x2f, 0x2f, + 0x20, 0x30, 0x2f, 0x31, 0x2f, 0x32, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, + 0x75, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x20, 0x65, 0x6e, 0x74, 0x72, 0x6f, 0x70, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, - 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x2f, - 0x2f, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x65, 0x6e, 0x74, - 0x72, 0x6f, 0x70, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, - 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x65, 0x74, 0x61, 0x3a, - 0x20, 0x30, 0x2e, 0x31, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x6c, 0x65, 0x61, - 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x20, 0x72, 0x61, 0x74, 0x65, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x61, 0x74, 0x5f, 0x65, 0x74, 0x61, 0x3a, 0x20, 0x30, 0x2e, 0x31, 0x2c, + 0x20, 0x2f, 0x2f, 0x20, 0x6c, 0x65, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, + 0x20, 0x72, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x27, 0x27, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2a, 0x20, 0x53, 0x54, 0x41, 0x52, 0x54, 0x3a, 0x20, 0x53, + 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x73, + 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, + 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, 0x72, 0x6f, 0x77, 0x73, 0x65, + 0x72, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, + 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, + 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, + 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x3d, 0x20, 0x22, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x63, 0x70, 0x70, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, + 0x67, 0x65, 0x22, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, + 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, + 0x63, 0x74, 0x28, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, + 0x65, 0x2e, 0x73, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, + 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, + 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, + 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x2c, 0x20, + 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, + 0x66, 0x79, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, + 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, + 0x73, 0x65, 0x74, 
0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x52, + 0x61, 0x77, 0x54, 0x65, 0x78, 0x74, 0x28, 0x74, 0x61, 0x67, 0x2c, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x73, 0x65, 0x74, 0x49, 0x74, 0x65, + 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, + 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, + 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, + 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, + 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x28, 0x74, 0x61, 0x67, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, + 0x74, 0x65, 0x6d, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, + 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x65, 0x74, 0x49, 0x74, + 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, + 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, + 0x74, 0x61, 0x67, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x21, 0x69, 0x74, 0x65, 0x6d, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, + 0x73, 0x65, 0x28, 0x69, 0x74, 0x65, 0x6d, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, + 0x73, 0x52, 0x61, 0x77, 0x54, 0x65, 0x78, 0x74, 0x28, 0x74, 0x61, 0x67, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x20, 0x3d, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, + 0x67, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, + 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, + 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x69, 0x74, + 0x65, 0x6d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x74, 0x65, + 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x63, 0x72, 
0x65, 0x61, 0x74, 0x65, 0x20, 0x61, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x3d, 0x20, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x7d, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, + 0x61, 0x6c, 0x28, 0x7b, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x27, + 0x27, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, + 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, + 0x7b, 0x7d, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, + 0x7b, 0x7d, 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x69, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x6c, 0x79, + 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, + 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x69, 0x66, 0x20, 0x74, 0x68, 0x65, + 0x72, 0x65, 0x20, 0x61, 0x72, 0x65, 0x20, 0x61, 0x6e, 0x79, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x61, 0x72, + 0x65, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x64, 0x20, 0x69, 0x6e, 0x20, + 0x6f, 0x6e, 0x65, 0x20, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x69, 0x6e, 0x20, 0x66, 0x6f, 0x72, + 0x6d, 0x20, 0x6f, 0x66, 0x20, 0x7b, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3a, 0x20, 0x22, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x64, 0x61, 0x74, 0x61, + 0x22, 0x20, 0x7d, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x7b, 0x20, 0x22, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3a, 0x22, 0x73, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x49, 0x6d, 0x70, 0x6f, + 0x72, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x29, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, + 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, + 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, + 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 
0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, + 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, + 0x77, 0x65, 0x72, 0x65, 0x20, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, + 0x66, 0x75, 0x6c, 0x79, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, + 0x64, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x50, + 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x73, 0x61, + 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x69, + 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x69, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, + 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, + 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, + 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x3d, 0x20, 0x7b, + 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, + 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, + 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x2c, 0x20, 0x73, 0x61, 0x76, 0x65, + 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x73, + 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x20, 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x2e, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x49, 0x6e, 0x69, + 0x74, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x69, 0x6e, 0x67, 0x20, 0x4c, 0x6f, + 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x61, + 0x6e, 0x64, 0x20, 0x73, 0x61, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 
0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x22, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x22, 0x3a, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, + 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, + 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, + 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, + 0x27, 0x2c, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, + 0x67, 0x28, 0x27, 0x52, 0x65, 0x73, 0x65, 0x74, 0x69, 0x6e, 0x67, 0x20, + 0x74, 0x68, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x74, 0x6f, + 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, + 0x20, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, + 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5b, 0x27, 0x64, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x27, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x74, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, + 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, + 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, + 0x72, 0x54, 0x65, 
0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, + 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, + 0x79, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, + 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x67, 0x65, 0x74, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, + 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, + 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, 0x61, 0x73, 0x74, + 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, + 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, + 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, + 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, + 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6c, 0x61, 0x73, 0x74, 0x55, + 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, + 0x27, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, 0x6f, 0x75, 0x6e, + 0x64, 0x2c, 0x20, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, + 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, + 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, + 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x4e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x75, 0x73, + 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, + 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, + 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x77, 0x61, 0x73, 0x20, 0x66, 0x6f, + 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, 0x6f, 0x61, 0x64, + 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x2e, 0x0a, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x41, 0x70, 0x70, + 0x6c, 0x79, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x20, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x20, 0x64, 0x61, + 0x74, 0x61, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, + 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x41, 0x75, 0x74, 0x6f, 0x73, + 0x61, 0x76, 0x65, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, + 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x77, 0x65, 0x20, 0x64, 0x6f, 0x6e, + 0x27, 0x74, 0x20, 0x77, 0x61, 0x6e, 0x74, 0x20, 0x74, 0x6f, 0x20, 0x73, + 0x61, 0x76, 0x65, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, + 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x20, 0x61, 0x20, 0x6e, 0x65, 0x77, + 0x20, 0x6f, 0x6e, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, + 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x2d, 0x27, 0x20, 0x2b, 0x20, 0x44, 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, + 0x77, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, + 0x28, 0x29, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, 0x65, + 0x27, 0x3a, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, + 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x53, 0x61, 0x76, + 0x69, 0x6e, 0x67, 0x20, 0x61, 0x73, 0x20, 0x27, 0x20, 0x2b, 0x20, 0x6e, + 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, + 0x6d, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x69, 0x6e, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, + 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, + 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, + 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, 0x6e, 0x65, 0x77, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x69, 0x74, 0x20, 0x62, 0x61, + 0x63, 0x6b, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x70, 0x70, 0x6c, 0x79, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, + 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, + 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, + 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, + 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, + 0x27, 0x2c, 0x20, 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, + 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x27, 0x64, + 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x27, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x6f, 0x6c, 
0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x43, 0x68, + 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x67, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, + 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, + 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, + 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, + 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x20, 0x45, 0x4e, 0x44, 0x3a, 0x20, 0x53, + 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x73, + 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, + 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, 0x72, 0x6f, 0x77, 0x73, 0x65, + 0x72, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, + 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, @@ -640,165 +1028,323 @@ unsigned char index_html[] = { 0x72, 0x28, 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x29, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x46, 0x6c, - 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, - 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, - 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, - 0x73, 0x74, 0x65, 0x70, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, - 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, - 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, - 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, - 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, 0x7b, - 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, 0x22, - 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, 0x3d, - 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, 0x73, 0x74, 0x65, - 0x70, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x74, 0x65, 0x70, 0x7d, 0x22, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, - 0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, - 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x7d, - 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x72, + 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, + 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x27, 0x27, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, + 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, + 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x65, 0x6c, 0x2e, 0x74, + 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x20, + 0x3d, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, + 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, + 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x28, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x2c, 0x27, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x72, 0x65, 0x64, 0x75, 0x63, 0x65, 0x28, 0x28, 0x61, 0x63, + 0x63, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x28, 0x7b, 0x2e, 0x2e, 0x2e, 0x61, 0x63, 0x63, 0x2c, 0x20, + 0x5b, 0x63, 0x75, 0x72, 0x2e, 0x74, 0x72, 0x69, 0x6d, 0x28, 0x29, 0x5d, + 0x3a, 0x20, 0x69, 0x7d, 0x29, 0x2c, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, + 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x27, 0x27, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, + 0x74, 0x65, 0x72, 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x47, 0x72, + 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x28, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, + 0x20, 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x60, + 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x66, 0x61, 0x69, 0x6c, + 0x65, 0x64, 0x3a, 0x20, 0x24, 0x7b, 0x65, 0x2e, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x7d, 0x60, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x2c, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, + 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x20, - 0x3d, 0x20, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, - 0x61, 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, - 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, - 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x6e, 0x67, - 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, - 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, - 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, 
0x3d, 0x22, 0x24, 0x7b, - 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, - 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, - 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, - 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, + 0x65, 0x3d, 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, + 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, + 0x69, 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, + 0x6d, 0x61, 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, + 0x20, 0x73, 0x74, 0x65, 0x70, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x74, 0x65, + 0x70, 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, + 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x46, 0x6c, + 0x6f, 0x61, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, - 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x70, 0x72, - 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x49, 0x6e, 0x74, 0x46, 0x69, + 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, + 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x74, 
0x79, 0x70, - 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x6d, 0x70, 0x74, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, - 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, - 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, - 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, + 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, + 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, + 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, + 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, + 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, + 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, + 0x6c, 0x79, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 
0x20, 0x27, + 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, + 0x6c, 0x65, 0x64, 0x3e, 0x55, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, + 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x52, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, + 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, + 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x75, 0x74, 0x6f, + 0x73, 0x61, 0x76, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x20, 0x6f, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x72, 0x79, 0x20, 0x63, + 0x68, 0x61, 0x6e, 0x67, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, + 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x24, 0x7b, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, 0x74, + 0x74, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, + 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, + 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x3e, 0x50, 0x72, 0x6f, 
0x6d, 0x70, + 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x74, 0x79, + 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, + 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, + 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, + 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, + 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, + 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x3e, 0x55, + 0x73, 0x65, 0x72, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, + 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, + 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, - 0x6f, 0x72, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x3e, 0x55, 0x73, - 0x65, 0x72, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x75, 0x73, 0x65, 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x3c, - 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, - 0x72, 0x3d, 0x22, 0x62, 0x6f, 0x74, 0x22, 0x3e, 0x42, 0x6f, 0x74, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, - 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, - 0x6d, 0x65, 0x3d, 0x22, 0x63, 0x68, 0x61, 0x72, 0x22, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x63, 0x68, 0x61, - 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, - 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, - 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, - 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, + 0x6f, 0x72, 0x3d, 0x22, 0x62, 0x6f, 0x74, 0x22, 0x3e, 0x42, 0x6f, 0x74, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x63, 0x68, 0x61, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, - 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, - 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x63, 0x68, + 0x61, 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 
0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, + 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, + 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, + 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x43, 0x68, + 0x61, 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, + 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, + 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, + 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, + 0x77, 0x73, 0x3d, 0x31, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, + 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 
0x22, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x47, 0x72, + 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, + 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, + 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x55, 0x73, 0x65, 0x20, 0x67, + 0x62, 0x6e, 0x66, 0x20, 0x6f, 0x72, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2b, 0x63, 0x6f, 0x6e, 0x76, 0x65, + 0x72, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, + 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x22, 0x20, + 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x43, 0x68, 0x61, - 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, - 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x68, - 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, - 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, - 0x73, 0x3d, 0x31, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, - 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, + 0x70, 0x72, 0x6f, 0x70, 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x22, 0x20, + 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, + 0x22, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x70, + 0x31, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x32, 0x2c, 0x70, 0x72, 0x6f, 0x70, + 0x33, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, + 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x7d, 0x20, 0x2f, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, + 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, + 0x22, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, + 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 
0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x7d, 0x3e, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x4a, 0x53, + 0x4f, 0x4e, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x3c, 0x2f, 0x62, + 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, @@ -1149,4 +1695,4 @@ unsigned char index_html[] = { 0x3c, 0x2f, 0x62, 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a }; -unsigned int index_html_len = 13781; +unsigned int index_html_len = 20333; diff --git a/examples/server/index.js.hpp b/examples/server/index.js.hpp index a3b5be6d8..c9dc078b7 100644 --- a/examples/server/index.js.hpp +++ b/examples/server/index.js.hpp @@ -4,1555 +4,1578 @@ unsigned char index_js[] = { 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x43, 0x79, 0x63, 0x6c, 0x65, 0x20, 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x22, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6e, 0x28, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x6f, 0x3e, 0x31, 0x29, 0x7b, 0x6f, 0x2d, 0x2d, 0x3b, + 0x69, 0x66, 0x28, 0x75, 0x3e, 0x31, 0x29, 0x7b, 0x75, 0x2d, 0x2d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x2c, 0x6e, 0x3d, 0x21, 0x31, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x5f, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x3d, 0x5f, 0x3b, 0x5f, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3b, 0x72, 0x2b, 0x2b, 0x3b, 0x77, 0x68, 0x69, + 0x69, 0x64, 0x20, 0x30, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x69, 0x2e, 0x6f, 0x3b, 0x69, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, 0x66, - 0x28, 0x21, 0x28, 0x38, 0x26, 0x69, 0x2e, 0x66, 0x29, 0x26, 0x26, 0x63, + 0x28, 0x21, 0x28, 0x38, 0x26, 0x69, 0x2e, 0x66, 0x29, 0x26, 0x26, 0x61, 0x28, 0x69, 0x29, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x2e, 0x63, 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x65, 0x3b, 0x6e, 0x3d, - 0x21, 0x30, 0x7d, 0x7d, 0x69, 0x3d, 0x5f, 0x7d, 0x7d, 0x72, 0x3d, 0x30, - 0x3b, 0x6f, 0x2d, 0x2d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x74, 0x68, + 0x21, 0x30, 0x7d, 0x7d, 0x69, 0x3d, 0x5f, 0x7d, 0x7d, 0x66, 0x3d, 0x30, + 0x3b, 0x75, 0x2d, 0x2d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x65, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x6f, + 0x6f, 0x6e, 0x20, 0x65, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x75, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, - 0x29, 0x3b, 0x6f, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, + 0x29, 0x3b, 0x75, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x6e, 0x28, 0x29, 0x7d, 0x7d, 0x6c, 0x65, - 0x74, 0x20, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x3d, 0x30, 0x2c, 0x72, 0x3d, - 0x30, 0x2c, 0x75, 0x3d, 0x30, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x6c, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 
0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, - 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x6e, 0x2e, 0x74, 0x21, 0x3d, 0x3d, - 0x69, 0x29, 0x7b, 0x6e, 0x3d, 0x7b, 0x69, 0x3a, 0x30, 0x2c, 0x53, 0x3a, - 0x74, 0x2c, 0x70, 0x3a, 0x69, 0x2e, 0x73, 0x2c, 0x6e, 0x3a, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x3a, 0x69, 0x2c, 0x65, 0x3a, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x78, 0x3a, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x2c, 0x72, 0x3a, 0x6e, 0x7d, 0x3b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x2e, 0x73, 0x29, - 0x69, 0x2e, 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, - 0x6e, 0x3b, 0x74, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x33, - 0x32, 0x26, 0x69, 0x2e, 0x66, 0x29, 0x74, 0x2e, 0x53, 0x28, 0x6e, 0x29, - 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x6e, - 0x2e, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x66, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, - 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x6e, 0x2e, 0x70, - 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x6e, 0x2e, 0x70, 0x29, 0x6e, 0x2e, 0x70, 0x2e, 0x6e, 0x3d, 0x6e, - 0x2e, 0x6e, 0x3b, 0x6e, 0x2e, 0x70, 0x3d, 0x69, 0x2e, 0x73, 0x3b, 0x6e, - 0x2e, 0x6e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x2e, - 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x28, 0x74, 0x29, 0x7b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6e, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x66, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x68, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x66, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x74, - 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, - 0x2e, 0x65, 0x29, 0x7b, 0x74, 0x2e, 0x78, 0x3d, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x74, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x74, 0x2e, 0x65, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x74, 0x3d, 0x74, 0x7d, 0x7d, 0x3b, 0x66, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, 0x3d, 0x66, 0x75, + 0x74, 0x20, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x3d, 0x30, 0x3b, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x28, 0x74, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x6f, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x28, 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3b, 0x6f, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 
0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, 0x6e, 0x61, + 0x6c, 0x6c, 0x79, 0x7b, 0x6f, 0x2d, 0x2d, 0x3b, 0x69, 0x3d, 0x6e, 0x7d, + 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x75, 0x3d, 0x30, 0x2c, 0x66, 0x3d, 0x30, + 0x2c, 0x6c, 0x3d, 0x30, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x73, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, + 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, + 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x6e, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x69, + 0x29, 0x7b, 0x6e, 0x3d, 0x7b, 0x69, 0x3a, 0x30, 0x2c, 0x53, 0x3a, 0x74, + 0x2c, 0x70, 0x3a, 0x69, 0x2e, 0x73, 0x2c, 0x6e, 0x3a, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x2c, 0x74, 0x3a, 0x69, 0x2c, 0x65, 0x3a, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x2c, 0x78, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x2c, 0x72, 0x3a, 0x6e, 0x7d, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x2e, 0x73, 0x29, 0x69, + 0x2e, 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, + 0x3b, 0x74, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x33, 0x32, + 0x26, 0x69, 0x2e, 0x66, 0x29, 0x74, 0x2e, 0x53, 0x28, 0x6e, 0x29, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x65, 0x6c, 0x73, + 0x65, 0x20, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x6e, 0x2e, + 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x66, 0x28, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, + 0x29, 0x7b, 0x6e, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x6e, 0x2e, 0x70, 0x3b, + 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, + 0x6e, 0x2e, 0x70, 0x29, 0x6e, 0x2e, 0x70, 0x2e, 0x6e, 0x3d, 0x6e, 0x2e, + 0x6e, 0x3b, 0x6e, 0x2e, 0x70, 0x3d, 0x69, 0x2e, 0x73, 0x3b, 0x6e, 0x2e, + 0x6e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x2e, 0x73, + 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x63, 0x28, 0x74, 0x29, 0x7b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6e, 0x3d, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x63, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x68, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x6e, 0x3d, 0x74, 0x2e, 0x65, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x78, 0x3b, - 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, - 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x78, 0x3d, 0x65, 0x3b, 0x74, 0x2e, 0x65, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x65, - 0x2e, 0x65, 0x3d, 0x6e, 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x74, 0x3d, 0x3d, 0x3d, 0x74, - 0x68, 0x69, 0x73, 
0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, - 0x3d, 0x65, 0x7d, 0x7d, 0x3b, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, - 0x62, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x62, - 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, - 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x69, 0x3d, 0x33, 0x32, 0x26, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x66, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, - 0x3d, 0x2d, 0x33, 0x33, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x28, 0x65, - 0x29, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x69, 0x7d, 0x7d, 0x29, 0x29, 0x7d, - 0x3b, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x66, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x7d, 0x3b, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2b, 0x22, 0x22, 0x7d, 0x3b, 0x66, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x6f, 0x4a, - 0x53, 0x4f, 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x66, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, - 0x65, 0x6b, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x74, 0x26, + 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, + 0x65, 0x29, 0x7b, 0x74, 0x2e, 0x78, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x74, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, + 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x74, 0x2e, 0x65, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x74, 0x3d, 0x74, 0x7d, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, + 0x3d, 0x74, 0x2e, 0x65, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x78, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, + 0x29, 0x7b, 0x6e, 0x2e, 0x78, 0x3d, 0x65, 0x3b, 0x74, 0x2e, 0x65, 0x3d, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x65, 0x2e, + 0x65, 0x3d, 0x6e, 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x74, 0x3d, 0x3d, 0x3d, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, + 0x65, 0x7d, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, + 0x79, 0x70, 0x65, 
0x2e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, + 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, + 0x69, 0x73, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x53, 0x28, + 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x69, 0x3d, 0x33, 0x32, 0x26, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x66, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, + 0x2d, 0x33, 0x33, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x28, 0x65, 0x29, + 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x69, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x3b, + 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x66, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, + 0x65, 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2b, 0x22, 0x22, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x6f, 0x4a, 0x53, + 0x4f, 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x76, 0x7d, 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, - 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, - 0x74, 0x79, 0x28, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x2c, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, - 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x74, 0x3d, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x66, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, - 0x74, 0x2e, 0x69, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, - 0x7d, 0x2c, 0x73, 0x65, 0x74, 0x28, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, - 0x69, 0x20, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, - 0x20, 0x70, 0x29, 0x21, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, - 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, - 0x75, 0x74, 0x65, 0x64, 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, - 0x68, 0x61, 0x76, 0x65, 0x20, 0x73, 0x69, 0x64, 0x65, 0x2d, 0x65, 0x66, - 0x66, 0x65, 0x63, 0x74, 0x73, 0x22, 0x29, 0x7d, 0x28, 0x29, 0x3b, 0x69, - 0x66, 0x28, 0x65, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, - 0x29, 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x31, 0x30, 0x30, 0x29, 0x74, - 0x28, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x65, 0x3b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x3b, 0x75, 0x2b, 0x2b, - 0x3b, 0x6f, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x66, 0x6f, 0x72, - 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x74, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, - 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, 0x2e, 0x4e, - 0x28, 0x29, 0x7d, 
0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x6e, - 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x73, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x66, 0x28, 0x74, - 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x63, - 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, - 0x6e, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x21, 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x69, - 0x66, 0x28, 0x6e, 0x2e, 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, - 0x69, 0x7c, 0x7c, 0x21, 0x6e, 0x2e, 0x53, 0x2e, 0x68, 0x28, 0x29, 0x7c, - 0x7c, 0x6e, 0x2e, 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, - 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, - 0x6e, 0x2e, 0x6e, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, - 0x3d, 0x6e, 0x2e, 0x53, 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x6e, 0x2e, 0x72, - 0x3d, 0x65, 0x3b, 0x6e, 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x6e, - 0x2e, 0x69, 0x3d, 0x2d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x7b, 0x74, - 0x2e, 0x73, 0x3d, 0x6e, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x7d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x63, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, + 0x6b, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x76, 0x7d, 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, + 0x65, 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, + 0x79, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, + 0x65, 0x2c, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, 0x67, + 0x65, 0x74, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, + 0x3d, 0x73, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x66, 0x28, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, + 0x2e, 0x69, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, + 0x2c, 0x73, 0x65, 0x74, 0x28, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, + 0x20, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, + 0x76, 0x29, 0x21, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x29, 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, + 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x75, + 0x74, 0x65, 0x64, 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, 0x68, + 0x61, 0x76, 0x65, 0x20, 0x73, 0x69, 0x64, 0x65, 0x2d, 0x65, 0x66, 0x66, + 0x65, 0x63, 0x74, 0x73, 0x22, 0x29, 0x7d, 0x28, 0x29, 0x3b, 0x69, 0x66, + 0x28, 0x65, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x66, 0x3e, 0x31, 0x30, 0x30, 0x29, 0x74, 0x28, + 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x65, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x3b, 0x6c, 0x2b, 0x2b, 0x3b, + 0x75, 0x2b, 0x2b, 
0x3b, 0x74, 0x72, 0x79, 0x7b, 0x66, 0x6f, 0x72, 0x28, + 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, + 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, + 0x74, 0x3d, 0x74, 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, + 0x29, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x6e, 0x28, + 0x29, 0x7d, 0x7d, 0x7d, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x63, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x61, 0x28, - 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, - 0x2e, 0x73, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x74, 0x3d, 0x65, 0x2e, 0x70, 0x3b, 0x69, 0x66, 0x28, - 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x69, 0x29, 0x7b, 0x65, 0x2e, - 0x53, 0x2e, 0x55, 0x28, 0x65, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x6e, - 0x3d, 0x65, 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x6e, 0x29, 0x65, 0x2e, 0x6e, - 0x2e, 0x70, 0x3d, 0x74, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x3d, - 0x65, 0x3b, 0x65, 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x72, 0x3b, - 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, - 0x65, 0x2e, 0x72, 0x29, 0x65, 0x2e, 0x72, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x3b, 0x65, 0x3d, 0x74, 0x7d, 0x74, 0x2e, 0x73, 0x3d, 0x6e, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x70, 0x28, - 0x74, 0x29, 0x7b, 0x66, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x75, 0x2d, 0x31, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x3d, 0x34, 0x7d, 0x28, 0x70, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x66, - 0x29, 0x2e, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, - 0x33, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x66, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x3b, 0x69, - 0x66, 0x28, 0x33, 0x32, 0x3d, 0x3d, 0x28, 0x33, 0x36, 0x26, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x21, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, - 0x35, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, - 0x3d, 0x3d, 0x75, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, - 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x75, 0x3b, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3e, 0x30, 0x26, 0x26, 0x21, 0x63, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x21, 0x30, 0x7d, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x69, - 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x68, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, - 0x3b, 0x69, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x3b, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x74, 
0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, - 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x66, 0x7c, 0x7c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x21, 0x3d, 0x3d, - 0x74, 0x7c, 0x7c, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x69, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x31, 0x37, 0x3b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x7d, 0x63, 0x61, - 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, - 0x31, 0x36, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, - 0x69, 0x3d, 0x74, 0x3b, 0x61, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x70, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x66, 0x7c, 0x3d, 0x33, 0x36, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, - 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, - 0x74, 0x2e, 0x6e, 0x29, 0x74, 0x2e, 0x53, 0x2e, 0x53, 0x28, 0x74, 0x29, - 0x7d, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, - 0x2e, 0x53, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x2c, 0x74, 0x29, 0x7d, 0x3b, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x74, 0x29, 0x7b, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x2e, 0x55, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2c, 0x74, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, - 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, - 0x33, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, - 0x74, 0x2e, 0x53, 0x2e, 0x55, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x7d, 0x3b, - 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, - 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x66, 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, - 0x3d, 0x36, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x78, - 0x29, 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x7d, 0x3b, 0x70, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, - 0x65, 0x65, 0x6b, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x68, 0x28, 0x29, 0x29, 0x74, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x31, - 0x36, 0x26, 0x74, 
[Diff hunk omitted: this span is the auto-generated hex dump of an embedded
web asset (an xxd -i style C byte array). The decoded bytes are minified
Preact/@preact/signals JavaScript (identifiable strings include "function",
"prototype", "debounceRendering", and "Out-of-order effect"), apparently the
llama.cpp server example's bundled web UI. The hunk only replaces one
regenerated byte run with another and carries no hand-written changes.]
0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x7c, 0x7c, 0x65, 0x2e, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x21, 0x3d, 0x3d, + 0x74, 0x3f, 0x74, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x42, 0x65, + 0x66, 0x6f, 0x72, 0x65, 0x28, 0x6e, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, + 0x3a, 0x6e, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, + 0x3d, 0x6e, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, + 0x65, 0x7c, 0x7c, 0x74, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x42, + 0x65, 0x66, 0x6f, 0x72, 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x2c, 0x6e, + 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x5f, 0x3d, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x6f, 0x3d, 0x74, + 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x72, 0x3d, 0x65, 0x2d, 0x31, 0x2c, + 0x75, 0x3d, 0x65, 0x2b, 0x31, 0x2c, 0x66, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, + 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x66, + 0x7c, 0x7c, 0x66, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, + 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x3b, 0x69, + 0x66, 0x28, 0x69, 0x3e, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x66, + 0x3f, 0x31, 0x3a, 0x30, 0x29, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x72, + 0x3e, 0x3d, 0x30, 0x7c, 0x7c, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x3b, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x3d, + 0x30, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x72, + 0x5d, 0x29, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, 0x79, + 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x72, 0x3b, 0x72, 0x2d, + 0x2d, 0x7d, 0x69, 0x66, 0x28, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, + 0x5b, 0x75, 0x5d, 0x29, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, + 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, + 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x3b, + 0x75, 0x2b, 0x2b, 0x7d, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x2d, + 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, + 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, + 0x69, 0x6e, 0x20, 0x65, 0x29, 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, + 0x65, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x6b, 0x65, + 0x79, 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x6f, 0x20, 0x69, 0x6e, + 0x20, 0x6e, 0x7c, 0x7c, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x6f, 0x2c, 0x6e, + 0x75, 0x6c, 0x6c, 0x2c, 0x65, 0x5b, 0x6f, 0x5d, 0x2c, 0x69, 0x29, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x5f, + 0x26, 0x26, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, + 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x5b, 0x6f, + 0x5d, 0x7c, 0x7c, 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, + 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x6b, 0x65, 0x79, 0x22, + 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x63, 0x68, 0x65, 0x63, + 0x6b, 0x65, 0x64, 0x22, 0x3d, 0x3d, 0x3d, 
0x6f, 0x7c, 0x7c, 0x65, 0x5b, + 0x6f, 0x5d, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x7c, 0x7c, 0x74, + 0x74, 0x28, 0x74, 0x2c, 0x6f, 0x2c, 0x6e, 0x5b, 0x6f, 0x5d, 0x2c, 0x65, + 0x5b, 0x6f, 0x5d, 0x2c, 0x69, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, + 0x7b, 0x22, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x3f, + 0x74, 0x2e, 0x73, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, + 0x79, 0x28, 0x6e, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, + 0x22, 0x22, 0x3a, 0x65, 0x29, 0x3a, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, + 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x22, 0x6e, + 0x75, 0x6d, 0x62, 0x65, 0x72, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, 0x56, 0x2e, 0x74, 0x65, 0x73, 0x74, + 0x28, 0x6e, 0x29, 0x3f, 0x65, 0x3a, 0x65, 0x2b, 0x22, 0x70, 0x78, 0x22, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x74, + 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, + 0x73, 0x74, 0x79, 0x6c, 0x65, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x29, 0x69, + 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, + 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x29, 0x74, 0x2e, 0x73, + 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, + 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, 0x22, + 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x69, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x73, 0x74, + 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, + 0x69, 0x3d, 0x22, 0x22, 0x29, 0x2c, 0x69, 0x29, 0x66, 0x6f, 0x72, 0x28, + 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x69, 0x29, 0x65, 0x26, 0x26, 0x6e, 0x20, + 0x69, 0x6e, 0x20, 0x65, 0x7c, 0x7c, 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, + 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, 0x22, 0x22, 0x29, 0x3b, 0x69, 0x66, + 0x28, 0x65, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, + 0x65, 0x29, 0x69, 0x26, 0x26, 0x65, 0x5b, 0x6e, 0x5d, 0x3d, 0x3d, 0x3d, + 0x69, 0x5b, 0x6e, 0x5d, 0x7c, 0x7c, 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, + 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, 0x65, 0x5b, 0x6e, 0x5d, 0x29, 0x7d, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x22, 0x6f, 0x22, 0x3d, + 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x26, 0x26, 0x22, 0x6e, 0x22, 0x3d, + 0x3d, 0x3d, 0x6e, 0x5b, 0x31, 0x5d, 0x29, 0x6f, 0x3d, 0x6e, 0x21, 0x3d, + 0x3d, 0x28, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x28, 0x2f, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, 0x24, 0x2f, + 0x2c, 0x22, 0x22, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x6e, 0x2e, 0x74, 0x6f, + 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x69, + 0x6e, 0x20, 0x74, 0x3f, 0x6e, 0x2e, 0x74, 0x6f, 0x4c, 0x6f, 0x77, 0x65, + 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x2e, 0x73, 0x6c, 0x69, 0x63, + 0x65, 0x28, 0x32, 0x29, 0x3a, 0x6e, 0x2e, 0x73, 0x6c, 0x69, 0x63, 0x65, + 0x28, 0x32, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x7c, 0x7c, 0x28, 0x74, 0x2e, + 0x6c, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x5b, 0x6e, 0x2b, + 0x6f, 0x5d, 0x3d, 0x65, 0x2c, 0x65, 0x3f, 0x69, 0x7c, 0x7c, 0x74, 0x2e, + 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, + 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, 0x3a, + 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x3a, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, + 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 
0x4c, 0x69, 0x73, 0x74, 0x65, + 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, 0x3a, 0x6e, + 0x74, 0x2c, 0x6f, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, + 0x28, 0x22, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, + 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, + 0x4c, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x5f, + 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, + 0x28, 0x2f, 0x78, 0x6c, 0x69, 0x6e, 0x6b, 0x28, 0x48, 0x7c, 0x3a, 0x68, + 0x29, 0x2f, 0x2c, 0x22, 0x68, 0x22, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x28, 0x2f, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x24, 0x2f, + 0x2c, 0x22, 0x73, 0x22, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, + 0x66, 0x28, 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, 0x22, 0x21, 0x3d, 0x3d, + 0x6e, 0x26, 0x26, 0x22, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0x21, + 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x72, 0x65, 0x66, 0x22, 0x21, + 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x6c, 0x69, 0x73, 0x74, 0x22, 0x21, + 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x66, 0x6f, 0x72, 0x6d, 0x22, 0x21, + 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x74, 0x61, 0x62, 0x49, 0x6e, 0x64, + 0x65, 0x78, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x64, 0x6f, + 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, + 0x26, 0x22, 0x72, 0x6f, 0x77, 0x53, 0x70, 0x61, 0x6e, 0x22, 0x21, 0x3d, + 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x63, 0x6f, 0x6c, 0x53, 0x70, 0x61, 0x6e, + 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x6e, 0x20, 0x69, 0x6e, 0x20, + 0x74, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, + 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x65, 0x3b, + 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x74, 0x7d, 0x63, 0x61, 0x74, 0x63, + 0x68, 0x28, 0x74, 0x29, 0x7b, 0x7d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, + 0x20, 0x65, 0x7c, 0x7c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, + 0x7c, 0x7c, 0x21, 0x31, 0x3d, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x22, 0x2d, + 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x5b, 0x34, 0x5d, 0x3f, 0x74, 0x2e, 0x72, + 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, + 0x74, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x41, + 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, + 0x29, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, 0x2e, 0x74, + 0x79, 0x70, 0x65, 0x2b, 0x21, 0x31, 0x5d, 0x28, 0x77, 0x2e, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x3f, 0x77, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x28, + 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, + 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x30, 0x5d, 0x28, 0x77, 0x2e, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x3f, 0x77, 0x2e, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, + 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, + 0x66, 0x2c, 0x6c, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x73, 0x2c, 0x63, + 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x2c, 0x79, + 0x2c, 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 
0x6b, 0x2c, 0x53, 0x2c, 0x78, + 0x2c, 0x43, 0x2c, 0x45, 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, + 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, + 0x6e, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, + 0x72, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x3b, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, + 0x68, 0x26, 0x26, 0x28, 0x66, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x68, 0x2c, + 0x75, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, + 0x65, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, + 0x2c, 0x6f, 0x3d, 0x5b, 0x75, 0x5d, 0x29, 0x2c, 0x28, 0x73, 0x3d, 0x77, + 0x2e, 0x5f, 0x5f, 0x62, 0x29, 0x26, 0x26, 0x73, 0x28, 0x6e, 0x29, 0x3b, + 0x74, 0x72, 0x79, 0x7b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x45, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x79, 0x3d, + 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x6d, 0x3d, 0x28, 0x73, + 0x3d, 0x45, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x54, 0x79, + 0x70, 0x65, 0x29, 0x26, 0x26, 0x69, 0x5b, 0x73, 0x2e, 0x5f, 0x5f, 0x63, + 0x5d, 0x2c, 0x67, 0x3d, 0x73, 0x3f, 0x6d, 0x3f, 0x6d, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x73, 0x2e, + 0x5f, 0x5f, 0x3a, 0x69, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x63, 0x3f, 0x76, + 0x3d, 0x28, 0x63, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x65, 0x2e, + 0x5f, 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, + 0x45, 0x3a, 0x28, 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, + 0x65, 0x22, 0x69, 0x6e, 0x20, 0x45, 0x26, 0x26, 0x45, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x3f, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, 0x3d, 0x6e, + 0x65, 0x77, 0x20, 0x45, 0x28, 0x79, 0x2c, 0x67, 0x29, 0x3a, 0x28, 0x6e, + 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x49, + 0x28, 0x79, 0x2c, 0x67, 0x29, 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3d, 0x45, 0x2c, 0x63, 0x2e, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x66, 0x74, 0x29, 0x2c, 0x6d, + 0x26, 0x26, 0x6d, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x63, 0x29, 0x2c, 0x63, + 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x63, 0x2e, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x6e, + 0x3d, 0x69, 0x2c, 0x68, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, + 0x30, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, + 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x29, 0x2c, 0x6e, 0x75, 0x6c, + 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x28, 0x63, + 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x45, 0x2e, 0x67, 0x65, + 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, + 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, + 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x3d, 0x63, 0x2e, 0x73, 0x74, + 0x61, 0x74, 0x65, 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, + 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x29, 0x29, + 0x2c, 0x46, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x45, 0x2e, 0x67, + 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 
0x65, 0x64, 0x53, 0x74, 0x61, + 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x28, + 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x29, 0x29, 0x29, 0x2c, 0x61, + 0x3d, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x70, 0x3d, 0x63, + 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x76, + 0x3d, 0x6e, 0x2c, 0x68, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x45, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, - 0x73, 0x28, 0x76, 0x2c, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x29, 0x29, 0x29, - 0x2c, 0x68, 0x3d, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x61, - 0x3d, 0x73, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x73, 0x2e, 0x5f, - 0x5f, 0x76, 0x3d, 0x6e, 0x2c, 0x63, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, - 0x3d, 0x43, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, - 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, - 0x6f, 0x70, 0x73, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x73, - 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, - 0x6c, 0x6c, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x73, 0x2e, 0x63, + 0x73, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, - 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x73, - 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x73, 0x2e, - 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, - 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, - 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x43, 0x2e, 0x67, - 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, - 0x26, 0x76, 0x21, 0x3d, 0x3d, 0x68, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, 0x63, 0x65, 0x69, 0x76, 0x65, - 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, 0x73, 0x2e, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, - 0x63, 0x65, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x28, 0x76, - 0x2c, 0x6d, 0x29, 0x2c, 0x21, 0x73, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, - 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x73, 0x2e, 0x73, 0x68, 0x6f, 0x75, - 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x21, 0x31, 0x3d, 0x3d, 0x3d, - 0x73, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, - 0x76, 0x2c, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x6d, 0x29, 0x7c, 0x7c, - 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x76, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, - 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, 0x28, 0x73, - 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x76, 0x2c, 0x73, 0x2e, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x3d, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x73, - 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x29, 0x2c, 0x73, 0x2e, 0x5f, - 0x5f, 0x65, 0x3d, 0x21, 0x31, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, - 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2e, 
0x5f, 0x5f, 0x6b, 0x3d, - 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x2e, - 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x26, 0x26, - 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, 0x29, 0x7d, 0x29, 0x29, 0x2c, - 0x67, 0x3d, 0x30, 0x3b, 0x67, 0x3c, 0x73, 0x2e, 0x5f, 0x73, 0x62, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x67, 0x2b, 0x2b, 0x29, 0x73, - 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x73, 0x2e, - 0x5f, 0x73, 0x62, 0x5b, 0x67, 0x5d, 0x29, 0x3b, 0x73, 0x2e, 0x5f, 0x73, - 0x62, 0x3d, 0x5b, 0x5d, 0x2c, 0x73, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, 0x72, 0x2e, 0x70, 0x75, 0x73, - 0x68, 0x28, 0x73, 0x29, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x74, - 0x7d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x73, 0x2e, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x28, 0x76, 0x2c, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, - 0x6d, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x73, 0x2e, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x73, 0x2e, 0x5f, 0x5f, 0x68, - 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x28, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x29, 0x7d, 0x29, 0x29, - 0x7d, 0x69, 0x66, 0x28, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, - 0x74, 0x3d, 0x6d, 0x2c, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, - 0x76, 0x2c, 0x73, 0x2e, 0x5f, 0x5f, 0x50, 0x3d, 0x74, 0x2c, 0x62, 0x3d, - 0x53, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x6b, 0x3d, 0x30, 0x2c, 0x22, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x69, 0x6e, 0x20, - 0x43, 0x26, 0x26, 0x43, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x29, 0x7b, 0x66, - 0x6f, 0x72, 0x28, 0x73, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x73, - 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x73, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, - 0x31, 0x2c, 0x62, 0x26, 0x26, 0x62, 0x28, 0x6e, 0x29, 0x2c, 0x66, 0x3d, - 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x73, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x2c, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, - 0x78, 0x3d, 0x30, 0x3b, 0x78, 0x3c, 0x73, 0x2e, 0x5f, 0x73, 0x62, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x78, 0x2b, 0x2b, 0x29, 0x73, - 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x73, 0x2e, - 0x5f, 0x73, 0x62, 0x5b, 0x78, 0x5d, 0x29, 0x3b, 0x73, 0x2e, 0x5f, 0x73, - 0x62, 0x3d, 0x5b, 0x5d, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x64, 0x6f, - 0x7b, 0x73, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x2c, 0x62, 0x26, - 0x26, 0x62, 0x28, 0x6e, 0x29, 0x2c, 0x66, 0x3d, 0x73, 0x2e, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x28, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x2c, 0x73, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x73, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, 0x73, 0x2e, 0x73, 0x74, - 0x61, 0x74, 0x65, 0x3d, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x7d, 0x77, 0x68, - 0x69, 0x6c, 0x65, 0x28, 0x73, 0x2e, 0x5f, 0x5f, 
0x64, 0x26, 0x26, 0x2b, - 0x2b, 0x6b, 0x3c, 0x32, 0x35, 0x29, 0x3b, 0x73, 0x2e, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x3d, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x6e, 0x75, 0x6c, - 0x6c, 0x21, 0x3d, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x26, 0x26, 0x28, 0x69, - 0x3d, 0x56, 0x28, 0x56, 0x28, 0x7b, 0x7d, 0x2c, 0x69, 0x29, 0x2c, 0x73, - 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x29, 0x29, 0x2c, 0x63, 0x7c, 0x7c, - 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x53, - 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, - 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x70, 0x3d, - 0x73, 0x2e, 0x67, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, - 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x28, 0x68, 0x2c, 0x61, 0x29, 0x29, 0x2c, 0x42, 0x28, 0x74, 0x2c, - 0x54, 0x28, 0x77, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x66, 0x26, - 0x26, 0x66, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x3d, 0x3d, 0x4f, 0x26, - 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, 0x79, - 0x3f, 0x66, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, - 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x66, 0x29, 0x3f, 0x77, 0x3a, 0x5b, - 0x77, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, - 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x6c, 0x29, 0x2c, 0x73, 0x2e, 0x62, 0x61, - 0x73, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2e, 0x5f, - 0x5f, 0x68, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x73, 0x2e, 0x5f, 0x5f, + 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x4d, 0x6f, + 0x75, 0x6e, 0x74, 0x28, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, + 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 0x5f, + 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x2e, 0x63, 0x6f, + 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, + 0x75, 0x6e, 0x74, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x45, 0x2e, 0x67, 0x65, 0x74, + 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, + 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, 0x79, + 0x21, 0x3d, 0x3d, 0x61, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, + 0x69, 0x6c, 0x6c, 0x52, 0x65, 0x63, 0x65, 0x69, 0x76, 0x65, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, 0x63, 0x65, + 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x28, 0x79, 0x2c, 0x67, + 0x29, 0x2c, 0x21, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, 0x28, 0x6e, + 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, + 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x21, 0x31, 0x3d, 0x3d, 0x3d, 0x63, + 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x79, + 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x7c, 0x7c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, + 0x29, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6e, 
0x2e, 0x5f, 0x5f, 0x76, + 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, 0x28, 0x63, + 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x63, + 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, + 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2e, 0x5f, + 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6e, 0x2e, 0x5f, + 0x5f, 0x6b, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, + 0x74, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, 0x29, 0x7d, + 0x29, 0x29, 0x2c, 0x62, 0x3d, 0x30, 0x3b, 0x62, 0x3c, 0x63, 0x2e, 0x5f, + 0x73, 0x62, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x62, 0x2b, + 0x2b, 0x29, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x5b, 0x62, 0x5d, 0x29, 0x3b, 0x63, + 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, 0x72, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x73, 0x29, 0x2c, 0x64, 0x26, 0x26, 0x28, - 0x73, 0x2e, 0x5f, 0x5f, 0x45, 0x3d, 0x73, 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, - 0x75, 0x6c, 0x6c, 0x29, 0x2c, 0x73, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, - 0x31, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, - 0x3d, 0x6f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x3d, 0x3d, - 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, - 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, - 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x3a, 0x6e, 0x2e, 0x5f, 0x5f, - 0x65, 0x3d, 0x69, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, - 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x6c, - 0x29, 0x3b, 0x28, 0x66, 0x3d, 0x53, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, - 0x64, 0x29, 0x26, 0x26, 0x66, 0x28, 0x6e, 0x29, 0x7d, 0x63, 0x61, 0x74, - 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, - 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x28, 0x6c, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, - 0x6c, 0x21, 0x3d, 0x6f, 0x29, 0x26, 0x26, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, - 0x65, 0x3d, 0x75, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x21, 0x21, - 0x6c, 0x2c, 0x6f, 0x5b, 0x6f, 0x2e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4f, - 0x66, 0x28, 0x75, 0x29, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x2c, - 0x53, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, - 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, - 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x53, 0x2e, 0x5f, 0x5f, 0x63, - 0x26, 0x26, 0x53, 0x2e, 0x5f, 0x5f, 0x63, 0x28, 0x6e, 0x2c, 0x74, 0x29, - 0x2c, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x72, 0x79, - 0x7b, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2c, 0x6e, 0x2e, 0x5f, - 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, - 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x74, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x6e, 0x29, 0x7d, - 0x29, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, - 0x53, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x74, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, - 0x76, 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 
0x2c, 0x75, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6c, 0x2c, 0x66, 0x2c, 0x73, 0x2c, 0x63, 0x3d, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x68, 0x3d, 0x6e, 0x2e, - 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x61, 0x3d, 0x6e, 0x2e, 0x74, 0x79, - 0x70, 0x65, 0x2c, 0x70, 0x3d, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x73, - 0x76, 0x67, 0x22, 0x3d, 0x3d, 0x3d, 0x61, 0x26, 0x26, 0x28, 0x5f, 0x3d, - 0x21, 0x30, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, - 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x70, 0x3c, 0x6f, 0x2e, 0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x3b, 0x70, 0x2b, 0x2b, 0x29, 0x69, 0x66, 0x28, 0x28, - 0x6c, 0x3d, 0x6f, 0x5b, 0x70, 0x5d, 0x29, 0x26, 0x26, 0x22, 0x73, 0x65, - 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x22, 0x69, - 0x6e, 0x20, 0x6c, 0x3d, 0x3d, 0x21, 0x21, 0x61, 0x26, 0x26, 0x28, 0x61, - 0x3f, 0x6c, 0x2e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x4e, 0x61, 0x6d, 0x65, - 0x3d, 0x3d, 0x3d, 0x61, 0x3a, 0x33, 0x3d, 0x3d, 0x3d, 0x6c, 0x2e, 0x6e, - 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, 0x29, 0x7b, 0x74, 0x3d, - 0x6c, 0x2c, 0x6f, 0x5b, 0x70, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, - 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x3d, 0x3d, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x3d, 0x3d, 0x3d, 0x61, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74, 0x4e, 0x6f, 0x64, 0x65, - 0x28, 0x68, 0x29, 0x3b, 0x74, 0x3d, 0x5f, 0x3f, 0x64, 0x6f, 0x63, 0x75, - 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, - 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4e, 0x53, 0x28, 0x22, 0x68, 0x74, - 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x77, 0x77, 0x77, 0x2e, 0x77, 0x33, 0x2e, - 0x6f, 0x72, 0x67, 0x2f, 0x32, 0x30, 0x30, 0x30, 0x2f, 0x73, 0x76, 0x67, - 0x22, 0x2c, 0x61, 0x29, 0x3a, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, - 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, - 0x65, 0x6e, 0x74, 0x28, 0x61, 0x2c, 0x68, 0x2e, 0x69, 0x73, 0x26, 0x26, - 0x68, 0x29, 0x2c, 0x6f, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x75, 0x3d, - 0x21, 0x31, 0x7d, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, - 0x3d, 0x61, 0x29, 0x63, 0x3d, 0x3d, 0x3d, 0x68, 0x7c, 0x7c, 0x75, 0x26, - 0x26, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x3d, 0x3d, 0x68, 0x7c, - 0x7c, 0x28, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x68, 0x29, 0x3b, - 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, 0x6f, 0x3d, 0x6f, 0x26, - 0x26, 0x6b, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x2e, 0x63, 0x68, - 0x69, 0x6c, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x73, 0x29, 0x2c, 0x66, 0x3d, - 0x28, 0x63, 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x7c, 0x7c, - 0x50, 0x29, 0x2e, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, - 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, - 0x4d, 0x4c, 0x2c, 0x73, 0x3d, 0x68, 0x2e, 0x64, 0x61, 0x6e, 0x67, 0x65, - 0x72, 0x6f, 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, - 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x2c, 0x21, 0x75, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, 0x66, 0x6f, - 0x72, 0x28, 0x63, 0x3d, 0x7b, 0x7d, 0x2c, 0x70, 0x3d, 0x30, 0x3b, 0x70, - 0x3c, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, - 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x70, 0x2b, 0x2b, - 0x29, 0x63, 0x5b, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, - 0x74, 0x65, 0x73, 0x5b, 0x70, 0x5d, 0x2e, 0x6e, 
0x61, 0x6d, 0x65, 0x5d, - 0x3d, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, - 0x73, 0x5b, 0x70, 0x5d, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x28, - 0x73, 0x7c, 0x7c, 0x66, 0x29, 0x26, 0x26, 0x28, 0x73, 0x26, 0x26, 0x28, - 0x66, 0x26, 0x26, 0x73, 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x3d, - 0x3d, 0x66, 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x7c, 0x7c, 0x73, - 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, - 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x29, 0x7c, 0x7c, - 0x28, 0x74, 0x2e, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, - 0x3d, 0x73, 0x26, 0x26, 0x73, 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, - 0x7c, 0x7c, 0x22, 0x22, 0x29, 0x29, 0x7d, 0x69, 0x66, 0x28, 0x51, 0x28, - 0x74, 0x2c, 0x68, 0x2c, 0x63, 0x2c, 0x5f, 0x2c, 0x75, 0x29, 0x2c, 0x73, - 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x5b, 0x5d, 0x3b, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x42, 0x28, 0x74, 0x2c, 0x54, 0x28, - 0x70, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, - 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x29, 0x3f, 0x70, 0x3a, 0x5b, 0x70, - 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x26, 0x26, 0x22, - 0x66, 0x6f, 0x72, 0x65, 0x69, 0x67, 0x6e, 0x4f, 0x62, 0x6a, 0x65, 0x63, - 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x61, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x6f, - 0x3f, 0x6f, 0x5b, 0x30, 0x5d, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x26, - 0x26, 0x52, 0x28, 0x65, 0x2c, 0x30, 0x29, 0x2c, 0x75, 0x29, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x70, - 0x3d, 0x6f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x70, 0x2d, - 0x2d, 0x3b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x5b, 0x70, - 0x5d, 0x26, 0x26, 0x41, 0x28, 0x6f, 0x5b, 0x70, 0x5d, 0x29, 0x3b, 0x75, - 0x7c, 0x7c, 0x28, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x69, 0x6e, - 0x20, 0x68, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x28, 0x70, 0x3d, 0x68, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, - 0x26, 0x26, 0x28, 0x70, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x7c, 0x7c, 0x22, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, - 0x73, 0x22, 0x3d, 0x3d, 0x3d, 0x61, 0x26, 0x26, 0x21, 0x70, 0x7c, 0x7c, - 0x22, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x61, - 0x26, 0x26, 0x70, 0x21, 0x3d, 0x3d, 0x63, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x29, 0x26, 0x26, 0x59, 0x28, 0x74, 0x2c, 0x22, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x22, 0x2c, 0x70, 0x2c, 0x63, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2c, 0x21, 0x31, 0x29, 0x2c, 0x22, 0x63, 0x68, 0x65, 0x63, 0x6b, - 0x65, 0x64, 0x22, 0x69, 0x6e, 0x20, 0x68, 0x26, 0x26, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x28, 0x70, 0x3d, 0x68, 0x2e, 0x63, - 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x29, 0x26, 0x26, 0x70, 0x21, 0x3d, - 0x3d, 0x74, 0x2e, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x26, 0x26, - 0x59, 0x28, 0x74, 0x2c, 0x22, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, - 0x22, 0x2c, 0x70, 0x2c, 0x63, 0x2e, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, - 0x64, 0x2c, 0x21, 0x31, 0x29, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x5f, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x74, - 0x72, 0x79, 0x7b, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x3f, - 0x74, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, - 0x6e, 0x74, 0x3d, 0x6e, 0x7d, 0x63, 0x61, 0x74, 
0x63, 0x68, 0x28, 0x74, - 0x29, 0x7b, 0x53, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x74, 0x2c, 0x65, 0x29, - 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6f, - 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 0x69, 0x2c, 0x5f, 0x3b, 0x69, 0x66, 0x28, 0x53, 0x2e, 0x75, 0x6e, - 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x53, 0x2e, 0x75, 0x6e, 0x6d, - 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x2c, 0x28, 0x69, 0x3d, 0x74, - 0x2e, 0x72, 0x65, 0x66, 0x29, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x26, 0x26, 0x69, 0x2e, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x65, - 0x7c, 0x7c, 0x5f, 0x74, 0x28, 0x69, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, - 0x6e, 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x69, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x29, 0x7b, 0x69, 0x66, 0x28, - 0x69, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, - 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x29, 0x74, - 0x72, 0x79, 0x7b, 0x69, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, - 0x74, 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, - 0x7b, 0x53, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, - 0x69, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x50, - 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x69, 0x3d, - 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x5f, 0x3d, - 0x30, 0x3b, 0x5f, 0x3c, 0x69, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x3b, 0x5f, 0x2b, 0x2b, 0x29, 0x69, 0x5b, 0x5f, 0x5d, 0x26, 0x26, 0x6f, - 0x74, 0x28, 0x69, 0x5b, 0x5f, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x7c, 0x7c, - 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, - 0x65, 0x29, 0x3b, 0x65, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, - 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x7c, 0x7c, 0x41, 0x28, 0x74, 0x2e, 0x5f, - 0x5f, 0x65, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, - 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x72, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x74, - 0x2c, 0x65, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x75, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, - 0x61, 0x72, 0x20, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x3b, 0x53, 0x2e, 0x5f, - 0x5f, 0x26, 0x26, 0x53, 0x2e, 0x5f, 0x5f, 0x28, 0x74, 0x2c, 0x6e, 0x29, - 0x2c, 0x5f, 0x3d, 0x28, 0x69, 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, - 0x20, 0x65, 0x29, 0x3f, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x65, 0x26, 0x26, - 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, 0x7c, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, - 0x2c, 0x6f, 0x3d, 0x5b, 0x5d, 0x2c, 0x6e, 0x74, 0x28, 0x6e, 0x2c, 0x74, + 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, 0x3b, 0x62, 0x72, 0x65, 0x61, + 0x6b, 0x20, 0x74, 0x7d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, + 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 
0x26, 0x63, 0x2e, 0x63, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x79, 0x2c, 0x63, 0x2e, 0x5f, + 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, + 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x63, 0x2e, + 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x28, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x2e, 0x63, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x55, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x29, + 0x7d, 0x29, 0x29, 0x7d, 0x69, 0x66, 0x28, 0x63, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x50, 0x3d, 0x74, + 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x31, 0x2c, 0x6b, 0x3d, + 0x77, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, 0x3d, 0x30, 0x2c, 0x22, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x69, 0x6e, 0x20, + 0x45, 0x26, 0x26, 0x45, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, + 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x29, 0x7b, 0x66, + 0x6f, 0x72, 0x28, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, + 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, + 0x31, 0x2c, 0x6b, 0x26, 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x3d, + 0x63, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, 0x2e, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, + 0x78, 0x3d, 0x30, 0x3b, 0x78, 0x3c, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x78, 0x2b, 0x2b, 0x29, 0x63, + 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x2e, + 0x5f, 0x73, 0x62, 0x5b, 0x78, 0x5d, 0x29, 0x3b, 0x63, 0x2e, 0x5f, 0x73, + 0x62, 0x3d, 0x5b, 0x5d, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x64, 0x6f, + 0x7b, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x2c, 0x6b, 0x26, + 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x3d, 0x63, 0x2e, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, 0x63, 0x2e, 0x73, 0x74, + 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x7d, 0x77, 0x68, + 0x69, 0x6c, 0x65, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x2b, + 0x2b, 0x53, 0x3c, 0x32, 0x35, 0x29, 0x3b, 0x63, 0x2e, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x6e, 0x75, 0x6c, + 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, + 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x26, 0x26, 0x28, 0x69, + 0x3d, 0x46, 0x28, 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x69, 0x29, 0x2c, 0x63, + 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, + 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x29, 0x29, 0x2c, 0x68, 0x7c, 0x7c, + 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x53, + 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, + 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x64, 0x3d, + 0x63, 0x2e, 0x67, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, + 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x28, 0x61, 0x2c, 0x70, 0x29, 0x29, 0x2c, 
0x7a, 0x28, 0x74, 0x2c, + 0x41, 0x28, 0x43, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x73, 0x26, + 0x26, 0x73, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x3d, 0x3d, 0x52, 0x26, + 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x73, 0x2e, 0x6b, 0x65, 0x79, + 0x3f, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x73, 0x29, 0x3f, 0x43, 0x3a, 0x5b, + 0x43, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, + 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x2c, 0x6c, 0x29, 0x2c, 0x63, 0x2e, + 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x63, 0x2e, + 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, + 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, 0x2c, 0x76, 0x26, + 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x45, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, + 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, + 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, + 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x3f, 0x28, + 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, + 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x29, + 0x3a, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x6f, 0x74, 0x28, 0x65, 0x2e, + 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, + 0x6f, 0x2c, 0x72, 0x2c, 0x66, 0x2c, 0x6c, 0x29, 0x3b, 0x28, 0x73, 0x3d, + 0x77, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x29, 0x26, 0x26, 0x73, + 0x28, 0x6e, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, + 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, + 0x28, 0x66, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, + 0x26, 0x26, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x75, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x21, 0x21, 0x66, 0x2c, 0x6f, 0x5b, 0x6f, + 0x2e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x75, 0x29, 0x5d, + 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x2c, 0x77, 0x2e, 0x5f, 0x5f, 0x65, + 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, + 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x3c, 0x65, 0x2e, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x3b, 0x69, 0x2b, 0x2b, 0x29, 0x72, 0x74, 0x28, 0x65, 0x5b, + 0x69, 0x5d, 0x2c, 0x65, 0x5b, 0x2b, 0x2b, 0x69, 0x5d, 0x2c, 0x65, 0x5b, + 0x2b, 0x2b, 0x69, 0x5d, 0x29, 0x3b, 0x77, 0x2e, 0x5f, 0x5f, 0x63, 0x26, + 0x26, 0x77, 0x2e, 0x5f, 0x5f, 0x63, 0x28, 0x6e, 0x2c, 0x74, 0x29, 0x2c, + 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, + 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, + 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, + 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, + 0x7b, 0x74, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x6e, 0x29, 0x7d, 0x29, + 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x77, + 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x74, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, + 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x6f, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, + 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x29, + 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6c, 0x2c, 0x73, 
0x2c, 0x63, 0x2c, 0x68, + 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x61, 0x3d, 0x6e, + 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x70, 0x3d, 0x6e, 0x2e, 0x74, + 0x79, 0x70, 0x65, 0x2c, 0x64, 0x3d, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x22, + 0x73, 0x76, 0x67, 0x22, 0x3d, 0x3d, 0x3d, 0x70, 0x26, 0x26, 0x28, 0x5f, + 0x3d, 0x21, 0x30, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, + 0x29, 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x64, 0x3c, 0x6f, 0x2e, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x64, 0x2b, 0x2b, 0x29, 0x69, 0x66, 0x28, + 0x28, 0x6c, 0x3d, 0x6f, 0x5b, 0x64, 0x5d, 0x29, 0x26, 0x26, 0x22, 0x73, + 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x22, + 0x69, 0x6e, 0x20, 0x6c, 0x3d, 0x3d, 0x21, 0x21, 0x70, 0x26, 0x26, 0x28, + 0x70, 0x3f, 0x6c, 0x2e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x4e, 0x61, 0x6d, + 0x65, 0x3d, 0x3d, 0x3d, 0x70, 0x3a, 0x33, 0x3d, 0x3d, 0x3d, 0x6c, 0x2e, + 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, 0x29, 0x7b, 0x74, + 0x3d, 0x6c, 0x2c, 0x6f, 0x5b, 0x64, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, + 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x69, 0x66, 0x28, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x70, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74, 0x4e, 0x6f, 0x64, + 0x65, 0x28, 0x61, 0x29, 0x3b, 0x74, 0x3d, 0x5f, 0x3f, 0x64, 0x6f, 0x63, + 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4e, 0x53, 0x28, 0x22, 0x68, + 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x77, 0x77, 0x77, 0x2e, 0x77, 0x33, + 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x32, 0x30, 0x30, 0x30, 0x2f, 0x73, 0x76, + 0x67, 0x22, 0x2c, 0x70, 0x29, 0x3a, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, + 0x6d, 0x65, 0x6e, 0x74, 0x28, 0x70, 0x2c, 0x61, 0x2e, 0x69, 0x73, 0x26, + 0x26, 0x61, 0x29, 0x2c, 0x6f, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x75, + 0x3d, 0x21, 0x31, 0x7d, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, + 0x3d, 0x3d, 0x70, 0x29, 0x68, 0x3d, 0x3d, 0x3d, 0x61, 0x7c, 0x7c, 0x75, + 0x26, 0x26, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x3d, 0x3d, 0x61, + 0x7c, 0x7c, 0x28, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x61, 0x29, + 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, 0x6f, 0x3d, 0x6f, + 0x26, 0x26, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x2e, 0x63, + 0x68, 0x69, 0x6c, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x73, 0x29, 0x2c, 0x73, + 0x3d, 0x28, 0x68, 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x7c, + 0x7c, 0x44, 0x29, 0x2e, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, + 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, + 0x54, 0x4d, 0x4c, 0x2c, 0x63, 0x3d, 0x61, 0x2e, 0x64, 0x61, 0x6e, 0x67, + 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, + 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x2c, 0x21, 0x75, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, 0x66, + 0x6f, 0x72, 0x28, 0x68, 0x3d, 0x7b, 0x7d, 0x2c, 0x64, 0x3d, 0x30, 0x3b, + 0x64, 0x3c, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, + 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x64, 0x2b, + 0x2b, 0x29, 0x68, 0x5b, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, + 0x75, 0x74, 0x65, 0x73, 0x5b, 0x64, 0x5d, 0x2e, 0x6e, 0x61, 0x6d, 0x65, + 0x5d, 0x3d, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, 
0x69, 0x62, 0x75, 0x74, + 0x65, 0x73, 0x5b, 0x64, 0x5d, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, + 0x28, 0x63, 0x7c, 0x7c, 0x73, 0x29, 0x26, 0x26, 0x28, 0x63, 0x26, 0x26, + 0x28, 0x73, 0x26, 0x26, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, + 0x3d, 0x3d, 0x73, 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x7c, 0x7c, + 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x3d, 0x3d, 0x3d, 0x74, + 0x2e, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x29, 0x7c, + 0x7c, 0x28, 0x74, 0x2e, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, + 0x4c, 0x3d, 0x63, 0x26, 0x26, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x74, 0x6d, + 0x6c, 0x7c, 0x7c, 0x22, 0x22, 0x29, 0x29, 0x7d, 0x69, 0x66, 0x28, 0x59, + 0x28, 0x74, 0x2c, 0x61, 0x2c, 0x68, 0x2c, 0x5f, 0x2c, 0x75, 0x29, 0x2c, + 0x63, 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x5b, 0x5d, 0x3b, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x7a, 0x28, 0x74, 0x2c, 0x41, + 0x28, 0x64, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, + 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x29, 0x3f, 0x64, 0x3a, 0x5b, + 0x64, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x26, 0x26, + 0x22, 0x66, 0x6f, 0x72, 0x65, 0x69, 0x67, 0x6e, 0x4f, 0x62, 0x6a, 0x65, + 0x63, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x70, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, + 0x6f, 0x3f, 0x6f, 0x5b, 0x30, 0x5d, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, + 0x26, 0x26, 0x6a, 0x28, 0x65, 0x2c, 0x30, 0x29, 0x2c, 0x75, 0x2c, 0x66, + 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, 0x66, 0x6f, + 0x72, 0x28, 0x64, 0x3d, 0x6f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x64, 0x2d, 0x2d, 0x3b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x6f, 0x5b, 0x64, 0x5d, 0x26, 0x26, 0x4d, 0x28, 0x6f, 0x5b, 0x64, 0x5d, + 0x29, 0x3b, 0x75, 0x7c, 0x7c, 0x28, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x22, 0x69, 0x6e, 0x20, 0x61, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x28, 0x64, 0x3d, 0x61, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x29, 0x26, 0x26, 0x28, 0x64, 0x21, 0x3d, 0x3d, 0x74, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7c, 0x7c, 0x22, 0x70, 0x72, 0x6f, 0x67, + 0x72, 0x65, 0x73, 0x73, 0x22, 0x3d, 0x3d, 0x3d, 0x70, 0x26, 0x26, 0x21, + 0x64, 0x7c, 0x7c, 0x22, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, + 0x3d, 0x3d, 0x70, 0x26, 0x26, 0x64, 0x21, 0x3d, 0x3d, 0x68, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x26, 0x26, 0x74, 0x74, 0x28, 0x74, 0x2c, + 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x64, 0x2c, 0x68, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x21, 0x31, 0x29, 0x2c, 0x22, 0x63, + 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x22, 0x69, 0x6e, 0x20, 0x61, 0x26, + 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x28, 0x64, + 0x3d, 0x61, 0x2e, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x29, 0x26, + 0x26, 0x64, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x63, 0x68, 0x65, 0x63, 0x6b, + 0x65, 0x64, 0x26, 0x26, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x22, 0x63, 0x68, + 0x65, 0x63, 0x6b, 0x65, 0x64, 0x22, 0x2c, 0x64, 0x2c, 0x68, 0x2e, 0x63, + 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x2c, 0x21, 0x31, 0x29, 0x29, 0x7d, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x74, 0x3f, 0x74, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, + 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x7d, 0x63, 0x61, + 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x77, 
0x2e, 0x5f, 0x5f, 0x65, + 0x28, 0x74, 0x2c, 0x65, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x2c, 0x5f, 0x3b, 0x69, 0x66, + 0x28, 0x77, 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, + 0x77, 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x74, 0x29, + 0x2c, 0x28, 0x69, 0x3d, 0x74, 0x2e, 0x72, 0x65, 0x66, 0x29, 0x26, 0x26, + 0x28, 0x69, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x26, 0x26, + 0x69, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x21, 0x3d, 0x3d, + 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x7c, 0x7c, 0x72, 0x74, 0x28, 0x69, 0x2c, + 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, + 0x6c, 0x21, 0x3d, 0x28, 0x69, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, + 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, + 0x75, 0x6e, 0x74, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x2e, 0x63, 0x6f, + 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, + 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, + 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x77, 0x2e, 0x5f, 0x5f, 0x65, 0x28, + 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x69, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, + 0x69, 0x2e, 0x5f, 0x5f, 0x50, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x74, + 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, + 0x69, 0x66, 0x28, 0x69, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x29, 0x66, + 0x6f, 0x72, 0x28, 0x5f, 0x3d, 0x30, 0x3b, 0x5f, 0x3c, 0x69, 0x2e, 0x6c, + 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x5f, 0x2b, 0x2b, 0x29, 0x69, 0x5b, + 0x5f, 0x5d, 0x26, 0x26, 0x75, 0x74, 0x28, 0x69, 0x5b, 0x5f, 0x5d, 0x2c, + 0x6e, 0x2c, 0x65, 0x7c, 0x7c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, + 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x3b, 0x65, 0x7c, 0x7c, 0x6e, + 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x7c, 0x7c, + 0x4d, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x2c, 0x74, 0x2e, 0x5f, + 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, + 0x64, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, + 0x74, 0x6f, 0x72, 0x28, 0x74, 0x2c, 0x65, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x2c, 0x5f, 0x2c, + 0x6f, 0x2c, 0x72, 0x3b, 0x77, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x77, 0x2e, + 0x5f, 0x5f, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x2c, 0x5f, 0x3d, 0x28, 0x69, + 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, + 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x29, 0x3f, 0x6e, + 0x75, 0x6c, 0x6c, 0x3a, 0x65, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, + 0x7c, 0x7c, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6f, 0x3d, 0x5b, 0x5d, + 0x2c, 0x72, 0x3d, 0x5b, 0x5d, 0x2c, 0x69, 0x74, 0x28, 0x6e, 0x2c, 0x74, 0x3d, 0x28, 0x21, 0x69, 0x26, 0x26, 0x65, 0x7c, 0x7c, 0x6e, 0x29, 0x2e, - 0x5f, 0x5f, 0x6b, 0x3d, 0x46, 0x28, 0x4f, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x2c, 0x5b, 0x74, 0x5d, 0x29, 0x2c, 0x5f, 0x7c, 0x7c, 0x50, 0x2c, 0x50, + 0x5f, 0x5f, 0x6b, 0x3d, 0x57, 0x28, 0x52, 0x2c, 
0x6e, 0x75, 0x6c, 0x6c, + 0x2c, 0x5b, 0x74, 0x5d, 0x29, 0x2c, 0x5f, 0x7c, 0x7c, 0x44, 0x2c, 0x44, 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x21, 0x69, 0x26, 0x26, 0x65, 0x3f, 0x5b, 0x65, 0x5d, 0x3a, 0x5f, 0x3f, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x6e, 0x2e, 0x66, - 0x69, 0x72, 0x73, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x3f, 0x6b, 0x2e, + 0x69, 0x72, 0x73, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x3f, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x6e, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x73, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6f, 0x2c, 0x21, 0x69, 0x26, 0x26, 0x65, 0x3f, 0x65, 0x3a, 0x5f, 0x3f, 0x5f, 0x2e, 0x5f, 0x5f, 0x65, 0x3a, 0x6e, 0x2e, 0x66, 0x69, 0x72, 0x73, - 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x2c, 0x69, 0x29, 0x2c, 0x65, 0x74, - 0x28, 0x6f, 0x2c, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x6c, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x75, - 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x6c, 0x74, 0x29, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x74, 0x28, 0x74, 0x2c, - 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x2c, 0x5f, - 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x3d, 0x56, 0x28, 0x7b, 0x7d, 0x2c, - 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x3b, 0x66, 0x6f, 0x72, - 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x26, 0x26, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x64, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, 0x28, - 0x72, 0x3d, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x64, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x2c, 0x6e, - 0x29, 0x22, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x69, 0x3d, - 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x22, 0x72, 0x65, 0x66, 0x22, 0x3d, 0x3d, - 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x75, 0x5b, 0x6f, - 0x5d, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6e, - 0x5b, 0x6f, 0x5d, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, - 0x3d, 0x3d, 0x72, 0x3f, 0x72, 0x5b, 0x6f, 0x5d, 0x3a, 0x6e, 0x5b, 0x6f, - 0x5d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x72, 0x67, - 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x3e, 0x32, 0x26, 0x26, 0x28, 0x75, 0x2e, 0x63, 0x68, 0x69, 0x6c, - 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, - 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x33, 0x3f, - 0x6b, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, 0x65, 0x29, 0x2c, 0x4d, - 0x28, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x75, 0x2c, 0x69, 0x7c, - 0x7c, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x5f, 0x7c, 0x7c, 0x74, 0x2e, - 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x66, 0x75, + 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x2c, 0x69, 0x2c, 0x72, 0x29, 0x2c, + 0x5f, 0x74, 0x28, 0x6f, 0x2c, 0x74, 0x2c, 0x72, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x73, 0x74, 0x28, 0x74, 0x2c, - 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x7b, 0x5f, 0x5f, - 0x63, 0x3a, 0x6e, 0x3d, 0x22, 0x5f, 0x5f, 0x63, 0x43, 0x22, 0x2b, 0x4e, - 0x2b, 0x2b, 0x2c, 0x5f, 0x5f, 0x3a, 0x74, 0x2c, 0x43, 0x6f, 0x6e, 0x73, - 0x75, 0x6d, 0x65, 0x72, 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 
0x72, - 0x6e, 0x20, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, - 0x28, 0x6e, 0x29, 0x7d, 0x2c, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, - 0x72, 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x2c, 0x69, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x65, - 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, - 0x74, 0x7c, 0x7c, 0x28, 0x65, 0x3d, 0x5b, 0x5d, 0x2c, 0x28, 0x69, 0x3d, - 0x7b, 0x7d, 0x29, 0x5b, 0x6e, 0x5d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2c, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x69, 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, - 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x26, - 0x26, 0x65, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, - 0x5f, 0x65, 0x3d, 0x21, 0x30, 0x2c, 0x6a, 0x28, 0x74, 0x29, 0x7d, 0x29, - 0x29, 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x75, 0x62, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x65, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x3b, 0x76, 0x61, - 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, - 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, - 0x6e, 0x74, 0x3b, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, - 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, - 0x7b, 0x65, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x63, 0x65, 0x28, 0x65, 0x2e, - 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x74, 0x29, 0x2c, 0x31, - 0x29, 0x2c, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, - 0x74, 0x29, 0x7d, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, - 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x65, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, - 0x2e, 0x5f, 0x5f, 0x3d, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, - 0x65, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x54, 0x79, - 0x70, 0x65, 0x3d, 0x65, 0x7d, 0x6b, 0x3d, 0x44, 0x2e, 0x73, 0x6c, 0x69, - 0x63, 0x65, 0x2c, 0x53, 0x3d, 0x7b, 0x5f, 0x5f, 0x65, 0x3a, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x2c, 0x69, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, - 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, - 0x3b, 0x29, 0x69, 0x66, 0x28, 0x28, 0x5f, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, - 0x63, 0x29, 0x26, 0x26, 0x21, 0x5f, 0x2e, 0x5f, 0x5f, 0x29, 0x74, 0x72, - 0x79, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x6f, 0x3d, 0x5f, 0x2e, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x29, 0x26, 0x26, - 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x2e, 0x67, 0x65, 0x74, 0x44, - 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, - 0x72, 0x6f, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x26, 0x26, 0x28, 
0x5f, - 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x6f, 0x2e, + 0x6e, 0x29, 0x7b, 0x6c, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x73, 0x74, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x63, + 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x3d, 0x46, + 0x28, 0x7b, 0x7d, 0x2c, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, + 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x2e, + 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, + 0x73, 0x26, 0x26, 0x28, 0x72, 0x3d, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, 0x6f, 0x70, + 0x73, 0x29, 0x2c, 0x6e, 0x29, 0x22, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x3d, + 0x6f, 0x3f, 0x69, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x22, 0x72, 0x65, + 0x66, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, + 0x3a, 0x75, 0x5b, 0x6f, 0x5d, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x26, 0x26, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x72, 0x3f, 0x72, 0x5b, 0x6f, 0x5d, + 0x3a, 0x6e, 0x5b, 0x6f, 0x5d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, + 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x32, 0x26, 0x26, 0x28, 0x75, 0x2e, + 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, + 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x3e, 0x33, 0x3f, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, + 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, + 0x65, 0x29, 0x2c, 0x4f, 0x28, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, + 0x75, 0x2c, 0x69, 0x7c, 0x7c, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x5f, + 0x7c, 0x7c, 0x74, 0x2e, 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x68, + 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, + 0x3d, 0x7b, 0x5f, 0x5f, 0x63, 0x3a, 0x6e, 0x3d, 0x22, 0x5f, 0x5f, 0x63, + 0x43, 0x22, 0x2b, 0x24, 0x2b, 0x2b, 0x2c, 0x5f, 0x5f, 0x3a, 0x74, 0x2c, + 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x72, 0x3a, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, + 0x64, 0x72, 0x65, 0x6e, 0x28, 0x6e, 0x29, 0x7d, 0x2c, 0x50, 0x72, 0x6f, + 0x76, 0x69, 0x64, 0x65, 0x72, 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x2c, + 0x69, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, + 0x6e, 0x74, 0x65, 0x78, 0x74, 0x7c, 0x7c, 0x28, 0x65, 0x3d, 0x5b, 0x5d, + 0x2c, 0x28, 0x69, 0x3d, 0x7b, 0x7d, 0x29, 0x5b, 0x6e, 0x5d, 0x3d, 0x74, + 0x68, 0x69, 0x73, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x65, 0x74, + 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x7d, 0x2c, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 
0x29, + 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x26, 0x26, 0x65, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, + 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, + 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x30, 0x2c, 0x71, 0x28, + 0x74, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x73, 0x75, 0x62, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x29, 0x7b, 0x65, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, + 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x63, 0x6f, + 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, + 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3b, 0x74, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, + 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x65, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x63, + 0x65, 0x28, 0x65, 0x2e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, + 0x74, 0x29, 0x2c, 0x31, 0x29, 0x2c, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x63, + 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x29, 0x2c, 0x74, 0x2e, + 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x7d, 0x3b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x50, 0x72, 0x6f, 0x76, + 0x69, 0x64, 0x65, 0x72, 0x2e, 0x5f, 0x5f, 0x3d, 0x65, 0x2e, 0x43, 0x6f, + 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x78, 0x74, 0x54, 0x79, 0x70, 0x65, 0x3d, 0x65, 0x7d, 0x78, 0x3d, 0x54, + 0x2e, 0x73, 0x6c, 0x69, 0x63, 0x65, 0x2c, 0x77, 0x3d, 0x7b, 0x5f, 0x5f, + 0x65, 0x3a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, + 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, + 0x76, 0x61, 0x72, 0x20, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x3b, 0x6e, 0x3d, + 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, 0x29, 0x69, 0x66, 0x28, 0x28, 0x5f, 0x3d, + 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x26, 0x26, 0x21, 0x5f, 0x2e, 0x5f, + 0x5f, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x6f, 0x3d, + 0x5f, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, + 0x72, 0x29, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x45, 0x72, 0x72, 0x6f, 0x72, - 0x28, 0x74, 0x29, 0x29, 0x2c, 0x72, 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x64, - 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x5f, 0x2e, 0x63, 0x6f, - 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, - 0x74, 0x63, 0x68, 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x63, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, - 0x68, 0x28, 0x74, 0x2c, 0x69, 0x7c, 0x7c, 0x7b, 0x7d, 0x29, 0x2c, 0x72, - 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x64, 0x29, 0x2c, 0x72, 0x29, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x5f, 0x2e, 0x5f, 0x5f, 0x45, 0x3d, 0x5f, - 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x3d, - 0x6e, 0x7d, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x7d, 0x2c, - 0x78, 0x3d, 0x30, 0x2c, 0x77, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x26, 0x26, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x7d, 0x2c, 0x4c, 0x2e, 
- 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x65, - 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 0x65, 0x3b, 0x65, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3f, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x5f, 0x73, 0x3a, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, - 0x3d, 0x56, 0x28, 0x7b, 0x7d, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x29, 0x2c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, - 0x20, 0x74, 0x26, 0x26, 0x28, 0x74, 0x3d, 0x74, 0x28, 0x56, 0x28, 0x7b, - 0x7d, 0x2c, 0x65, 0x29, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x29, 0x29, 0x2c, 0x74, 0x26, 0x26, 0x56, 0x28, 0x65, - 0x2c, 0x74, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x26, - 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, 0x28, - 0x6e, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x73, 0x62, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x6e, 0x29, 0x2c, 0x6a, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x29, 0x29, 0x7d, 0x2c, 0x4c, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x5f, 0x76, 0x26, 0x26, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, - 0x65, 0x3d, 0x21, 0x30, 0x2c, 0x74, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, - 0x2c, 0x6a, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x29, 0x7d, 0x2c, 0x4c, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, - 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x4f, 0x2c, 0x43, 0x3d, 0x5b, 0x5d, - 0x2c, 0x55, 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x50, 0x72, - 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x3f, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, - 0x74, 0x68, 0x65, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x50, 0x72, - 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, - 0x65, 0x28, 0x29, 0x29, 0x3a, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, - 0x6f, 0x75, 0x74, 0x2c, 0x48, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x2e, 0x5f, 0x5f, 0x62, - 0x2d, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x2e, 0x5f, 0x5f, 0x62, 0x7d, 0x2c, - 0x71, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x2c, 0x4e, 0x3d, 0x30, 0x3b, - 0x76, 0x61, 0x72, 0x20, 0x63, 0x74, 0x2c, 0x68, 0x74, 0x2c, 0x61, 0x74, - 0x2c, 0x70, 0x74, 0x2c, 0x64, 0x74, 0x3d, 0x30, 0x2c, 0x76, 0x74, 0x3d, - 0x5b, 0x5d, 0x2c, 0x79, 0x74, 0x3d, 0x5b, 0x5d, 0x2c, 0x6d, 0x74, 0x3d, - 0x53, 0x2e, 0x5f, 0x5f, 0x62, 0x2c, 0x67, 0x74, 0x3d, 0x53, 0x2e, 0x5f, - 0x5f, 0x72, 0x2c, 0x62, 0x74, 0x3d, 0x53, 0x2e, 0x64, 0x69, 0x66, 0x66, - 0x65, 0x64, 0x2c, 0x6b, 0x74, 0x3d, 0x53, 0x2e, 0x5f, 0x5f, 0x63, 0x2c, - 0x53, 0x74, 0x3d, 0x53, 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, - 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x78, 0x74, - 
0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x53, 0x2e, 0x5f, 0x5f, 0x68, 0x26, - 0x26, 0x53, 0x2e, 0x5f, 0x5f, 0x68, 0x28, 0x68, 0x74, 0x2c, 0x74, 0x2c, - 0x64, 0x74, 0x7c, 0x7c, 0x6e, 0x29, 0x2c, 0x64, 0x74, 0x3d, 0x30, 0x3b, - 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x68, 0x74, 0x2e, 0x5f, 0x5f, 0x48, - 0x7c, 0x7c, 0x28, 0x68, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x7b, 0x5f, - 0x5f, 0x3a, 0x5b, 0x5d, 0x2c, 0x5f, 0x5f, 0x68, 0x3a, 0x5b, 0x5d, 0x7d, - 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x3e, 0x3d, - 0x65, 0x2e, 0x5f, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, - 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x7b, - 0x5f, 0x5f, 0x56, 0x3a, 0x79, 0x74, 0x7d, 0x29, 0x2c, 0x65, 0x2e, 0x5f, - 0x5f, 0x5b, 0x74, 0x5d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x77, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x64, 0x74, 0x3d, 0x31, 0x2c, 0x43, 0x74, 0x28, 0x49, - 0x74, 0x2c, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x43, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x78, 0x74, 0x28, 0x63, 0x74, 0x2b, - 0x2b, 0x2c, 0x32, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x69, 0x2e, 0x74, 0x3d, - 0x74, 0x2c, 0x21, 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x26, 0x26, 0x28, 0x69, - 0x2e, 0x5f, 0x5f, 0x3d, 0x5b, 0x65, 0x3f, 0x65, 0x28, 0x6e, 0x29, 0x3a, - 0x49, 0x74, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x29, - 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, - 0x3f, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, 0x5b, 0x30, 0x5d, 0x3a, 0x69, 0x2e, - 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x2c, 0x65, 0x3d, 0x69, 0x2e, 0x74, 0x28, - 0x6e, 0x2c, 0x74, 0x29, 0x3b, 0x6e, 0x21, 0x3d, 0x3d, 0x65, 0x26, 0x26, - 0x28, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x5b, 0x65, 0x2c, 0x69, 0x2e, - 0x5f, 0x5f, 0x5b, 0x31, 0x5d, 0x5d, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x63, - 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, - 0x29, 0x29, 0x7d, 0x5d, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x68, - 0x74, 0x2c, 0x21, 0x68, 0x74, 0x2e, 0x75, 0x29, 0x29, 0x7b, 0x76, 0x61, - 0x72, 0x20, 0x5f, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, - 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, 0x5f, 0x48, 0x29, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x5f, - 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, - 0x5f, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, + 0x26, 0x26, 0x28, 0x5f, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, + 0x65, 0x28, 0x6f, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, + 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x45, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x72, 0x3d, 0x5f, + 0x2e, 0x5f, 0x5f, 0x64, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x5f, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, + 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x26, 0x26, 0x28, 0x5f, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, + 0x43, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x2c, 0x69, 0x7c, 0x7c, 0x7b, + 0x7d, 0x29, 0x2c, 0x72, 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x64, 0x29, 0x2c, + 0x72, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x5f, 0x2e, 0x5f, + 0x5f, 0x45, 0x3d, 0x5f, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, + 
0x29, 0x7b, 0x74, 0x3d, 0x6e, 0x7d, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, + 0x74, 0x7d, 0x7d, 0x2c, 0x43, 0x3d, 0x30, 0x2c, 0x45, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x7d, 0x29, - 0x29, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x2e, 0x65, 0x76, 0x65, 0x72, 0x79, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, + 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, + 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, + 0x7d, 0x2c, 0x49, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, + 0x65, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, + 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x65, 0x3d, 0x6e, 0x75, 0x6c, + 0x6c, 0x21, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x26, + 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x21, 0x3d, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3f, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x73, 0x3a, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x29, 0x2c, 0x22, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x26, 0x26, 0x28, 0x74, 0x3d, 0x74, + 0x28, 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x65, 0x29, 0x2c, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x29, 0x2c, 0x74, 0x26, + 0x26, 0x46, 0x28, 0x65, 0x2c, 0x74, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x74, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, + 0x76, 0x26, 0x26, 0x28, 0x6e, 0x26, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x73, 0x62, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x6e, 0x29, 0x2c, + 0x71, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x29, 0x7d, 0x2c, 0x49, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x66, 0x6f, + 0x72, 0x63, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, 0x28, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x30, 0x2c, 0x74, 0x26, 0x26, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, + 0x68, 0x28, 0x74, 0x29, 0x2c, 0x71, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, + 0x29, 0x7d, 0x2c, 0x49, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, + 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x52, 0x2c, + 0x55, 0x3d, 0x5b, 0x5d, 0x2c, 0x4e, 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x3f, 0x50, 0x72, + 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, + 0x79, 0x70, 0x65, 0x2e, 0x74, 0x68, 0x65, 0x6e, 0x2e, 0x62, 0x69, 0x6e, + 0x64, 0x28, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x2e, 0x72, 0x65, + 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x28, 0x29, 0x29, 0x3a, 0x73, 0x65, 0x74, + 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x2c, 0x50, 0x3d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x76, + 0x2e, 0x5f, 0x5f, 0x62, 0x2d, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x2e, 0x5f, + 0x5f, 0x62, 0x7d, 0x2c, 0x47, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x2c, + 
0x24, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x61, 0x74, 0x2c, 0x70, + 0x74, 0x2c, 0x64, 0x74, 0x2c, 0x76, 0x74, 0x2c, 0x79, 0x74, 0x3d, 0x30, + 0x2c, 0x6d, 0x74, 0x3d, 0x5b, 0x5d, 0x2c, 0x67, 0x74, 0x3d, 0x5b, 0x5d, + 0x2c, 0x62, 0x74, 0x3d, 0x77, 0x2e, 0x5f, 0x5f, 0x62, 0x2c, 0x6b, 0x74, + 0x3d, 0x77, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, 0x74, 0x3d, 0x77, 0x2e, + 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x2c, 0x78, 0x74, 0x3d, 0x77, 0x2e, + 0x5f, 0x5f, 0x63, 0x2c, 0x77, 0x74, 0x3d, 0x77, 0x2e, 0x75, 0x6e, 0x6d, + 0x6f, 0x75, 0x6e, 0x74, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x43, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x77, 0x2e, + 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x77, 0x2e, 0x5f, 0x5f, 0x68, 0x28, 0x70, + 0x74, 0x2c, 0x74, 0x2c, 0x79, 0x74, 0x7c, 0x7c, 0x6e, 0x29, 0x2c, 0x79, + 0x74, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x70, 0x74, + 0x2e, 0x5f, 0x5f, 0x48, 0x7c, 0x7c, 0x28, 0x70, 0x74, 0x2e, 0x5f, 0x5f, + 0x48, 0x3d, 0x7b, 0x5f, 0x5f, 0x3a, 0x5b, 0x5d, 0x2c, 0x5f, 0x5f, 0x68, + 0x3a, 0x5b, 0x5d, 0x7d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x3e, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x7b, 0x5f, 0x5f, 0x56, 0x3a, 0x67, 0x74, 0x7d, 0x29, + 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x5b, 0x74, 0x5d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x45, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x74, 0x3d, 0x31, 0x2c, + 0x55, 0x74, 0x28, 0x42, 0x74, 0x2c, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x55, 0x74, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x43, 0x74, + 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x32, 0x29, 0x3b, 0x69, 0x66, 0x28, + 0x69, 0x2e, 0x74, 0x3d, 0x74, 0x2c, 0x21, 0x69, 0x2e, 0x5f, 0x5f, 0x63, + 0x26, 0x26, 0x28, 0x69, 0x2e, 0x5f, 0x5f, 0x3d, 0x5b, 0x65, 0x3f, 0x65, + 0x28, 0x6e, 0x29, 0x3a, 0x42, 0x74, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x2c, 0x6e, 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x69, + 0x2e, 0x5f, 0x5f, 0x4e, 0x3f, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, 0x5b, 0x30, + 0x5d, 0x3a, 0x69, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x2c, 0x65, 0x3d, + 0x69, 0x2e, 0x74, 0x28, 0x6e, 0x2c, 0x74, 0x29, 0x3b, 0x6e, 0x21, 0x3d, + 0x3d, 0x65, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x5b, + 0x65, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x5b, 0x31, 0x5d, 0x5d, 0x2c, 0x69, + 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, + 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x5d, 0x2c, 0x69, 0x2e, 0x5f, + 0x5f, 0x63, 0x3d, 0x70, 0x74, 0x2c, 0x21, 0x70, 0x74, 0x2e, 0x75, 0x29, + 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x21, 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, 0x5f, + 0x48, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x76, + 0x61, 0x72, 0x20, 0x5f, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, + 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, - 0x5f, 0x4e, 0x7d, 0x29, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x21, 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, - 
0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x3b, 0x76, - 0x61, 0x72, 0x20, 0x72, 0x3d, 0x21, 0x31, 0x3b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, + 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, + 0x5f, 0x63, 0x7d, 0x29, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x2e, 0x65, + 0x76, 0x65, 0x72, 0x79, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x7d, 0x29, 0x29, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x21, 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x63, 0x61, + 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, + 0x65, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x72, 0x3d, 0x21, 0x31, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x5f, 0x2e, 0x66, 0x6f, 0x72, + 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, + 0x5f, 0x4e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, + 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x4e, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x21, 0x3d, 0x3d, 0x74, 0x2e, + 0x5f, 0x5f, 0x5b, 0x30, 0x5d, 0x26, 0x26, 0x28, 0x72, 0x3d, 0x21, 0x30, + 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x21, 0x28, 0x21, 0x72, 0x26, 0x26, + 0x69, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, + 0x3d, 0x3d, 0x74, 0x29, 0x26, 0x26, 0x28, 0x21, 0x6f, 0x7c, 0x7c, 0x6f, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, + 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x3b, 0x70, 0x74, 0x2e, 0x75, + 0x3d, 0x21, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x70, 0x74, + 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x2c, 0x72, + 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3b, + 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, + 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, + 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x5f, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x6f, 0x3b, + 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x28, 0x74, + 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x2c, 0x6f, 0x3d, 0x69, 0x7d, 0x72, 0x26, + 0x26, 0x72, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, 0x2c, 0x70, 0x74, 0x2e, + 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, + 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x5f, 0x7d, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, + 0x7c, 0x7c, 0x69, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x48, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, + 0x2b, 0x2c, 0x33, 0x29, 0x3b, 0x21, 0x77, 0x2e, 0x5f, 0x5f, 0x73, 0x26, + 0x26, 0x6a, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, + 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x65, 0x2e, + 0x69, 0x3d, 0x6e, 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, + 
0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x65, 0x29, 0x29, 0x7d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4e, 0x74, 0x28, + 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x43, + 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x34, 0x29, 0x3b, 0x21, 0x77, + 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x6a, 0x74, 0x28, 0x65, 0x2e, 0x5f, + 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, + 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x70, 0x74, 0x2e, + 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x65, 0x29, 0x29, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x50, 0x74, + 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, + 0x74, 0x3d, 0x35, 0x2c, 0x44, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x74, 0x7d, + 0x7d, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x24, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x29, 0x7b, 0x79, 0x74, 0x3d, 0x36, 0x2c, 0x4e, 0x74, 0x28, 0x28, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, + 0x3f, 0x28, 0x74, 0x28, 0x6e, 0x28, 0x29, 0x29, 0x2c, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x29, + 0x3a, 0x74, 0x3f, 0x28, 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x3d, 0x6e, 0x28, 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x7d, 0x29, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, + 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x65, 0x3a, + 0x65, 0x2e, 0x63, 0x6f, 0x6e, 0x63, 0x61, 0x74, 0x28, 0x74, 0x29, 0x29, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x44, 0x74, + 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, + 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x37, 0x29, 0x3b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x65, 0x2e, 0x5f, + 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x3f, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x56, + 0x3d, 0x74, 0x28, 0x29, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x65, + 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x56, + 0x29, 0x3a, 0x65, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x54, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x74, 0x3d, 0x38, 0x2c, + 0x44, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, + 0x29, 0x2c, 0x6e, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x56, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x6e, 0x3d, 0x70, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x5b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x5d, 0x2c, 0x65, 0x3d, 0x43, 0x74, + 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x39, 0x29, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x63, 0x3d, 0x74, 0x2c, 0x6e, 0x3f, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x26, + 
0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x21, 0x30, 0x2c, 0x6e, 0x2e, + 0x73, 0x75, 0x62, 0x28, 0x70, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3a, + 0x74, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x41, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x77, 0x2e, + 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x26, 0x26, 0x77, 0x2e, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, + 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x28, 0x6e, 0x3f, 0x6e, 0x28, 0x74, + 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x46, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x6e, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x31, 0x30, + 0x29, 0x2c, 0x65, 0x3d, 0x45, 0x74, 0x28, 0x29, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x70, + 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, + 0x69, 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x7c, 0x7c, 0x28, 0x70, 0x74, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, + 0x64, 0x43, 0x61, 0x74, 0x63, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x5f, + 0x5f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x28, 0x74, 0x2c, 0x69, 0x29, + 0x2c, 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x74, 0x29, 0x7d, 0x29, 0x2c, 0x5b, + 0x65, 0x5b, 0x30, 0x5d, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x29, 0x7b, 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x29, 0x7d, 0x5d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x74, 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x74, 0x3d, 0x43, 0x74, 0x28, 0x61, 0x74, 0x2b, 0x2b, 0x2c, 0x31, + 0x31, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x29, + 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, + 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x3d, 0x6e, 0x26, 0x26, 0x21, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, + 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, 0x76, 0x61, 0x72, 0x20, + 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x7c, 0x7c, 0x28, 0x6e, 0x2e, + 0x5f, 0x5f, 0x6d, 0x3d, 0x5b, 0x30, 0x2c, 0x30, 0x5d, 0x29, 0x3b, 0x74, + 0x2e, 0x5f, 0x5f, 0x3d, 0x22, 0x50, 0x22, 0x2b, 0x65, 0x5b, 0x30, 0x5d, + 0x2b, 0x22, 0x2d, 0x22, 0x2b, 0x65, 0x5b, 0x31, 0x5d, 0x2b, 0x2b, 0x7d, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x7d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x57, 0x74, 0x28, + 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, + 0x74, 0x3d, 0x6d, 0x74, 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, 0x28, 0x29, + 0x3b, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x50, 0x26, 0x26, + 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x2e, + 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, + 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, + 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, + 0x68, 0x28, 0x49, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, + 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, + 0x28, 0x75, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, + 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x77, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x75, + 
0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x77, 0x2e, 0x5f, + 0x5f, 0x62, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x29, 0x7b, 0x70, 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x62, + 0x74, 0x26, 0x26, 0x62, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x77, 0x2e, + 0x5f, 0x5f, 0x72, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x29, 0x7b, 0x6b, 0x74, 0x26, 0x26, 0x6b, 0x74, 0x28, 0x74, + 0x29, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, + 0x3d, 0x28, 0x70, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x2e, + 0x5f, 0x5f, 0x48, 0x3b, 0x6e, 0x26, 0x26, 0x28, 0x64, 0x74, 0x3d, 0x3d, + 0x3d, 0x70, 0x74, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, + 0x5d, 0x2c, 0x70, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, + 0x6e, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, - 0x5d, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, - 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x2c, 0x6e, 0x21, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x5b, 0x30, - 0x5d, 0x26, 0x26, 0x28, 0x72, 0x3d, 0x21, 0x30, 0x29, 0x7d, 0x7d, 0x29, - 0x29, 0x2c, 0x21, 0x28, 0x21, 0x72, 0x26, 0x26, 0x69, 0x2e, 0x5f, 0x5f, - 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x3d, 0x3d, 0x74, 0x29, - 0x26, 0x26, 0x28, 0x21, 0x6f, 0x7c, 0x7c, 0x6f, 0x2e, 0x63, 0x61, 0x6c, - 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x29, 0x29, 0x7d, 0x3b, 0x68, 0x74, 0x2e, 0x75, 0x3d, 0x21, 0x30, 0x3b, - 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x68, 0x74, 0x2e, 0x73, 0x68, 0x6f, - 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x2c, 0x72, 0x3d, 0x68, 0x74, 0x2e, - 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, - 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3b, 0x68, 0x74, 0x2e, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x69, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x29, 0x2c, 0x6f, 0x3d, 0x69, 0x7d, 0x72, 0x26, 0x26, 0x72, 0x2e, 0x63, - 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x29, 0x7d, 0x2c, 0x68, 0x74, 0x2e, 0x73, 0x68, 0x6f, 0x75, - 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x5f, 0x7d, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x69, 0x2e, 0x5f, 0x5f, 0x4e, 0x7c, 0x7c, 0x69, 0x2e, - 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x45, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x65, 0x3d, 0x78, 0x74, 0x28, 0x63, 0x74, 0x2b, 0x2b, 0x2c, 0x33, 0x29, - 0x3b, 0x21, 0x53, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x52, 0x74, 0x28, - 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, 0x29, 0x26, 0x26, 0x28, 0x65, - 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, - 0x68, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, - 
0x75, 0x73, 0x68, 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x55, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, - 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x78, 0x74, 0x28, 0x63, 0x74, - 0x2b, 0x2b, 0x2c, 0x34, 0x29, 0x3b, 0x21, 0x53, 0x2e, 0x5f, 0x5f, 0x73, - 0x26, 0x26, 0x52, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, - 0x29, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x65, - 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x68, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x48, 0x74, 0x28, 0x74, 0x29, 0x7b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x64, 0x74, 0x3d, 0x35, 0x2c, - 0x50, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x74, 0x7d, 0x7d, 0x29, 0x2c, 0x5b, - 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x4e, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x64, 0x74, - 0x3d, 0x36, 0x2c, 0x55, 0x74, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x3f, 0x28, 0x74, 0x28, - 0x6e, 0x28, 0x29, 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x29, 0x3a, 0x74, 0x3f, 0x28, - 0x74, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x28, - 0x29, 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x29, - 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x29, 0x2c, 0x6e, 0x75, - 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x65, 0x3a, 0x65, 0x2e, 0x63, 0x6f, - 0x6e, 0x63, 0x61, 0x74, 0x28, 0x74, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x50, 0x74, 0x28, 0x74, 0x2c, 0x6e, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x78, 0x74, 0x28, 0x63, - 0x74, 0x2b, 0x2b, 0x2c, 0x37, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x52, 0x74, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2c, 0x6e, - 0x29, 0x3f, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, 0x74, 0x28, 0x29, - 0x2c, 0x65, 0x2e, 0x69, 0x3d, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x68, - 0x3d, 0x74, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x3a, 0x65, 0x2e, - 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x44, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x64, 0x74, 0x3d, 0x38, 0x2c, 0x50, 0x74, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, 0x29, 0x2c, 0x6e, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x24, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x68, 0x74, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x5b, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x5d, 0x2c, 0x65, 0x3d, 0x78, 0x74, 0x28, 0x63, 0x74, 0x2b, - 0x2b, 0x2c, 0x39, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x65, 0x2e, 0x63, 0x3d, 0x74, 0x2c, 0x6e, 0x3f, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x28, 0x65, 0x2e, - 
0x5f, 0x5f, 0x3d, 0x21, 0x30, 0x2c, 0x6e, 0x2e, 0x73, 0x75, 0x62, 0x28, - 0x68, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3a, 0x74, 0x2e, 0x5f, 0x5f, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x54, 0x74, - 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x53, 0x2e, 0x75, 0x73, 0x65, 0x44, - 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x26, 0x26, 0x53, - 0x2e, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x28, 0x6e, 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x56, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x78, 0x74, - 0x28, 0x63, 0x74, 0x2b, 0x2b, 0x2c, 0x31, 0x30, 0x29, 0x2c, 0x65, 0x3d, - 0x77, 0x74, 0x28, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6e, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x68, 0x74, 0x2e, 0x63, 0x6f, - 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, - 0x74, 0x63, 0x68, 0x7c, 0x7c, 0x28, 0x68, 0x74, 0x2e, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x43, 0x61, 0x74, - 0x63, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x2c, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x26, 0x26, 0x6e, - 0x2e, 0x5f, 0x5f, 0x28, 0x74, 0x2c, 0x69, 0x29, 0x2c, 0x65, 0x5b, 0x31, - 0x5d, 0x28, 0x74, 0x29, 0x7d, 0x29, 0x2c, 0x5b, 0x65, 0x5b, 0x30, 0x5d, - 0x2c, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, - 0x65, 0x5b, 0x31, 0x5d, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, - 0x7d, 0x5d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x41, 0x74, 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3d, 0x78, - 0x74, 0x28, 0x63, 0x74, 0x2b, 0x2b, 0x2c, 0x31, 0x31, 0x29, 0x3b, 0x69, - 0x66, 0x28, 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x7b, 0x66, 0x6f, 0x72, - 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x68, 0x74, 0x2e, 0x5f, 0x5f, - 0x76, 0x3b, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, - 0x21, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x3b, 0x29, 0x6e, 0x3d, 0x6e, - 0x2e, 0x5f, 0x5f, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3d, 0x6e, 0x2e, - 0x5f, 0x5f, 0x6d, 0x7c, 0x7c, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x6d, 0x3d, - 0x5b, 0x30, 0x2c, 0x30, 0x5d, 0x29, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, - 0x22, 0x50, 0x22, 0x2b, 0x65, 0x5b, 0x30, 0x5d, 0x2b, 0x22, 0x2d, 0x22, - 0x2b, 0x65, 0x5b, 0x31, 0x5d, 0x2b, 0x2b, 0x7d, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x46, 0x74, 0x28, 0x29, 0x7b, 0x66, 0x6f, - 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, 0x74, 0x3d, 0x76, 0x74, - 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, 0x28, 0x29, 0x3b, 0x29, 0x69, 0x66, - 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x50, 0x26, 0x26, 0x74, 0x2e, 0x5f, 0x5f, - 0x48, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, - 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, - 0x4f, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, - 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x4c, 0x74, - 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, - 0x5b, 0x5d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x75, 0x29, 0x7b, - 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, - 0x2c, 0x53, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x75, 0x2c, 0x74, 0x2e, 0x5f, - 
0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x53, 0x2e, 0x5f, 0x5f, 0x62, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x68, - 0x74, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6d, 0x74, 0x26, 0x26, 0x6d, - 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x53, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x67, 0x74, 0x26, 0x26, 0x67, 0x74, 0x28, 0x74, 0x29, 0x2c, 0x63, 0x74, - 0x3d, 0x30, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x28, 0x68, 0x74, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x48, 0x3b, - 0x6e, 0x26, 0x26, 0x28, 0x61, 0x74, 0x3d, 0x3d, 0x3d, 0x68, 0x74, 0x3f, - 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x68, 0x74, - 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, - 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, - 0x5f, 0x5f, 0x4e, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, - 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, - 0x79, 0x74, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x3d, 0x74, 0x2e, 0x69, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x29, 0x29, 0x29, 0x3a, - 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, - 0x63, 0x68, 0x28, 0x4f, 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, - 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x4c, 0x74, 0x29, - 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x74, - 0x3d, 0x30, 0x29, 0x29, 0x2c, 0x61, 0x74, 0x3d, 0x68, 0x74, 0x7d, 0x2c, - 0x53, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x62, 0x74, 0x26, - 0x26, 0x62, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x6e, 0x26, 0x26, 0x6e, 0x2e, - 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, - 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, - 0x28, 0x31, 0x21, 0x3d, 0x3d, 0x76, 0x74, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x6e, 0x29, 0x26, 0x26, 0x70, 0x74, 0x3d, 0x3d, 0x3d, 0x53, 0x2e, - 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x7c, 0x7c, 0x28, - 0x28, 0x70, 0x74, 0x3d, 0x53, 0x2e, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, - 0x61, 0x6d, 0x65, 0x29, 0x7c, 0x7c, 0x57, 0x74, 0x29, 0x28, 0x46, 0x74, - 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, - 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x69, - 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x74, 0x2e, 0x69, - 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x21, 0x3d, 0x3d, 0x79, 0x74, - 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, - 0x56, 0x29, 0x2c, 0x74, 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, 0x79, 0x74, 0x7d, 0x29, - 0x29, 0x29, 0x2c, 0x61, 0x74, 0x3d, 0x68, 0x74, 0x3d, 0x6e, 0x75, 0x6c, - 0x6c, 0x7d, 0x2c, 0x53, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x6e, - 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x74, + 
0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x26, 0x26, 0x28, 0x74, 0x2e, + 0x5f, 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, 0x29, 0x2c, 0x74, 0x2e, + 0x5f, 0x5f, 0x56, 0x3d, 0x67, 0x74, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x4e, + 0x3d, 0x74, 0x2e, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, + 0x29, 0x29, 0x29, 0x3a, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, + 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, - 0x28, 0x4f, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x74, - 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x28, + 0x28, 0x49, 0x74, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, + 0x5d, 0x2c, 0x61, 0x74, 0x3d, 0x30, 0x29, 0x29, 0x2c, 0x64, 0x74, 0x3d, + 0x70, 0x74, 0x7d, 0x2c, 0x77, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, + 0x7b, 0x53, 0x74, 0x26, 0x26, 0x53, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, + 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x6e, + 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x6e, 0x2e, + 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x26, 0x26, 0x28, 0x31, 0x21, 0x3d, 0x3d, 0x6d, 0x74, 0x2e, + 0x70, 0x75, 0x73, 0x68, 0x28, 0x6e, 0x29, 0x26, 0x26, 0x76, 0x74, 0x3d, + 0x3d, 0x3d, 0x77, 0x2e, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, + 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, + 0x65, 0x7c, 0x7c, 0x28, 0x28, 0x76, 0x74, 0x3d, 0x77, 0x2e, 0x72, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x29, 0x7c, 0x7c, 0x4c, 0x74, + 0x29, 0x28, 0x57, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x48, + 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, 0x2e, 0x5f, 0x5f, - 0x7c, 0x7c, 0x4c, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x63, - 0x61, 0x74, 0x63, 0x68, 0x28, 0x73, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, - 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x26, 0x26, 0x28, - 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x29, 0x7d, 0x29, 0x29, - 0x2c, 0x6e, 0x3d, 0x5b, 0x5d, 0x2c, 0x53, 0x2e, 0x5f, 0x5f, 0x65, 0x28, - 0x73, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x29, 0x29, - 0x2c, 0x6b, 0x74, 0x26, 0x26, 0x6b, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, - 0x7d, 0x2c, 0x53, 0x2e, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x53, 0x74, 0x26, 0x26, 0x53, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x76, 0x61, - 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, - 0x65, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x26, 0x26, 0x28, 0x65, - 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, 0x66, 0x6f, 0x72, 0x45, - 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x4f, 0x74, 0x28, - 0x74, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, - 0x6e, 0x3d, 0x74, 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, - 0x48, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x26, 0x26, - 0x53, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, - 0x76, 
0x29, 0x29, 0x7d, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x4d, 0x74, 0x3d, - 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, - 0x72, 0x61, 0x6d, 0x65, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x57, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6e, 0x2c, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x63, 0x6c, 0x65, 0x61, 0x72, 0x54, 0x69, 0x6d, 0x65, - 0x6f, 0x75, 0x74, 0x28, 0x69, 0x29, 0x2c, 0x4d, 0x74, 0x26, 0x26, 0x63, - 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x6e, 0x29, 0x2c, 0x73, - 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x29, - 0x7d, 0x2c, 0x69, 0x3d, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, - 0x75, 0x74, 0x28, 0x65, 0x2c, 0x31, 0x30, 0x30, 0x29, 0x3b, 0x4d, 0x74, - 0x26, 0x26, 0x28, 0x6e, 0x3d, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, - 0x6d, 0x65, 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, - 0x72, 0x20, 0x6e, 0x3d, 0x68, 0x74, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x3b, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x26, - 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x2c, 0x65, 0x28, 0x29, 0x29, 0x2c, 0x68, 0x74, 0x3d, 0x6e, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x68, 0x74, - 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x28, - 0x29, 0x2c, 0x68, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x52, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, 0x7c, 0x7c, 0x74, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7c, 0x7c, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, - 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6e, 0x21, 0x3d, 0x3d, 0x74, 0x5b, 0x65, 0x5d, 0x7d, 0x29, 0x29, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x49, 0x74, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x3f, 0x6e, 0x28, 0x74, 0x29, - 0x3a, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x6a, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x53, 0x5b, 0x74, 0x5d, - 0x3d, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x75, 0x6c, 0x6c, - 0x2c, 0x53, 0x5b, 0x74, 0x5d, 0x7c, 0x7c, 0x28, 0x28, 0x29, 0x3d, 0x3e, - 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x71, 0x74, 0x2c, - 0x42, 0x74, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x47, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x42, 0x74, 0x29, - 0x42, 0x74, 0x28, 0x29, 0x3b, 0x42, 0x74, 0x3d, 0x74, 0x26, 0x26, 0x74, - 0x2e, 0x53, 0x28, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x7a, 0x74, 0x28, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x74, - 0x7d, 
0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x4b, - 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3d, 0x74, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x50, - 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x77, 0x68, - 0x69, 0x6c, 0x65, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x69, - 0x66, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x7b, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x62, - 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, - 0x24, 0x75, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x64, 0x61, 0x74, 0x61, - 0x3d, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x64, 0x28, 0x28, 0x29, 0x3d, 0x3e, + 0x7b, 0x74, 0x2e, 0x69, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x48, + 0x3d, 0x74, 0x2e, 0x69, 0x29, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x21, + 0x3d, 0x3d, 0x67, 0x74, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, + 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x29, 0x2c, 0x74, 0x2e, 0x69, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x56, 0x3d, + 0x67, 0x74, 0x7d, 0x29, 0x29, 0x29, 0x2c, 0x64, 0x74, 0x3d, 0x70, 0x74, + 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x2c, 0x77, 0x2e, 0x5f, 0x5f, 0x63, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, + 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, + 0x72, 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, + 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, + 0x5f, 0x68, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x69, 0x6c, + 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, + 0x74, 0x2e, 0x5f, 0x5f, 0x7c, 0x7c, 0x49, 0x74, 0x28, 0x74, 0x29, 0x7d, + 0x29, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x73, 0x29, 0x7b, + 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, + 0x68, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, + 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x5b, 0x5d, 0x2c, 0x77, 0x2e, + 0x5f, 0x5f, 0x65, 0x28, 0x73, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, + 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x78, 0x74, 0x26, 0x26, 0x78, 0x74, 0x28, + 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x2c, 0x77, 0x2e, 0x75, 0x6e, 0x6d, 0x6f, + 0x75, 0x6e, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x29, 0x7b, 0x77, 0x74, 0x26, 0x26, 0x77, 0x74, 0x28, 0x74, + 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, + 0x5f, 0x5f, 0x63, 0x3b, 0x65, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x48, + 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, + 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x72, 0x79, + 0x7b, 0x52, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, + 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x7d, 0x7d, 0x29, 0x29, 0x2c, + 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x2c, 0x6e, 0x26, 0x26, 0x77, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x6e, 0x2c, + 0x65, 
0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x29, 0x7d, 0x3b, 0x76, 0x61, 0x72, + 0x20, 0x4f, 0x74, 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x72, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x3b, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6c, 0x65, 0x61, 0x72, + 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x69, 0x29, 0x2c, 0x4f, + 0x74, 0x26, 0x26, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x41, 0x6e, 0x69, + 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, + 0x6e, 0x29, 0x2c, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, + 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x69, 0x3d, 0x73, 0x65, 0x74, 0x54, + 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x65, 0x2c, 0x31, 0x30, 0x30, + 0x29, 0x3b, 0x4f, 0x74, 0x26, 0x26, 0x28, 0x6e, 0x3d, 0x72, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x52, 0x74, 0x28, 0x74, + 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x2c, 0x65, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x22, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x65, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x65, 0x28, 0x29, 0x29, 0x2c, + 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x49, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x6e, 0x3d, 0x70, 0x74, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x28, 0x29, 0x2c, 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, + 0x7c, 0x7c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x21, 0x3d, + 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7c, 0x7c, 0x6e, + 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x21, 0x3d, 0x3d, 0x74, 0x5b, 0x65, 0x5d, + 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x42, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x3f, + 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, + 0x77, 0x5b, 0x74, 0x5d, 0x3d, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, + 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x77, 0x5b, 0x74, 0x5d, 0x7c, 0x7c, 0x28, + 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x6c, 0x65, 0x74, + 0x20, 0x47, 0x74, 0x2c, 0x7a, 0x74, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x7a, 0x74, 0x29, 0x7a, 0x74, 0x28, 0x29, 0x3b, 0x7a, 0x74, 0x3d, + 0x74, 0x26, 0x26, 0x74, 0x2e, 0x53, 0x28, 0x29, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x74, 0x28, 0x7b, 0x64, 0x61, + 0x74, 
0x61, 0x3a, 0x74, 0x7d, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6e, 0x3d, 0x58, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x65, 0x3d, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, + 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, + 0x76, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x74, 0x3d, 0x74, 0x2e, + 0x5f, 0x5f, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, + 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, + 0x3d, 0x34, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, + 0x3e, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, 0x69, 0x66, 0x28, 0x21, + 0x45, 0x28, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x29, 0x26, + 0x26, 0x33, 0x3d, 0x3d, 0x3d, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, + 0x28, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, + 0x29, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3a, 0x74, 0x2e, 0x6e, + 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, 0x29, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, + 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x3b, 0x65, 0x6c, 0x73, + 0x65, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, + 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x65, 0x74, 0x53, + 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x7d, 0x3b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x30, 0x3a, 0x21, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x22, 0x22, 0x3a, 0x74, 0x7c, 0x7c, 0x22, 0x22, 0x7d, 0x29, 0x7d, 0x2c, 0x5b, 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x7d, 0x7a, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, + 0x65, 0x7d, 0x4b, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x5f, 0x73, 0x74, 0x22, 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, - 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x28, 0x66, + 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x2c, 0x74, 0x79, 0x70, 0x65, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, - 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x7a, + 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x4b, 0x74, 0x7d, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, @@ -1560,7 +1583,7 @@ unsigned char index_js[] = { 0x7d, 0x7d, 0x7d, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x31, 0x7d, 0x7d, 0x29, 0x3b, 
- 0x6a, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x62, 0x22, 0x2c, 0x28, 0x74, 0x2c, + 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x62, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x6c, 0x65, 0x74, @@ -1570,34 +1593,34 @@ unsigned char index_js[] = { 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x63, 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, 0x65, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x65, 0x5b, 0x69, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x20, - 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x66, + 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x63, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, 0x70, 0x3d, 0x74, 0x3d, 0x7b, 0x7d, 0x3b, 0x74, 0x5b, 0x69, 0x5d, 0x3d, 0x5f, 0x3b, 0x65, 0x5b, 0x69, 0x5d, 0x3d, 0x5f, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, - 0x29, 0x3b, 0x6a, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x72, 0x22, 0x2c, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x47, 0x74, 0x28, 0x29, 0x3b, + 0x29, 0x3b, 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x72, 0x22, 0x2c, 0x28, + 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x2c, 0x69, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x69, 0x29, 0x7b, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x65, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x65, 0x29, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3d, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3b, 0x62, 0x28, 0x28, + 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3b, 0x53, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x29, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, 0x29, 0x7d, 0x71, 0x74, 0x3d, 0x69, - 0x3b, 0x47, 0x74, 0x28, 0x65, 0x29, 0x3b, 0x74, 0x28, 0x6e, 0x29, 0x7d, - 0x29, 0x3b, 0x6a, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x65, 0x22, 0x2c, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x3d, 0x3e, 0x7b, 0x47, - 0x74, 0x28, 0x29, 0x3b, 0x71, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, 0x29, 0x7d, 0x47, 0x74, 0x3d, 0x69, + 0x3b, 0x4a, 0x74, 0x28, 0x65, 0x29, 0x3b, 0x74, 0x28, 0x6e, 0x29, 0x7d, + 0x29, 0x3b, 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x65, 0x22, 0x2c, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, + 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7d, 0x29, - 0x3b, 0x6a, 0x74, 0x28, 0x22, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x22, - 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x47, 0x74, 0x28, - 0x29, 0x3b, 0x71, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, + 0x3b, 0x71, 0x74, 0x28, 0x22, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x22, + 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, 0x74, 0x28, + 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 
0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x28, 0x65, @@ -1617,19 +1640,19 @@ unsigned char index_js[] = { 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6f, 0x3d, 0x6e, 0x5b, 0x5f, 0x5d, 0x2c, 0x72, 0x3d, 0x74, 0x5b, 0x5f, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6f, 0x29, 0x7b, 0x6f, 0x3d, - 0x4a, 0x74, 0x28, 0x65, 0x2c, 0x5f, 0x2c, 0x72, 0x2c, 0x69, 0x29, 0x3b, + 0x51, 0x74, 0x28, 0x65, 0x2c, 0x5f, 0x2c, 0x72, 0x2c, 0x69, 0x29, 0x3b, 0x6e, 0x5b, 0x5f, 0x5d, 0x3d, 0x6f, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6f, 0x2e, 0x6f, 0x28, 0x72, 0x2c, 0x69, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x4a, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, + 0x6f, 0x6e, 0x20, 0x51, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x6f, 0x3d, - 0x73, 0x28, 0x65, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, + 0x68, 0x28, 0x65, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x6f, 0x3a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x6f, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x69, 0x3d, 0x6e, 0x7d, - 0x2c, 0x64, 0x3a, 0x62, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x63, 0x6f, + 0x2c, 0x64, 0x3a, 0x53, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6f, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x69, 0x5b, 0x6e, 0x5d, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x69, 0x5b, 0x6e, @@ -1639,7 +1662,7 @@ unsigned char index_js[] = { 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x29, 0x7d, - 0x7d, 0x29, 0x7d, 0x7d, 0x6a, 0x74, 0x28, 0x22, 0x75, 0x6e, 0x6d, 0x6f, + 0x7d, 0x29, 0x7d, 0x7d, 0x71, 0x74, 0x28, 0x22, 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, @@ -1656,196 +1679,198 @@ unsigned char index_js[] = { 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x6e, 0x2e, 0x64, 0x28, 0x29, - 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x6a, 0x74, + 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x68, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x3c, - 0x33, 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x32, 0x3b, - 0x74, 0x28, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7d, 0x29, 0x3b, 0x4c, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, - 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 
0x2c, 0x6e, 0x29, 0x7b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x65, 0x26, - 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, - 0x73, 0x7c, 0x7c, 0x34, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, - 0x24, 0x66, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, - 0x3b, 0x69, 0x66, 0x28, 0x33, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x5f, 0x24, 0x66, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, - 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, - 0x6e, 0x20, 0x6e, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, - 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, - 0x6e, 0x20, 0x74, 0x29, 0x69, 0x66, 0x28, 0x22, 0x5f, 0x5f, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x22, 0x21, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x74, - 0x5b, 0x69, 0x5d, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x29, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, - 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x29, 0x69, 0x66, 0x28, 0x21, 0x28, 0x69, 0x20, - 0x69, 0x6e, 0x20, 0x74, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x21, 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, - 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x74, - 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x50, - 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x73, 0x28, 0x74, 0x29, 0x2c, 0x5b, + 0x33, 0x7c, 0x7c, 0x39, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x6e, 0x2e, 0x5f, + 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, + 0x2c, 0x69, 0x29, 0x7d, 0x29, 0x3b, 0x49, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, + 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x65, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, + 0x69, 0x66, 0x28, 0x21, 0x28, 0x65, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x73, 0x7c, 0x7c, 0x34, 0x26, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x29, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x33, + 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x69, + 0x66, 0x28, 0x22, 0x5f, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, + 0x21, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x74, 0x5b, 0x69, 0x5d, 0x21, 0x3d, + 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, + 0x69, 0x5d, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, + 0x69, 0x66, 0x28, 0x21, 0x28, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, + 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 
0x3b, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, + 0x3e, 0x68, 0x28, 0x74, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, 0x74, 0x28, 0x74, 0x29, + 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x50, 0x74, 0x28, + 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x3d, 0x74, 0x3b, 0x47, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, + 0x34, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x44, 0x74, 0x28, + 0x28, 0x29, 0x3d, 0x3e, 0x79, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6e, 0x2e, + 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x51, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x6e, 0x3d, 0x48, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, 0x71, 0x74, 0x2e, 0x5f, - 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x50, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x64, 0x28, 0x28, - 0x29, 0x3d, 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, - 0x28, 0x29, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x48, 0x74, 0x28, 0x74, 0x29, - 0x3b, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, - 0x3b, 0x45, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x62, 0x28, 0x28, 0x29, - 0x3d, 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, - 0x29, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x59, - 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x5f, 0x3b, 0x6e, 0x5b, 0x30, 0x5d, 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, - 0x28, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x31, 0x3b, 0x6f, 0x3c, 0x6e, - 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, - 0x7b, 0x76, 0x61, 0x72, 0x20, 0x72, 0x3d, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, - 0x5d, 0x2c, 0x75, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3f, 0x28, 0x6e, 0x5b, - 0x30, 0x5d, 0x7c, 0x3d, 0x72, 0x3f, 0x31, 0x3a, 0x32, 0x2c, 0x65, 0x5b, - 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, 0x5d, 0x29, 0x3a, 0x6e, 0x5b, 0x2b, - 0x2b, 0x6f, 0x5d, 0x3b, 0x33, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, - 0x30, 0x5d, 0x3d, 0x75, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, - 0x5b, 0x31, 0x5d, 0x3d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x61, - 0x73, 0x73, 0x69, 0x67, 0x6e, 0x28, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, - 0x7b, 0x7d, 0x2c, 0x75, 0x29, 0x3a, 0x35, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, - 0x28, 0x69, 0x5b, 0x31, 0x5d, 0x3d, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, - 0x7b, 0x7d, 0x29, 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x3d, - 0x75, 0x3a, 0x36, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x31, 0x5d, - 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x2b, 0x3d, 0x75, 0x2b, - 0x22, 0x22, 0x3a, 0x72, 0x3f, 0x28, 0x5f, 0x3d, 0x74, 0x2e, 0x61, 0x70, - 0x70, 0x6c, 0x79, 0x28, 0x75, 0x2c, 0x59, 0x74, 0x28, 0x74, 0x2c, 0x75, - 0x2c, 0x65, 0x2c, 0x5b, 0x22, 0x22, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x5d, - 0x29, 0x29, 0x2c, 0x69, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x5f, 0x29, - 0x2c, 0x75, 0x5b, 0x30, 0x5d, 0x3f, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, - 0x32, 0x3a, 0x28, 0x6e, 0x5b, 0x6f, 0x2d, 
0x32, 0x5d, 0x3d, 0x30, 0x2c, - 0x6e, 0x5b, 0x6f, 0x5d, 0x3d, 0x5f, 0x29, 0x29, 0x3a, 0x69, 0x2e, 0x70, - 0x75, 0x73, 0x68, 0x28, 0x75, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x69, 0x7d, 0x2c, 0x5a, 0x74, 0x3d, 0x6e, 0x65, 0x77, 0x20, - 0x4d, 0x61, 0x70, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x74, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, - 0x3d, 0x5a, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7c, 0x7c, - 0x28, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x2c, 0x5a, - 0x74, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, - 0x29, 0x29, 0x2c, 0x28, 0x6e, 0x3d, 0x59, 0x74, 0x28, 0x74, 0x68, 0x69, - 0x73, 0x2c, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7c, 0x7c, - 0x28, 0x6e, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x66, - 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x2c, 0x69, - 0x3d, 0x31, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x2c, 0x6f, 0x3d, 0x22, 0x22, - 0x2c, 0x72, 0x3d, 0x5b, 0x30, 0x5d, 0x2c, 0x75, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x31, 0x3d, 0x3d, - 0x3d, 0x69, 0x26, 0x26, 0x28, 0x74, 0x7c, 0x7c, 0x28, 0x5f, 0x3d, 0x5f, - 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, - 0x73, 0x2a, 0x5c, 0x6e, 0x5c, 0x73, 0x2a, 0x7c, 0x5c, 0x73, 0x2a, 0x5c, - 0x6e, 0x5c, 0x73, 0x2a, 0x24, 0x2f, 0x67, 0x2c, 0x22, 0x22, 0x29, 0x29, - 0x29, 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x30, 0x2c, 0x74, - 0x2c, 0x5f, 0x29, 0x3a, 0x33, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, - 0x74, 0x7c, 0x7c, 0x5f, 0x29, 0x3f, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, - 0x68, 0x28, 0x33, 0x2c, 0x74, 0x2c, 0x5f, 0x29, 0x2c, 0x69, 0x3d, 0x32, - 0x29, 0x3a, 0x32, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x22, 0x2e, 0x2e, - 0x2e, 0x22, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x74, 0x3f, 0x72, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x34, 0x2c, 0x74, 0x2c, 0x30, 0x29, 0x3a, - 0x32, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x5f, 0x26, 0x26, 0x21, 0x74, - 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x35, 0x2c, 0x30, 0x2c, - 0x21, 0x30, 0x2c, 0x5f, 0x29, 0x3a, 0x69, 0x3e, 0x3d, 0x35, 0x26, 0x26, - 0x28, 0x28, 0x5f, 0x7c, 0x7c, 0x21, 0x74, 0x26, 0x26, 0x35, 0x3d, 0x3d, - 0x3d, 0x69, 0x29, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x69, 0x2c, 0x30, 0x2c, 0x5f, 0x2c, 0x65, 0x29, 0x2c, 0x69, 0x3d, - 0x36, 0x29, 0x2c, 0x74, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, - 0x68, 0x28, 0x69, 0x2c, 0x74, 0x2c, 0x30, 0x2c, 0x65, 0x29, 0x2c, 0x69, - 0x3d, 0x36, 0x29, 0x29, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x7d, 0x2c, 0x6c, - 0x3d, 0x30, 0x3b, 0x6c, 0x3c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x3b, 0x6c, 0x2b, 0x2b, 0x29, 0x7b, 0x6c, 0x26, 0x26, 0x28, 0x31, - 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x75, 0x28, 0x29, 0x2c, 0x75, 0x28, - 0x6c, 0x29, 0x29, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, - 0x66, 0x3d, 0x30, 0x3b, 0x66, 0x3c, 0x74, 0x5b, 0x6c, 0x5d, 0x2e, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, 0x2b, 0x2b, 0x29, 0x6e, 0x3d, - 0x74, 0x5b, 0x6c, 0x5d, 0x5b, 0x66, 0x5d, 0x2c, 0x31, 0x3d, 0x3d, 0x3d, - 0x69, 0x3f, 0x22, 0x3c, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, - 0x28, 0x29, 0x2c, 0x72, 0x3d, 0x5b, 0x72, 0x5d, 0x2c, 0x69, 0x3d, 0x33, - 0x29, 0x3a, 0x5f, 0x2b, 0x3d, 0x6e, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x69, - 0x3f, 0x22, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 
0x3d, 0x5f, 0x26, 0x26, 0x22, - 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x69, 0x3d, 0x31, 0x2c, - 0x5f, 0x3d, 0x22, 0x22, 0x29, 0x3a, 0x5f, 0x3d, 0x6e, 0x2b, 0x5f, 0x5b, - 0x30, 0x5d, 0x3a, 0x6f, 0x3f, 0x6e, 0x3d, 0x3d, 0x3d, 0x6f, 0x3f, 0x6f, - 0x3d, 0x22, 0x22, 0x3a, 0x5f, 0x2b, 0x3d, 0x6e, 0x3a, 0x27, 0x22, 0x27, - 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x27, 0x22, 0x3d, 0x3d, 0x3d, - 0x6e, 0x3f, 0x6f, 0x3d, 0x6e, 0x3a, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, - 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x69, 0x3d, 0x31, 0x29, 0x3a, - 0x69, 0x26, 0x26, 0x28, 0x22, 0x3d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, - 0x28, 0x69, 0x3d, 0x35, 0x2c, 0x65, 0x3d, 0x5f, 0x2c, 0x5f, 0x3d, 0x22, - 0x22, 0x29, 0x3a, 0x22, 0x2f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x26, 0x26, - 0x28, 0x69, 0x3c, 0x35, 0x7c, 0x7c, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, - 0x74, 0x5b, 0x6c, 0x5d, 0x5b, 0x66, 0x2b, 0x31, 0x5d, 0x29, 0x3f, 0x28, - 0x75, 0x28, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, - 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2c, 0x69, 0x3d, 0x72, 0x2c, - 0x28, 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2e, 0x70, 0x75, 0x73, - 0x68, 0x28, 0x32, 0x2c, 0x30, 0x2c, 0x69, 0x29, 0x2c, 0x69, 0x3d, 0x30, - 0x29, 0x3a, 0x22, 0x20, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, - 0x5c, 0x74, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x6e, - 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x72, 0x22, 0x3d, - 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x69, 0x3d, 0x32, - 0x29, 0x3a, 0x5f, 0x2b, 0x3d, 0x6e, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, - 0x69, 0x26, 0x26, 0x22, 0x21, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x5f, - 0x26, 0x26, 0x28, 0x69, 0x3d, 0x34, 0x2c, 0x72, 0x3d, 0x72, 0x5b, 0x30, - 0x5d, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x28, - 0x29, 0x2c, 0x72, 0x7d, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x29, 0x2c, - 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x5b, 0x5d, - 0x29, 0x29, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x31, 0x3f, - 0x6e, 0x3a, 0x6e, 0x5b, 0x30, 0x5d, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x6e, - 0x6e, 0x3d, 0x74, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x46, 0x29, - 0x3b, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x7b, 0x4c, 0x20, 0x61, 0x73, - 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x4f, - 0x20, 0x61, 0x73, 0x20, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, - 0x2c, 0x66, 0x20, 0x61, 0x73, 0x20, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, - 0x2c, 0x65, 0x20, 0x61, 0x73, 0x20, 0x62, 0x61, 0x74, 0x63, 0x68, 0x2c, - 0x66, 0x74, 0x20, 0x61, 0x73, 0x20, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x45, - 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x64, 0x20, 0x61, 0x73, 0x20, - 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x2c, 0x73, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x2c, 0x46, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, - 0x57, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, - 0x65, 0x66, 0x2c, 0x62, 0x20, 0x61, 0x73, 0x20, 0x65, 0x66, 0x66, 0x65, - 0x63, 0x74, 0x2c, 0x46, 0x20, 0x61, 0x73, 0x20, 0x68, 0x2c, 0x6e, 0x6e, - 0x20, 0x61, 0x73, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x2c, 0x6c, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x68, 0x79, 0x64, 0x72, 0x61, 0x74, 0x65, 0x2c, 0x77, - 0x20, 0x61, 0x73, 0x20, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x45, - 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x53, 0x20, 0x61, 0x73, 0x20, - 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 
0x2c, 0x75, 0x74, 0x20, 0x61, - 0x73, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2c, 0x73, 0x20, 0x61, - 0x73, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x7a, 0x20, 0x61, - 0x73, 0x20, 0x74, 0x6f, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x72, - 0x61, 0x79, 0x2c, 0x44, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, - 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2c, 0x51, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, - 0x65, 0x64, 0x2c, 0x24, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, - 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x54, 0x74, 0x20, 0x61, - 0x73, 0x20, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x2c, 0x45, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, - 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x56, 0x74, 0x20, 0x61, - 0x73, 0x20, 0x75, 0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x6f, - 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x2c, 0x41, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x49, 0x64, 0x2c, 0x4e, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x49, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x76, 0x65, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x2c, 0x55, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4c, 0x61, 0x79, 0x6f, 0x75, 0x74, - 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x50, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x2c, 0x43, 0x74, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x64, 0x75, 0x63, 0x65, - 0x72, 0x2c, 0x48, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, - 0x65, 0x66, 0x2c, 0x4b, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, - 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x58, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x45, 0x66, - 0x66, 0x65, 0x63, 0x74, 0x2c, 0x77, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x7d, 0x3b, 0x0a + 0x5a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x6e, 0x3d, 0x50, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, 0x48, 0x74, 0x28, 0x28, + 0x29, 0x3d, 0x3e, 0x53, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6e, 0x2e, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, 0x29, 0x2c, 0x5b, 0x5d, + 0x29, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x74, 0x6e, 0x3d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, + 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x3b, 0x6e, 0x5b, 0x30, + 0x5d, 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, + 0x6f, 0x3d, 0x31, 0x3b, 0x6f, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x72, 0x3d, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, 0x2c, 0x75, 0x3d, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3f, 0x28, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x72, + 0x3f, 0x31, 0x3a, 0x32, 0x2c, 0x65, 0x5b, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, + 0x5d, 0x5d, 0x29, 0x3a, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x3b, 0x33, + 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x30, 0x5d, 0x3d, 0x75, 0x3a, + 0x34, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x31, 0x5d, 0x3d, 0x4f, + 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, + 0x28, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, 0x7d, 0x2c, 0x75, 0x29, + 0x3a, 0x35, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x28, 0x69, 0x5b, 0x31, 0x5d, + 0x3d, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, 0x7d, 0x29, 0x5b, 0x6e, + 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x3d, 0x75, 0x3a, 
0x36, 0x3d, 0x3d, + 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x31, 0x5d, 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, + 0x6f, 0x5d, 0x5d, 0x2b, 0x3d, 0x75, 0x2b, 0x22, 0x22, 0x3a, 0x72, 0x3f, + 0x28, 0x5f, 0x3d, 0x74, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x75, + 0x2c, 0x74, 0x6e, 0x28, 0x74, 0x2c, 0x75, 0x2c, 0x65, 0x2c, 0x5b, 0x22, + 0x22, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x5d, 0x29, 0x29, 0x2c, 0x69, 0x2e, + 0x70, 0x75, 0x73, 0x68, 0x28, 0x5f, 0x29, 0x2c, 0x75, 0x5b, 0x30, 0x5d, + 0x3f, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x32, 0x3a, 0x28, 0x6e, 0x5b, + 0x6f, 0x2d, 0x32, 0x5d, 0x3d, 0x30, 0x2c, 0x6e, 0x5b, 0x6f, 0x5d, 0x3d, + 0x5f, 0x29, 0x29, 0x3a, 0x69, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x75, + 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x7d, 0x2c, + 0x6e, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x6e, 0x28, 0x74, + 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x6e, 0x6e, 0x2e, 0x67, + 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7c, 0x7c, 0x28, 0x6e, 0x3d, 0x6e, 0x65, + 0x77, 0x20, 0x4d, 0x61, 0x70, 0x2c, 0x6e, 0x6e, 0x2e, 0x73, 0x65, 0x74, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, 0x29, 0x2c, 0x28, 0x6e, + 0x3d, 0x74, 0x6e, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x2e, 0x67, + 0x65, 0x74, 0x28, 0x74, 0x29, 0x7c, 0x7c, 0x28, 0x6e, 0x2e, 0x73, 0x65, + 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, + 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x3d, 0x31, 0x2c, 0x5f, 0x3d, + 0x22, 0x22, 0x2c, 0x6f, 0x3d, 0x22, 0x22, 0x2c, 0x72, 0x3d, 0x5b, 0x30, + 0x5d, 0x2c, 0x75, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x29, 0x7b, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, + 0x74, 0x7c, 0x7c, 0x28, 0x5f, 0x3d, 0x5f, 0x2e, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, 0x5c, + 0x73, 0x2a, 0x7c, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, 0x5c, 0x73, 0x2a, 0x24, + 0x2f, 0x67, 0x2c, 0x22, 0x22, 0x29, 0x29, 0x29, 0x3f, 0x72, 0x2e, 0x70, + 0x75, 0x73, 0x68, 0x28, 0x30, 0x2c, 0x74, 0x2c, 0x5f, 0x29, 0x3a, 0x33, + 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, 0x74, 0x7c, 0x7c, 0x5f, 0x29, + 0x3f, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x33, 0x2c, 0x74, + 0x2c, 0x5f, 0x29, 0x2c, 0x69, 0x3d, 0x32, 0x29, 0x3a, 0x32, 0x3d, 0x3d, + 0x3d, 0x69, 0x26, 0x26, 0x22, 0x2e, 0x2e, 0x2e, 0x22, 0x3d, 0x3d, 0x3d, + 0x5f, 0x26, 0x26, 0x74, 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x34, 0x2c, 0x74, 0x2c, 0x30, 0x29, 0x3a, 0x32, 0x3d, 0x3d, 0x3d, 0x69, + 0x26, 0x26, 0x5f, 0x26, 0x26, 0x21, 0x74, 0x3f, 0x72, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x35, 0x2c, 0x30, 0x2c, 0x21, 0x30, 0x2c, 0x5f, 0x29, + 0x3a, 0x69, 0x3e, 0x3d, 0x35, 0x26, 0x26, 0x28, 0x28, 0x5f, 0x7c, 0x7c, + 0x21, 0x74, 0x26, 0x26, 0x35, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x26, 0x26, + 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x2c, 0x30, 0x2c, + 0x5f, 0x2c, 0x65, 0x29, 0x2c, 0x69, 0x3d, 0x36, 0x29, 0x2c, 0x74, 0x26, + 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x2c, 0x74, + 0x2c, 0x30, 0x2c, 0x65, 0x29, 0x2c, 0x69, 0x3d, 0x36, 0x29, 0x29, 0x2c, + 0x5f, 0x3d, 0x22, 0x22, 0x7d, 0x2c, 0x66, 0x3d, 0x30, 0x3b, 0x66, 0x3c, + 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, 0x2b, 0x2b, + 0x29, 0x7b, 0x66, 0x26, 0x26, 0x28, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x26, + 0x26, 0x75, 0x28, 0x29, 0x2c, 0x75, 0x28, 0x66, 0x29, 
0x29, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6c, 0x3d, 0x30, 0x3b, 0x6c, + 0x3c, 0x74, 0x5b, 0x66, 0x5d, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x6c, 0x2b, 0x2b, 0x29, 0x6e, 0x3d, 0x74, 0x5b, 0x66, 0x5d, 0x5b, + 0x6c, 0x5d, 0x2c, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x3f, 0x22, 0x3c, 0x22, + 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x72, 0x3d, + 0x5b, 0x72, 0x5d, 0x2c, 0x69, 0x3d, 0x33, 0x29, 0x3a, 0x5f, 0x2b, 0x3d, + 0x6e, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x69, 0x3f, 0x22, 0x2d, 0x2d, 0x22, + 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, + 0x6e, 0x3f, 0x28, 0x69, 0x3d, 0x31, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x29, + 0x3a, 0x5f, 0x3d, 0x6e, 0x2b, 0x5f, 0x5b, 0x30, 0x5d, 0x3a, 0x6f, 0x3f, + 0x6e, 0x3d, 0x3d, 0x3d, 0x6f, 0x3f, 0x6f, 0x3d, 0x22, 0x22, 0x3a, 0x5f, + 0x2b, 0x3d, 0x6e, 0x3a, 0x27, 0x22, 0x27, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, + 0x7c, 0x22, 0x27, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x6f, 0x3d, 0x6e, + 0x3a, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, + 0x29, 0x2c, 0x69, 0x3d, 0x31, 0x29, 0x3a, 0x69, 0x26, 0x26, 0x28, 0x22, + 0x3d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x69, 0x3d, 0x35, 0x2c, + 0x65, 0x3d, 0x5f, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x29, 0x3a, 0x22, 0x2f, + 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x28, 0x69, 0x3c, 0x35, 0x7c, + 0x7c, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x74, 0x5b, 0x66, 0x5d, 0x5b, + 0x6c, 0x2b, 0x31, 0x5d, 0x29, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x33, + 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, 0x72, 0x3d, 0x72, 0x5b, 0x30, + 0x5d, 0x29, 0x2c, 0x69, 0x3d, 0x72, 0x2c, 0x28, 0x72, 0x3d, 0x72, 0x5b, + 0x30, 0x5d, 0x29, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x32, 0x2c, 0x30, + 0x2c, 0x69, 0x29, 0x2c, 0x69, 0x3d, 0x30, 0x29, 0x3a, 0x22, 0x20, 0x22, + 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x74, 0x22, 0x3d, 0x3d, + 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, + 0x7c, 0x7c, 0x22, 0x5c, 0x72, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, + 0x75, 0x28, 0x29, 0x2c, 0x69, 0x3d, 0x32, 0x29, 0x3a, 0x5f, 0x2b, 0x3d, + 0x6e, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x22, 0x21, + 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x69, 0x3d, + 0x34, 0x2c, 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x7d, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x28, 0x29, 0x2c, 0x72, 0x7d, 0x28, + 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x29, 0x2c, 0x61, 0x72, 0x67, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x5b, 0x5d, 0x29, 0x29, 0x2e, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x31, 0x3f, 0x6e, 0x3a, 0x6e, 0x5b, 0x30, + 0x5d, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x6e, 0x3d, 0x65, 0x6e, 0x2e, + 0x62, 0x69, 0x6e, 0x64, 0x28, 0x57, 0x29, 0x3b, 0x65, 0x78, 0x70, 0x6f, + 0x72, 0x74, 0x7b, 0x49, 0x20, 0x61, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, + 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x52, 0x20, 0x61, 0x73, 0x20, 0x46, + 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x63, 0x20, 0x61, 0x73, + 0x20, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x65, 0x20, 0x61, 0x73, + 0x20, 0x62, 0x61, 0x74, 0x63, 0x68, 0x2c, 0x63, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x2c, 0x79, 0x20, 0x61, 0x73, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, + 0x74, 0x65, 0x64, 0x2c, 0x68, 0x74, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, + 0x57, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, + 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x4c, 0x20, 
0x61, 0x73, 0x20, + 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x66, 0x2c, 0x53, 0x20, + 0x61, 0x73, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x57, 0x20, + 0x61, 0x73, 0x20, 0x68, 0x2c, 0x5f, 0x6e, 0x20, 0x61, 0x73, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x2c, 0x73, 0x74, 0x20, 0x61, 0x73, 0x20, 0x68, 0x79, + 0x64, 0x72, 0x61, 0x74, 0x65, 0x2c, 0x45, 0x20, 0x61, 0x73, 0x20, 0x69, + 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x2c, 0x77, 0x20, 0x61, 0x73, 0x20, 0x6f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x2c, 0x6c, 0x74, 0x20, 0x61, 0x73, 0x20, 0x72, 0x65, 0x6e, + 0x64, 0x65, 0x72, 0x2c, 0x68, 0x20, 0x61, 0x73, 0x20, 0x73, 0x69, 0x67, + 0x6e, 0x61, 0x6c, 0x2c, 0x4b, 0x20, 0x61, 0x73, 0x20, 0x74, 0x6f, 0x43, + 0x68, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2c, 0x72, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x64, + 0x2c, 0x54, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x61, + 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2c, 0x59, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, + 0x2c, 0x56, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, + 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x41, 0x74, 0x20, 0x61, 0x73, 0x20, + 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x48, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x45, + 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x46, 0x74, 0x20, 0x61, 0x73, 0x20, + 0x75, 0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x6f, 0x75, 0x6e, + 0x64, 0x61, 0x72, 0x79, 0x2c, 0x4d, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x49, 0x64, 0x2c, 0x24, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x49, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, + 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x2c, 0x4e, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x4c, 0x61, 0x79, 0x6f, 0x75, 0x74, 0x45, 0x66, + 0x66, 0x65, 0x63, 0x74, 0x2c, 0x44, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x2c, 0x55, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x64, 0x75, 0x63, 0x65, 0x72, 0x2c, + 0x50, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, + 0x2c, 0x58, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, + 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x5a, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x45, 0x66, 0x66, 0x65, + 0x63, 0x74, 0x2c, 0x45, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, + 0x53, 0x74, 0x61, 0x74, 0x65, 0x7d, 0x3b, 0x0a }; -unsigned int index_js_len = 22174; +unsigned int index_js_len = 22472; diff --git a/examples/server/json-schema-to-grammar.mjs.hpp b/examples/server/json-schema-to-grammar.mjs.hpp new file mode 100644 index 000000000..0a05c369d --- /dev/null +++ b/examples/server/json-schema-to-grammar.mjs.hpp @@ -0,0 +1,311 @@ +unsigned char json_schema_to_grammar_mjs[] = { + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, + 0x52, 0x55, 0x4c, 0x45, 0x20, 0x3d, 0x20, 0x27, 0x22, 0x20, 0x22, 0x3f, + 0x27, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x52, + 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x53, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x62, 0x6f, 0x6f, 0x6c, + 0x65, 0x61, 0x6e, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x74, 0x72, 0x75, 0x65, + 0x22, 0x20, 0x7c, 0x20, 0x22, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x22, 0x29, + 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x6e, + 0x75, 0x6d, 
0x62, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, 0x22, + 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, 0x5b, + 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2a, 0x29, + 0x29, 0x20, 0x28, 0x22, 0x2e, 0x22, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, + 0x2b, 0x29, 0x3f, 0x20, 0x28, 0x5b, 0x65, 0x45, 0x5d, 0x20, 0x5b, 0x2d, + 0x2b, 0x5d, 0x3f, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2b, 0x29, 0x3f, + 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x69, + 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, + 0x22, 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, + 0x5b, 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2a, + 0x29, 0x29, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x60, 0x20, 0x22, + 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x5b, 0x5e, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, 0x5d, 0x20, + 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x22, 0x5c, + 0x5c, 0x5c, 0x5c, 0x22, 0x20, 0x28, 0x5b, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, + 0x2f, 0x62, 0x66, 0x6e, 0x72, 0x74, 0x5d, 0x20, 0x7c, 0x20, 0x22, 0x75, + 0x22, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2a, 0x20, + 0x22, 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, + 0x2c, 0x0a, 0x20, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x20, 0x27, 0x22, + 0x6e, 0x75, 0x6c, 0x6c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x2c, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, 0x52, 0x45, 0x20, 0x3d, 0x20, + 0x2f, 0x5b, 0x5e, 0x5c, 0x64, 0x41, 0x2d, 0x5a, 0x61, 0x2d, 0x7a, 0x2d, + 0x5d, 0x2b, 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, + 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, 0x52, + 0x45, 0x20, 0x3d, 0x20, 0x2f, 0x5b, 0x5c, 0x6e, 0x5c, 0x72, 0x22, 0x5d, + 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x52, + 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, 0x52, 0x41, + 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x53, 0x20, 0x3d, 0x20, + 0x7b, 0x27, 0x5c, 0x72, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x72, 0x27, + 0x2c, 0x20, 0x27, 0x5c, 0x6e, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x6e, + 0x27, 0x2c, 0x20, 0x27, 0x22, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x22, + 0x27, 0x7d, 0x3b, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, + 0x72, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, + 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x7c, + 0x7c, 0x20, 0x7b, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, + 0x69, 0x73, 
0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x20, 0x3d, 0x20, + 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, + 0x65, 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x27, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x27, 0x2c, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, 0x52, 0x55, + 0x4c, 0x45, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x4a, 0x53, + 0x4f, 0x4e, 0x2e, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, + 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x2e, 0x72, 0x65, + 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, + 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, + 0x52, 0x45, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x20, + 0x3d, 0x3e, 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, + 0x49, 0x54, 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, + 0x45, 0x53, 0x5b, 0x6d, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x60, 0x22, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x7d, + 0x22, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x5f, + 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, + 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, + 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, + 0x52, 0x45, 0x2c, 0x20, 0x27, 0x2d, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, 0x65, 0x73, + 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x28, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, + 0x69, 0x20, 0x3d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, + 0x60, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 
0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, 0x60, 0x24, 0x7b, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x69, 0x7d, + 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, + 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x6b, 0x65, 0x79, 0x2c, 0x20, 0x72, + 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, + 0x3d, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x7c, 0x7c, 0x20, 0x27, 0x72, 0x6f, 0x6f, + 0x74, 0x27, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x6f, 0x6e, 0x65, 0x4f, + 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, + 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, + 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, + 0x6f, 0x6e, 0x65, 0x4f, 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x2e, 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x2e, 0x6d, + 0x61, 0x70, 0x28, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, + 0x73, 0x69, 0x74, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, + 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, + 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, + 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x72, + 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, + 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x65, + 0x6e, 0x75, 0x6d, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 
0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x3d, + 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x65, 0x6e, 0x75, 0x6d, + 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x76, 0x20, 0x3d, 0x3e, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, + 0x74, 0x65, 0x72, 0x61, 0x6c, 0x28, 0x76, 0x29, 0x29, 0x2e, 0x6a, 0x6f, + 0x69, 0x6e, 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, + 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, + 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x27, 0x20, 0x26, 0x26, + 0x20, 0x27, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, + 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x54, 0x4f, 0x44, 0x4f, 0x3a, 0x20, 0x60, 0x72, 0x65, 0x71, 0x75, 0x69, + 0x72, 0x65, 0x64, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, + 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x70, 0x79, 0x74, 0x68, 0x6f, + 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, + 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, + 0x72, 0x6f, 0x70, 0x50, 0x61, 0x69, 0x72, 0x73, 0x20, 0x3d, 0x20, 0x4f, + 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x65, 0x6e, 0x74, 0x72, 0x69, 0x65, + 0x73, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x29, 0x2e, 0x73, 0x6f, 0x72, + 0x74, 0x28, 0x28, 0x61, 0x2c, 0x20, 0x62, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x73, 0x6f, 0x72, 0x74, 0x20, 0x62, 0x79, 0x20, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, + 0x70, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x28, 0x69, 0x66, 0x20, + 0x73, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x65, 0x64, 0x29, 0x20, 0x74, + 0x68, 0x65, 0x6e, 0x20, 0x62, 0x79, 0x20, 0x6b, 0x65, 0x79, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x3d, 0x20, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x5b, 0x61, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x27, 0x20, 0x3f, 0x20, + 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x61, 0x5b, + 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, 0x6e, 0x66, 0x69, 0x6e, 0x69, + 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x42, + 0x20, 0x3d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, + 0x5d, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, + 0x72, 0x27, 
0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, + 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x2d, 0x20, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x42, 0x20, 0x7c, 0x7c, 0x20, 0x61, 0x5b, 0x30, 0x5d, 0x2e, + 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, + 0x65, 0x28, 0x62, 0x5b, 0x30, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x3d, + 0x20, 0x27, 0x22, 0x7b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x70, + 0x50, 0x61, 0x69, 0x72, 0x73, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, + 0x68, 0x28, 0x28, 0x5b, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x5d, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x70, 0x72, 0x6f, 0x70, 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, + 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, 0x73, + 0x69, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, + 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x4e, + 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, 0x20, 0x3e, 0x20, + 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, 0x27, + 0x20, 0x22, 0x2c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, + 0x2b, 0x3d, 0x20, 0x60, 0x20, 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x29, + 0x7d, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x22, 0x3a, 0x22, 0x20, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, + 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, + 0x27, 0x20, 0x22, 0x7d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, + 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, + 0x6d, 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, + 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x61, 0x72, 0x72, 0x61, 0x79, 0x27, + 0x20, 0x26, 0x26, 0x20, 0x27, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x27, 0x20, + 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x54, 0x4f, + 0x44, 0x4f, 0x20, 0x60, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x49, 0x74, + 0x65, 0x6d, 0x73, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, + 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x70, 0x79, 0x74, 0x68, 0x6f, + 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, + 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x2c, 0x20, 0x60, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, + 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, 0x3a, 0x20, 0x22, 0x22, 0x7d, + 0x69, 0x74, 0x65, 0x6d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, + 0x20, 0x3d, 0x20, 0x60, 0x22, 0x5b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x20, 0x28, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, 0x6c, + 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x28, 0x22, 0x2c, 0x22, 0x20, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, + 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x29, 0x2a, 0x29, + 0x3f, 0x20, 0x22, 0x5d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, + 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, + 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x50, 0x52, + 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x53, 0x5b, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, + 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x55, 0x6e, 0x72, 0x65, 0x63, 0x6f, + 0x67, 0x6e, 0x69, 0x7a, 0x65, 0x64, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x3a, 0x20, 0x24, 0x7b, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, + 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, + 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, + 0x3f, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, 0x3a, 0x20, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x50, 0x52, 0x49, 0x4d, 0x49, + 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x53, 0x5b, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x5d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x28, 0x29, + 0x20, 0x7b, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x67, + 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x3d, 0x20, 0x27, 0x27, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, + 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, + 0x28, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x2b, 0x3d, 0x20, + 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x3a, 0x3a, 0x3d, + 0x20, 0x24, 0x7b, 0x72, 0x75, 0x6c, 0x65, 0x7d, 0x5c, 0x6e, 0x60, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x72, 0x61, 0x6d, + 0x6d, 0x61, 0x72, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a
+};
+unsigned int json_schema_to_grammar_mjs_len = 3695;
diff --git a/examples/server/public/index.html b/examples/server/public/index.html
index de41da187..5eedb0b28 100644
--- a/examples/server/public/index.html
+++ b/examples/server/public/index.html
@@ -141,14 +141,15 @@
       } from '/index.js';
 
       import { llama } from '/completion.js';
+      import { SchemaConverter } from '/json-schema-to-grammar.mjs';
 
       const session = signal({
-        prompt: "This is a conversation between user and llama, a friendly chatbot. respond in simple markdown.",
+        prompt: "This is a conversation between User and Llama, a friendly chatbot. Llama is helpful, kind, honest, good at writing, and never fails to answer any requests immediately and with precision.",
         template: "{{prompt}}\n\n{{history}}\n{{char}}:",
         historyTemplate: "{{name}}: {{message}}",
         transcript: [],
         type: "chat",
-        char: "llama",
+        char: "Llama",
         user: "User",
       })
 
@@ -166,8 +167,139 @@
         mirostat: 0, // 0/1/2
         mirostat_tau: 5, // target entropy
         mirostat_eta: 0.1, // learning rate
+        grammar: '',
       })
 
+      /* START: Support for storing prompt templates and parameters in browser LocalStorage */
+
+      const local_storage_storageKey = "llamacpp_server_local_storage";
+
+      function local_storage_setDataFromObject(tag, content) {
+        localStorage.setItem(local_storage_storageKey + '/' + tag, JSON.stringify(content));
+      }
+
+      function local_storage_setDataFromRawText(tag, content) {
+        localStorage.setItem(local_storage_storageKey + '/' + tag, content);
+      }
+
+      function local_storage_getDataAsObject(tag) {
+        const item = localStorage.getItem(local_storage_storageKey + '/' + tag);
+        if (!item) {
+          return null;
+        } else {
+          return JSON.parse(item);
+        }
+      }
+
+      function local_storage_getDataAsRawText(tag) {
+        const item = localStorage.getItem(local_storage_storageKey + '/' + tag);
+        if (!item) {
+          return null;
+        } else {
+          return item;
+        }
+      }
+
+      // create a container for user templates and settings
+
+      const savedUserTemplates = signal({})
+      const selectedUserTemplate = signal({ name: '', template: { session: {}, params: {} } })
+
+      // let's import locally saved templates and settings if there are any
+      // user templates and settings are stored in one object
+      // in form of { "templatename": "templatedata" } and { "settingstemplatename":"settingsdata" }
+
+      console.log('Importing saved templates')
+
+      let importedTemplates = local_storage_getDataAsObject('user_templates')
+
+      if (importedTemplates) {
+        // saved templates were successfully imported.
+
+        console.log('Processing saved templates and updating default template')
+
+        //console.log(importedTemplates);
+        savedUserTemplates.value = importedTemplates;
+
+        //override default template
+        savedUserTemplates.value.default = { session: session.value, params: params.value }
+        local_storage_setDataFromObject('user_templates', savedUserTemplates.value)
+      } else {
+        // no saved templates detected.
+
+        console.log('Initializing LocalStorage and saving default template')
+
+        savedUserTemplates.value = { "default": { session: session.value, params: params.value } }
+        local_storage_setDataFromObject('user_templates', savedUserTemplates.value)
+      }
+
+      function userTemplateResetToDefault() {
+        console.log('Resetting template to default')
+        selectedUserTemplate.value.name = 'default';
+        selectedUserTemplate.value.data = savedUserTemplates.value['default'];
+      }
+
+      function userTemplateApply(t) {
+        session.value = t.data.session;
+        params.value = t.data.params;
+      }
+
+      function userTemplateResetToDefaultAndApply() {
+        userTemplateResetToDefault()
+        userTemplateApply(selectedUserTemplate.value)
+      }
+
+      function userTemplateLoadAndApplyAutosaved() {
+        // get autosaved last used template
+        let lastUsedTemplate = local_storage_getDataAsObject('user_templates_last')
+
+        if (lastUsedTemplate) {
+          console.log('Autosaved template found, restoring')
+
+          selectedUserTemplate.value = lastUsedTemplate
+        }
+        else {
+          console.log('No autosaved template found, using default template')
+          // no autosaved last used template was found, so load from default.
+
+          userTemplateResetToDefault()
+        }
+
+        console.log('Applying template')
+        // and update internal data from templates
+
+        userTemplateApply(selectedUserTemplate.value)
+      }
+
+      //console.log(savedUserTemplates.value)
+      //console.log(selectedUserTemplate.value)
+
+      function userTemplateAutosave() {
+        console.log('Template Autosave...')
+        if (selectedUserTemplate.value.name == 'default') {
+          // we don't want to save over the default template, so let's create a new one
+          let newTemplateName = 'UserTemplate-' + Date.now().toString()
+          let newTemplate = { 'name': newTemplateName, 'data': { 'session': session.value, 'params': params.value } }
+
+          console.log('Saving as ' + newTemplateName)
+
+          // save in the autosave slot
+          local_storage_setDataFromObject('user_templates_last', newTemplate)
+
+          // and load it back and apply
+          userTemplateLoadAndApplyAutosaved()
+        } else {
+          local_storage_setDataFromObject('user_templates_last', { 'name': selectedUserTemplate.value.name, 'data': { 'session': session.value, 'params': params.value } })
+        }
+      }
+
+      console.log('Checking for autosaved last used template')
+      userTemplateLoadAndApplyAutosaved()
+
+      /* END: Support for storing prompt templates and parameters in browser LocalStorage */
+
       const llamaStats = signal(null)
       const controller = signal(null)
 
@@ -304,6 +436,26 @@
       const updateParamsFloat = (el) => params.value = { ...params.value, [el.target.name]: parseFloat(el.target.value) }
       const updateParamsInt = (el) => params.value = { ...params.value, [el.target.name]: Math.floor(parseFloat(el.target.value)) }
 
+      const grammarJsonSchemaPropOrder = signal('')
+      const updateGrammarJsonSchemaPropOrder = (el) => grammarJsonSchemaPropOrder.value = el.target.value
+      const convertJSONSchemaGrammar = () => {
+        try {
+          const schema = JSON.parse(params.value.grammar)
+          const converter = new SchemaConverter(
+            grammarJsonSchemaPropOrder.value
+              .split(',')
+              .reduce((acc, cur, i) => ({...acc, [cur.trim()]: i}), {})
+          )
+          converter.visit(schema, '')
+          params.value = {
+            ...params.value,
+            grammar: converter.formatGrammar(),
+          }
+        } catch (e) {
+          alert(`Convert failed: ${e.message}`)
+        }
+      }
+
       const FloatField = ({label, max, min, name, step, value}) => {
         return html`
@@ -324,8 +476,34 @@
         `
       };
 
+      const userTemplateReset = (e) => {
+        e.preventDefault();
+        userTemplateResetToDefaultAndApply()
+      }
+
+      const UserTemplateResetButton = () => {
+        if (selectedUserTemplate.value.name == 'default') {
+          return html`
+            <button disabled>Using default template</button>
+          `
+        }
+
+        return html`
+          <button onclick=${userTemplateReset}>Reset all to default</button>
+        `
+      };
+
+      useEffect(() => {
+        // autosave template on every change
+        userTemplateAutosave()
+      }, [session.value, params.value])
+
       return html`
+            <fieldset>
+              <${UserTemplateResetButton}/>
+            </fieldset>
+
    @@ -355,6 +533,13 @@
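Note: the patch above wires SchemaConverter into the web UI, but the flow is easier to follow in isolation. The sketch below exercises the same API the diff uses (new SchemaConverter(propOrder), visit(schema, ''), formatGrammar()); the sample schema and the commented output are illustrative assumptions, not output captured from the server.

    // Build the prop-order map the same way convertJSONSchemaGrammar does:
    // 'name,age' -> { name: 0, age: 1 } (listed properties sort first in object rules)
    import { SchemaConverter } from '/json-schema-to-grammar.mjs'

    const propOrder = 'name,age'
      .split(',')
      .reduce((acc, cur, i) => ({ ...acc, [cur.trim()]: i }), {})

    const converter = new SchemaConverter(propOrder)

    // visit() walks the JSON schema and registers one GBNF rule per node;
    // the top-level rule is always named 'root'
    converter.visit({
      type: 'object',
      properties: {
        name: { type: 'string' },
        age: { type: 'number' },
      },
    }, '')

    // formatGrammar() joins the rule map into '<name> ::= <rule>' lines,
    // which the UI then stores back into params.value.grammar
    console.log(converter.formatGrammar())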