From fc4779d4dcb6ccce51525e309702637852ed3a53 Mon Sep 17 00:00:00 2001
From: X0RSH1FT
Date: Thu, 28 Dec 2023 21:55:40 -0500
Subject: [PATCH] Simplified scripts. Added environment variables. Renamed functions.

---
 bin/build.bat      |   4 +-
 bin/env.config.ps1 |  28 -----------
 bin/env.ps1        |  23 +++++++++
 bin/help.bat       |   4 +-
 bin/llama.ps1      | 117 +++++++++++++++++++++++++++------------------
 bin/test.bat       |   4 +-
 6 files changed, 100 insertions(+), 80 deletions(-)
 delete mode 100644 bin/env.config.ps1
 create mode 100644 bin/env.ps1

diff --git a/bin/build.bat b/bin/build.bat
index 19b0d5a29..d84b27c42 100644
--- a/bin/build.bat
+++ b/bin/build.bat
@@ -1,4 +1,4 @@
 @echo off
 @title LLAMA.CPP - BUILD
-PowerShell -NoProfile -ExecutionPolicy Bypass -Command "& '.\llama.ps1'" "-build"
-pause
+PowerShell -ExecutionPolicy Bypass -File "%~dp0llama.ps1" -build
+pause
\ No newline at end of file
diff --git a/bin/env.config.ps1 b/bin/env.config.ps1
deleted file mode 100644
index 74a25ad9d..000000000
--- a/bin/env.config.ps1
+++ /dev/null
@@ -1,28 +0,0 @@
-Set-StrictMode -Version 3.0
-
-#region Directories
-
-# Project directory
-$PROJ_DIR = Resolve-Path "$PSScriptRoot\.."
-
-# Build directory
-$BUILD_DIR = "$PROJ_DIR\build"
-
-#endregion
-
-#region Files
-
-# LLAMA.CPP executable
-$LLAMA_EXE = "$BUILD_DIR\bin\Release\main.exe"
-
-#endregion
-
-#region Test data
-
-# Test model
-$TEST_MODEL_PATH = "F:\Models\Wizard-Vicuna-13B-Uncensored-GGML\Wizard-Vicuna-13B-Uncensored.ggmlv3.q4_0.bin"
-
-# Test prompt
-$TEST_PROMPT = "The life of a cyberpunk is"
-
-#endregion
\ No newline at end of file
diff --git a/bin/env.ps1 b/bin/env.ps1
new file mode 100644
index 000000000..6ecc047f1
--- /dev/null
+++ b/bin/env.ps1
@@ -0,0 +1,23 @@
+<#
+#* Environment configuration script for LLAMA.CPP
+#! Sets environment variables for the current user
+#>
+Set-StrictMode -Version 3.0
+Write-Verbose "Loading environment configuration - $PSScriptRoot\env.ps1"
+
+#region Parameters
+# LLAMA.CPP executable path
+$llama_cpp_binary_path = "$PSScriptRoot\..\build\bin\Release\main.exe"
+# Default model path for environment
+$llm_model_path = "Z:\AI\LLM\MythoMax-L2-13B-GGUF\mythomax-l2-13b.Q4_K_M.gguf"
+#endregion
+
+#region Set current environment variables
+$env:LLAMA_CPP = Resolve-Path "$llama_cpp_binary_path"
+$env:LLM_MODEL_PATH = Resolve-Path "$llm_model_path"
+#endregion
+
+#region Persistent environment variables
+# [Environment]::SetEnvironmentVariable("LLAMA_CPP", "$env:LLAMA_CPP", "User")
+# [Environment]::SetEnvironmentVariable("LLM_MODEL_PATH", "$env:LLM_MODEL_PATH", "User")
+#endregion
\ No newline at end of file
diff --git a/bin/help.bat b/bin/help.bat
index 363d499c1..2f15b379e 100644
--- a/bin/help.bat
+++ b/bin/help.bat
@@ -1,4 +1,4 @@
 @echo off
 @title LLAMA.CPP - HELP
-PowerShell -NoProfile -ExecutionPolicy Bypass -Command "& '.\llama.ps1'" "-help"
-pause
+PowerShell -ExecutionPolicy Bypass -File "%~dp0llama.ps1" -help
+pause
\ No newline at end of file
diff --git a/bin/llama.ps1 b/bin/llama.ps1
index 4c931524a..8a2775d3c 100644
--- a/bin/llama.ps1
+++ b/bin/llama.ps1
@@ -1,5 +1,12 @@
+<#
+#* Utility PowerShell script to invoke the LLAMA.CPP binary
+#! This script is used to invoke the LLAMA.CPP binary with configured environment paths and required arguments
+#>
 param(
+    [string] $prompt,        # Prompt text to complete
+    [string] $file,          # File path to complete
     [switch] $clear,         # Clear console prior to execution
+    [switch] $console,       # Start the text completion console
     [switch] $debug,         # Enable debug output
     [switch] $verbose,       # Enable verbose output
     [switch] $build,         # Build the executable
@@ -15,87 +22,92 @@
 if ($clear) { Clear-Host }
 if ($debug) { $DebugPreference = "Continue" }
 if ($verbose) { $VerbosePreference = "Continue" }
 
-# Import environment configuration
-. "$PSScriptRoot\env.config.ps1"
+# Import environment configuration if one exists
+if ((Test-Path "$PSScriptRoot\env.ps1")) { . "$PSScriptRoot\env.ps1" }
+# else { throw "Could not find environment configuration at path - '$PSScriptRoot\env.ps1'" }
 
 #endregion
 
 #region Operations
 
-function ExecuteCommand
-{
-    param(
-        [string] $command,
-        [string] $cwd = $PROJ_DIR
-    )
-    Write-Verbose "Executing command: $command"
-    Write-Verbose "Working directory: $cwd"
-
-    $cur_dir = Get-Location
-    Set-Location $cwd
-
-    Invoke-Expression $command
-
-    Set-Location $cur_dir
-}
-
+<# Compile LLAMA.CPP binary #>
 function Build
 {
     param(
-        [string] $build_path = $BUILD_DIR
+        [string] $build_path = $null
    )
-
     $cur_dir = Get-Location
-
     try {
+        # Set the build path to the default if not specified
+        if (!$build_path) { $build_path = "$PSScriptRoot\..\build" }
         # Test if the build directory exists and create it if needed
         if (!(Test-Path -Path $build_path -PathType Container)) { New-Item -Path $build_path -ItemType Directory | Out-Null }
 
-        # Set the location to the build directory
         Set-Location "$build_path"
 
-        # Run the build commands
         cmake ..
         cmake --build . --config Release
     }
-    catch { Write-Error "An error occurred during the build process: $_" }
+    catch { Write-Error "An error occurred during the build process - $_" }
     finally { Set-Location $cur_dir }
 }
 
-function Validate
-{
-    # Check that llama.exe exists
-    if (!(Test-Path "$LLAMA_EXE")) { throw "Could not find llama.exe at path - '$LLAMA_EXE'"}
-}
+<# Test that a compiled LLAMA.CPP binary exists. Raise an exception if not #>
+function Validate { if (!(Test-Path "$env:LLAMA_CPP")) { throw "Could not find llama.exe at path - '$env:LLAMA_CPP'"} }
 
-function GenerateTextFromPrompt
+<# Complete text from an input prompt #>
+function Complete-Text
 {
     param(
-        [string] $model_path,
         [string] $prompt,
+        [string] $model_path = $null,
         [int] $context_size = 2048,
         [int] $thread_cnt = 4
     )
     Validate
-    $arguments = "-m '$model_path' -p '$prompt' -c '$context_size' -t '$thread_cnt' --color"
-    ExecuteCommand "$LLAMA_EXE $arguments"
+    if (!$model_path) { $model_path = $env:LLM_MODEL_PATH }
+    $arg_map = @{
+        "-m" = $model_path
+        "-p" = $prompt
+        "-c" = $context_size
+        "-t" = $thread_cnt
+    }
+    $opt_set = @(
+        "--color"
+    )
+    $arguments = $arg_map.GetEnumerator() | ForEach-Object { "$($_.Key) '$($_.Value)'" }
+    $options = $opt_set | ForEach-Object { "$($_)" }
+    Invoke-Expression "$env:LLAMA_CPP $arguments $options"
 }
 
-function GenerateTextFromFile
+<# Complete text from an input file #>
+function Complete-TextFile
 {
     param(
-        [string] $model_path,
         [string] $file,
+        [string] $model_path = $null,
         [int] $context_size = 2048,
         [int] $thread_cnt = 4
     )
     Validate
-    $arguments = "-m '$model_path' -f '$file' -c '$context_size' -t '$thread_cnt' --color"
-    ExecuteCommand "$LLAMA_EXE $arguments"
+    if (!$model_path) { $model_path = $env:LLM_MODEL_PATH }
+    $arg_map = @{
+        "-m" = $model_path
+        "-f" = $file
+        "-c" = $context_size
+        "-t" = $thread_cnt
+    }
+    $opt_set = @(
+        "--color"
+    )
+    $arguments = $arg_map.GetEnumerator() | ForEach-Object { "$($_.Key) '$($_.Value)'" }
+    $options = $opt_set | ForEach-Object { "$($_)" }
+    Invoke-Expression "$env:LLAMA_CPP $arguments $options"
 }
 
-function GenerateInteractiveText
+<# Start text completion console #>
+function Start-TextCompletionConsole
 {
     param(
         [string] $model_path,
@@ -103,26 +115,39 @@ function GenerateInteractiveText
         [int] $context_size = 2048,
         [int] $thread_cnt = 4
     )
     Validate
-    $arguments = "-m '$model_path' --interactive-first -c '$context_size' -t '$thread_cnt' --color"
-    ExecuteCommand "$LLAMA_EXE $arguments" # Wait for input before starting
+    if (!$model_path) { $model_path = $env:LLM_MODEL_PATH }
+    $arg_map = @{
+        "-m" = $model_path
+        "-c" = $context_size
+        "-t" = $thread_cnt
+    }
+    $opt_set = @(
+        "--interactive-first",
+        "--color"
+    )
+    $arguments = $arg_map.GetEnumerator() | ForEach-Object { "$($_.Key) '$($_.Value)'" }
+    $options = $opt_set | ForEach-Object { "$($_)" }
+    Invoke-Expression "$env:LLAMA_CPP $arguments $options"
 }
 
+<# Invoke help details #>
 function Help
 {
     Validate
-    ExecuteCommand "$LLAMA_EXE --help"
+    Invoke-Expression "$env:LLAMA_CPP --help"
 }
 
-function Test
-{
-    GenerateTextFromPrompt "$TEST_MODEL_PATH" "$TEST_PROMPT"
-}
+<# Invoke test function #>
+function Test { Complete-Text "The life of a cyberpunk is" }
 
 #endregion
 
 #region Execution
 
 if ($build) { Build }
+if ($prompt) { Complete-Text -prompt "$prompt" }
+if ($file) { Complete-TextFile -file "$file" }
+if ($console) { Start-TextCompletionConsole }
 if ($test) { Test }
 if ($help) { Help }
diff --git a/bin/test.bat b/bin/test.bat
index bbf9d9c33..9695eb732 100644
--- a/bin/test.bat
+++ b/bin/test.bat
@@ -1,4 +1,4 @@
 @echo off
 @title LLAMA.CPP - TEST
-PowerShell -NoProfile -ExecutionPolicy Bypass -Command "& '.\llama.ps1'" "-test"
-pause
+PowerShell -ExecutionPolicy Bypass -File "%~dp0llama.ps1" -test
+pause
\ No newline at end of file