Simplified scripts. Added environment variables. Renamed functions.

This commit is contained in:
X0RSH1FT 2023-12-28 21:55:40 -05:00
parent 0a21ad6e3f
commit fc4779d4dc
6 changed files with 100 additions and 80 deletions

View file

@ -1,4 +1,4 @@
@echo off
@title LLAMA.CPP - BUILD
REM Launch the build operation through the PowerShell driver script.
REM -NoProfile skips user-profile loading for a faster, reproducible start.
REM %~dp0 expands to this script's own directory, so the launcher works
REM regardless of the current working directory.
PowerShell -NoProfile -ExecutionPolicy Bypass -File "%~dp0llama.ps1" -build
pause

View file

@ -1,28 +0,0 @@
# Legacy environment configuration (superseded by bin\env.ps1, which uses
# process environment variables instead of script-scope globals).
Set-StrictMode -Version 3.0
#region Directories
# Project directory (parent of the directory containing this script)
# NOTE(review): Resolve-Path errors if the path does not exist — assumes the
# script lives one level below the project root; confirm layout.
$PROJ_DIR = Resolve-Path "$PSScriptRoot\.."
# Build directory (cmake output tree)
$BUILD_DIR = "$PROJ_DIR\build"
#endregion
#region Files
# LLAMA.CPP executable produced by a Release build
$LLAMA_EXE = "$BUILD_DIR\bin\Release\main.exe"
#endregion
#region Test data
# Test model — machine-specific absolute path; adjust per host
$TEST_MODEL_PATH = "F:\Models\Wizard-Vicuna-13B-Uncensored-GGML\Wizard-Vicuna-13B-Uncensored.ggmlv3.q4_0.bin"
# Test prompt used by the Test operation
$TEST_PROMPT = "The life of a cyberpunk is"
#endregion

23
bin/env.ps1 Normal file
View file

@ -0,0 +1,23 @@
<#
#* Environment configuration script for LLAMA.CPP
#! Sets environment variables for the current process; persistent user-level
#! variables are available below, commented out
#>
Set-StrictMode -Version 3.0
Write-Verbose "Loading environment configuration - $PSScriptRoot\env.ps1"
#region Parameters
# LLAMA.CPP executable path (produced by llama.ps1 -build)
$llama_cpp_binary_path = "$PSScriptRoot\..\build\bin\Release\main.exe"
# Default model path for environment
# NOTE(review): machine-specific absolute path — adjust per host, or set
# LLM_MODEL_PATH in the environment before dot-sourcing this file
$llm_model_path = "Z:\AI\LLM\MythoMax-L2-13B-GGUF\mythomax-l2-13b.Q4_K_M.gguf"
#endregion
#region Set current environment variables
# Resolve-Path raises an error when its target does not exist, which would
# abort this dot-sourced file and leave the variables unset. Guard each path
# so a missing binary/model degrades to a warning; llama.ps1's Validate will
# still produce a clear failure at invocation time.
if (Test-Path "$llama_cpp_binary_path") { $env:LLAMA_CPP = (Resolve-Path "$llama_cpp_binary_path").Path }
else { Write-Warning "LLAMA.CPP binary not found at - '$llama_cpp_binary_path'" }
if (Test-Path "$llm_model_path") { $env:LLM_MODEL_PATH = (Resolve-Path "$llm_model_path").Path }
else { Write-Warning "LLM model not found at - '$llm_model_path'" }
#endregion
#region Persistent environment variables
# [Environment]::SetEnvironmentVariable("LLAMA_CPP", "$env:LLAMA_CPP", "User")
# [Environment]::SetEnvironmentVariable("LLM_MODEL_PATH", "$env:LLM_MODEL_PATH", "User")
#endregion

View file

@ -1,4 +1,4 @@
@echo off
@title LLAMA.CPP - HELP
REM Show LLAMA.CPP help through the PowerShell driver script.
REM -NoProfile skips user-profile loading; %~dp0 expands to this script's
REM directory so the launcher works from any current working directory.
PowerShell -NoProfile -ExecutionPolicy Bypass -File "%~dp0llama.ps1" -help
pause

View file

@ -1,5 +1,12 @@
<#
#* Utility powerShell script to invoke the LLAMA.CPP binary
#! This script is used to invoke the LLAMA.CPP binary with configured environment paths and required arguments
#>
param(
[string] $prompt, # Prompt text to complete
[string] $file, # File path to complete
[switch] $clear, # Clear console prior to execution
[switch] $console, # Start the text completion console
[switch] $debug, # Enable debug output
[switch] $verbose, # Enable verbose output
[switch] $build, # Build the executable
@ -15,87 +22,92 @@ if ($clear) { Clear-Host }
# Map the -debug/-verbose switches onto the standard preference variables so
# Write-Debug / Write-Verbose output becomes visible for this invocation.
if ($debug) { $DebugPreference = "Continue" }
if ($verbose) { $VerbosePreference = "Continue" }
# Import environment configuration if one exists. The file is optional: when
# absent, Validate reports the missing binary with a clearer message later.
# (The former unconditional dot-source of env.config.ps1 was removed — it
# threw whenever that file was missing.)
if ((Test-Path "$PSScriptRoot\env.ps1")) { . "$PSScriptRoot\env.ps1" }
# else { throw "Could not find environment configuration at path - '$PSScriptRoot\env.ps1'" }
#endregion
#region Operations
<# Run a command string from a given working directory, restoring the
   caller's location afterwards even when the command throws #>
function ExecuteCommand
{
    param(
        [string] $command,          # Command line to execute
        [string] $cwd = $PROJ_DIR   # Working directory for the command
    )
    Write-Verbose "Executing command: $command"
    Write-Verbose "Working directory: $cwd"
    $cur_dir = Get-Location
    Set-Location $cwd
    # try/finally guarantees the caller's location is restored on failure;
    # NOTE(review): Invoke-Expression on an interpolated string is
    # injection-prone — callers must not pass untrusted input.
    try { Invoke-Expression $command }
    finally { Set-Location $cur_dir }
}
<# Compile LLAMA.CPP binary via cmake (Release configuration) #>
function Build
{
    param(
        # Build output directory; defaults to <project>\build when omitted
        [string] $build_path = $null
    )
    $cur_dir = Get-Location
    try
    {
        # Set the build path to the default if not specified
        if (!$build_path) { $build_path = "$PSScriptRoot\..\build" }
        # Create the build directory when it does not exist yet
        if (!(Test-Path -Path $build_path -PathType Container)) { New-Item -Path $build_path -ItemType Directory | Out-Null }
        # cmake expects to run from inside the build tree
        Set-Location "$build_path"
        cmake ..
        cmake --build . --config Release
    }
    catch { Write-Error "An error occurred during the build process - $_" }
    finally { Set-Location $cur_dir }
}
<# Test that a compiled LLAMA.CPP binary exists. Raise exception if not #>
function Validate
{
    # $env:LLAMA_CPP is set by bin\env.ps1 (or manually by the user)
    if (!(Test-Path "$env:LLAMA_CPP")) { throw "Could not find llama.exe at path - '$env:LLAMA_CPP'" }
}
<# Complete text from input prompt #>
function Complete-Text
{
    param(
        [string] $prompt,               # Prompt text to complete
        [string] $model_path = $null,   # Model file; defaults to $env:LLM_MODEL_PATH
        [int] $context_size = 2048,     # Context window size (-c)
        [int] $thread_cnt = 4           # Worker thread count (-t)
    )
    Validate
    if (!$model_path) { $model_path = $env:LLM_MODEL_PATH }
    # Build the argument list as an array and invoke with the call operator.
    # Unlike Invoke-Expression on an interpolated string, this cannot be
    # broken (or injected into) by quotes or special characters in the prompt.
    $arguments = @(
        "-m", $model_path
        "-p", $prompt
        "-c", $context_size
        "-t", $thread_cnt
        "--color"
    )
    & "$env:LLAMA_CPP" @arguments
}
<# Generate and complete text file #>
function Complete-TextFile
{
    param(
        [string] $file,                 # Path of the prompt file to complete (-f)
        [string] $model_path = $null,   # Model file; defaults to $env:LLM_MODEL_PATH
        [int] $context_size = 2048,     # Context window size (-c)
        [int] $thread_cnt = 4           # Worker thread count (-t)
    )
    Validate
    if (!$model_path) { $model_path = $env:LLM_MODEL_PATH }
    # Build the argument list as an array and invoke with the call operator.
    # Unlike Invoke-Expression on an interpolated string, this cannot be
    # broken (or injected into) by quotes or special characters in the path.
    $arguments = @(
        "-m", $model_path
        "-f", $file
        "-c", $context_size
        "-t", $thread_cnt
        "--color"
    )
    & "$env:LLAMA_CPP" @arguments
}
<# Start text completion console (interactive session, waits for input first) #>
function Start-TextCompletionConsole
{
    param(
        [string] $model_path = $null,   # Model file; defaults to $env:LLM_MODEL_PATH
        # NOTE(review): this parameter was hidden by a diff hunk boundary;
        # default reconstructed to match the sibling completion functions.
        [int] $context_size = 2048,     # Context window size (-c)
        [int] $thread_cnt = 4           # Worker thread count (-t)
    )
    Validate
    if (!$model_path) { $model_path = $env:LLM_MODEL_PATH }
    # Build the argument list as an array and invoke with the call operator;
    # safer and simpler than Invoke-Expression on an interpolated string.
    $arguments = @(
        "-m", $model_path
        "-c", $context_size
        "-t", $thread_cnt
        "--interactive-first"
        "--color"
    )
    & "$env:LLAMA_CPP" @arguments
}
<# Invoke help details #>
function Help
{
    Validate
    # Call operator avoids Invoke-Expression string re-parsing entirely
    & "$env:LLAMA_CPP" --help
}
<# Invoke test function: complete a sample prompt with the default model #>
function Test
{
    param(
        # Sample prompt; the default preserves the original test behavior
        [string] $prompt = "The life of a cyberpunk is"
    )
    Complete-Text -prompt $prompt
}
#endregion
#region Execution
# Dispatch on the switches declared at the top of the script. Switches are
# not mutually exclusive — each requested operation runs in the order below
# (e.g. -build together with -test builds first, then runs the test).
if ($build) { Build }
if ($prompt) { Complete-Text -prompt "$prompt" }
if ($file) { Complete-TextFile -file "$file" }
if ($console) { Start-TextCompletionConsole }
if ($test) { Test }
if ($help) { Help }

View file

@ -1,4 +1,4 @@
@echo off
@title LLAMA.CPP - TEST
PowerShell -NoProfile -ExecutionPolicy Bypass -Command "& '.\llama.ps1'" "-test"
pause
PowerShell -ExecutionPolicy Bypass -File "%~dp0llama.ps1" -test
pause