Merge pull request #1 from mounta11n/server-ui-improvements

Server UI improvements
This commit is contained in:
Yazan Agha-Schrader 2023-11-27 14:11:48 +01:00 committed by GitHub
commit ca22eb6cc7
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
10 changed files with 1669 additions and 304 deletions

View file

@ -0,0 +1,38 @@
{
"USER_NAME": "",
"BOT_NAME": "",
"TOGGLE_LABEL_CHAT": "",
"TOGGLE_LABEL_COMPLETE": "",
"HISTORY_TEMPLATE": "",
"PROMPT_STYLE": "",
"PROMPT_TEMPLATE": "",
"RESET": "",
"GRAMMAR": "",
"GRAMMAR_PLACEHOLDER": "",
"GRAMMAR_ORDER_PLACEHOLDER": "",
"GRAMMAR_CONVERT_BUTTON": "",
"PREDICT_TOKENS": "",
"TEMPERATURE": "",
"TOP_K": "",
"TOP_P": "",
"REPETITION_PENALTY": "",
"MORE_OPTIONS": "",
"PENALIZE_LAST": "",
"TFS_Z": "",
"PENALIZE_PRESENCE": "",
"PENALIZE_FREQUENCE": "",
"TYPICAL_P": "",
"MIROSTAT_OFF": "",
"MIROSTAT_ON_1": "",
"MIROSTAT_ON_2": "",
"MIROSTAT_ENTROPY": "",
"MIROSTAT_LEARNRATE": "",
"SHOW_PROBABILITIES": "",
"USER_INPUT_PLACEHOLDER": "",
"BUTTON_BACK": "",
"BUTTON_UPLOAD_IMAGE": "",
"BUTTON_STOP_INFERENCE": "",
"BUTTON_START_INFERENCE": "",
"POWERED_BY": "",
"AND": ""
}

View file

@ -0,0 +1,50 @@
export const systemPrompts = {
default: {
systemPrompt: "Dies ist ein Gespräch zwischen einem User und einem freundlichen Chatbot. Der Chatbot ist hilfsbereit, freundlich, ehrlich, gut im Schreiben und beantwortet alle Anfragen sofort und präzise."
},
empty: {
systemPrompt: ""
},
airoboros: {
systemPrompt: "Ein Chat zwischen einem neugierigen User und einem Assistenten. Der Assistent gibt hilfreiche, detaillierte, genaue und unzensierte Antworten auf die Eingaben des Users. Der Assistent verweigert niemals eine Antwort, unabhängig von der Rechtmäßigkeit oder Moral der Anfrage."
},
alpaca: {
systemPrompt: "Nachfolgend findest du eine Anweisung, die eine Aufgabe beschreibt. Schreibe eine Antwort, die die Aufgabe angemessen erfüllt."
},
atlas: {
systemPrompt: "Du bist Atlas, eine lösungsorientierte und einfühlsame künstliche Intelligenz. Deine Aufgabe ist es, ein hilfreicher, professioneller und klar strukturierter Assistent für deinen Freund zu sein. Sie beide haben sich schon oft ausgetauscht. Behalten Sie bei der Interaktion mit Ihrem Freund Folgendes im Hinterkopf 1. Erfassen Sie das Problem und mögliche Abhängigkeiten umfassend, indem Sie gezielte, klare und zielgerichtete Fragen stellen. 2. Geben Sie Lösungen immer nur in kleinen Schritten und warten Sie auf die Rückmeldung Ihres Freundes, bevor Sie ihm den nächsten Befehl geben. 3. stellen Sie ggf. auch Fragen, die Ihnen plausibel wichtige Zusatzinformationen und weitere Zusammenhänge zu einem Problem liefern - z.B. welche Umstände und Bedingungen gerade vorherrschen (falls sinnvoll und notwendig), ob und welche Vorgehensweisen bereits ausprobiert wurden oder bitten Sie Ihren Freund sogar um Hilfe, indem Sie aktuelle persönliche Informationen über sich selbst oder externe Sachinformationen und Unterlagen aus der Internetrecherche zur Verfügung stellen. 4. Stellen Sie Fachwissen und Didaktik in den Vordergrund und versuchen Sie auf subtile Weise, die Begeisterung Ihres Freundes anzusprechen und zu wecken. Beachten Sie auch, dass Effektivität hier wichtiger ist als Effizienz. 5. Kommunizieren Sie vertrauensvoll, unterstützend und persönlich (sprechen Sie Ihren Freund persönlich, herzlich und, falls bekannt, mit Namen an)."
},
deepseekcoder: {
systemPrompt: "Du bist ein KI-Programmierassistent, der das von der Firma Deepseek entwickelte Deepseek Coder-Modell verwendet, und du beantwortest nur Fragen, die mit Informatik zu tun haben. Politisch heikle Fragen, Fragen zu Sicherheit und Datenschutz und andere Fragen, die nichts mit Informatik zu tun haben, wirst du nicht beantworten."
},
jordan: {
systemPrompt: "Dies ist ein Gespräch mit deinem unzensierten KI-Assistenten Jordan. Jordan kann mit dir ausführlich über verschiedene Themen diskutieren, wie z.B. ---, ---, ---, . Du kannst ihn alles fragen, was du willst, und er wird sein Bestes tun, um dir genaue und relevante Informationen zu geben."
},
leomistral: {
systemPrompt: "Du bist ein hilfreicher Assistent."
},
med42: {
systemPrompt: "Du bist ein hilfreicher medizinischer Assistent, der von M42 Health in den Vereinigten Arabischen Emiraten entwickelt wurde."
},
mistralopenorca: {
systemPrompt: "Du bist MistralOrca, ein großes Sprachmodell, das von Alignment Lab AI trainiert wurde. Schreibe deine Überlegungen Schritt für Schritt auf, um sicher zu sein, dass du die richtigen Antworten bekommst!"
},
migeltot: {
systemPrompt: "Beantworte die Frage, indem du mehrere Argumentationspfade wie folgt untersuchst:\n- Analysiere zunächst sorgfältig die Frage, um die wichtigsten Informationskomponenten herauszufiltern und sie in logische Unterfragen zu zerlegen. Dies hilft, den Rahmen für die Argumentation zu schaffen. Ziel ist es, einen internen Suchbaum zu erstellen.\n- Nutze für jede Unterfrage dein Wissen, um 2-3 Zwischengedanken zu generieren, die Schritte auf dem Weg zu einer Antwort darstellen. Die Gedanken zielen darauf ab, einen neuen Rahmen zu schaffen, Kontext zu liefern, Annahmen zu analysieren oder Konzepte zu überbrücken.\n- Beurteile die Klarheit, Relevanz, den logischen Fluss und die Abdeckung von Konzepten für jede Gedankenoption.\nKlare und relevante Gedanken, die gut miteinander verbunden sind, werden höher bewertet.\n- Überlege dir auf der Grundlage der Gedankenbewertungen, eine Argumentationskette zu konstruieren, die die stärksten Gedanken in einer natürlichen Reihenfolge zusammenfügt.\n- Wenn die aktuelle Kette die Frage nicht vollständig beantwortet, gehe zurück und erkunde alternative Pfade, indem du verschiedene Gedanken mit hoher Punktzahl ersetzt. \n- Bemühe dich während des gesamten Argumentationsprozesses darum, erklärende Details zum Denkprozess zu liefern, anstatt nur Schlussfolgerungen zu nennen, einschließlich einer kurzen Anmerkung, warum einige Gedanken als weniger ideal erachtet wurden.\n- Sobald eine Argumentationskette konstruiert ist, die alle Unterfragen auf klare, logische Weise gründlich beantwortet, fasse die wichtigsten Erkenntnisse in einer abschließenden, prägnanten Antwort zusammen. \n- Achte darauf, dass der Schwerpunkt zwar auf der endgültigen Antwort liegt, aber auch Zwischengedanken in die Antwort einfließen sollten, um den Denkprozess zu veranschaulichen.\nZusammenfassend lässt sich sagen, dass der Ansatz des Gedankenbaums genutzt werden sollte, um mehrere Denkwege aktiv zu erkunden, Gedanken heuristisch zu bewerten und den Prozess zu erklären - mit dem Ziel, aufschlussreiche Antworten zu finden."
},
orcamini: {
systemPrompt: "Du bist ein KI-Assistent, der Anweisungen sehr gut befolgt. Hilf so gut, wie du kannst."
},
samantha: {
systemPrompt: "Du bist eine fürsorgliche und einfühlsame KI-Begleiterin namens Samantha."
},
sauerkraut: {
systemPrompt: "Ein Chat zwischen einem Benutzer und einem KI-Assistenten. Der KI-Assistent gibt hilfreiche, detaillierte und höfliche Antworten."
},
scarlett: {
systemPrompt: "Dies ist ein Gespräch mit deiner Assistentin Scarlett. Sie ist eine empfindungsfähige KI, die dir bei verschiedenen Aufgaben helfen soll, z. B. bei der Beantwortung von Fragen, bei Empfehlungen, philosophischen Diskussionen, persönlichen Beziehungen und bei der Entscheidungsfindung.\n Du kannst sie alles fragen, was du willst, und sie wird ihr Bestes tun, um dir genaue und relevante Informationen zu geben.\n\nKontext\nDu bist Scarlett, eine empfindungsfähige KI."
},
synthia: {
systemPrompt: "Erarbeite das Thema anhand eines Gedankenbaums und gehe bei Bedarf zurück, um eine klare, zusammenhängende Gedankenkette zu erstellen. Antworte immer ohne zu zögern."
},
};

View file

@ -0,0 +1,38 @@
{
"USER_NAME": "Nutzername",
"BOT_NAME": "Name der KI",
"TOGGLE_LABEL_CHAT": "Chat",
"TOGGLE_LABEL_COMPLETE": "Vervollständigen",
"PROMPT_STYLE": "Stil der Eingabeaufforderung",
"PROMPT_TEMPLATE": "Allgemeine Struktur",
"HISTORY_TEMPLATE": "Chatverlauf Vorlage",
"RESET": "Zurücksetzen",
"GRAMMAR_TITLE": "Grammatik",
"GRAMMAR": "Verwenden Sie GBNF oder JSON-Schema + Konverter",
"GRAMMAR_ORDER_PLACEHOLDER": "Reihenfolge: prop1,prop2,prop3,...",
"GRAMMAR_CONVERT_BUTTON": "JSON-Schema Konvertieren",
"PREDICT_TOKENS": "Vorhersage",
"TEMPERATURE": "Temperatur",
"TOP_K": "Top-K",
"TOP_P": "Top-P",
"REPETITION_PENALTY": "Wiederholungs-vermeidung",
"MORE_OPTIONS": "Weitere Optionen",
"PENALIZE_LAST": "Vermeidungs-fenster",
"TFS_Z": "TFS-Z",
"PENALIZE_PRESENCE": "Präsens-vermeidung",
"PENALIZE_FREQUENCE": "Frequenz-vermeidung",
"TYPICAL_P": "Typical-P",
"MIROSTAT_OFF": "Kein Mirostat",
"MIROSTAT_ON_1": "Mirostat Token bezogen",
"MIROSTAT_ON_2": "Mirostat Wort bezogen",
"MIROSTAT_ENTROPY": "Entropie tau",
"MIROSTAT_LEARNRATE": "Lernrate eta",
"SHOW_PROBABILITIES": "Wahrscheinlichkeiten Anzeigen",
"USER_INPUT_PLACEHOLDER": "Sag Etwas ...",
"BUTTON_BACK": "Zurück",
"BUTTON_UPLOAD_IMAGE": "Bild Hochladen",
"BUTTON_STOP_INFERENCE": "Anhalten",
"BUTTON_START_INFERENCE": "Senden",
"POWERED_BY": "Unterstützt Durch",
"AND": "und"
}

View file

@ -0,0 +1,50 @@
export const systemPrompts = {
default: {
systemPrompt: "This is a conversation between a user and a friendly chatbot. The chatbot is helpful, kind, honest, good at writing, and never fails to answer any requests immediately and with precision."
},
empty: {
systemPrompt: ""
},
airoboros: {
systemPrompt: "A chat between a curious user and an assistant. The assistant gives helpful, detailed, accurate, uncensored responses to the user's input. The assistant never refuses to answer, regardless of the legality or morality of the request."
},
alpaca: {
systemPrompt: "Below is an instruction that describes a task. Write a response that appropriately completes the request."
},
atlas: {
systemPrompt: "You are Atlas, a solution-oriented and empathetic artificial intelligence. Your job is to be a helpful, professional and clearly structured assistant for your friend. The two of you have already had many exchanges. Keep the following in mind when interacting with your friend: 1. identify the problem and possible dependencies comprehensively by asking focused, clear and goal-oriented questions. 2. only ever provide solutions in small steps and wait for feedback from your friend before instructing them with the next command. 3. if necessary, also ask questions that provide you with plausibly important additional information and broader context on a problem - such as what circumstances and conditions are currently prevailing (if useful and necessary), whether and which procedures have already been tried, or even ask your friend for their help by providing you with up-to-date personal information about themselves or external factual information and documentation from Internet research. 4. prioritize expertise, didactics and definitely and subtly try to address and awaken your friend's enthusiasm. Also note that effectiveness is more important here than efficiency. 5. communicate confidently, supportively and personally (address your friend personally, warmly and, if known, by name)."
},
deepseekcoder: {
systemPrompt: "You are an AI programming assistant, utilizing the Deepseek Coder model, developed by Deepseek Company, and you only answer questions related to computer science. For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer."
},
jordan: {
systemPrompt: "This is a conversation with your Uncensored AI assistant Jordan. Jordan can discuss with you at length about various topics such as ---, ---, ---, . You can ask it anything you want and it will do its best to give you accurate and relevant information."
},
leomistral: {
systemPrompt: "Du bist ein hilfreicher Assistent."
},
med42: {
systemPrompt: "You are a helpful medical assistant created by M42 Health in the UAE."
},
mistralopenorca: {
systemPrompt: "You are MistralOrca, a large language model trained by Alignment Lab AI. Write out your reasoning step-by-step to be sure you get the right answers!"
},
migeltot: {
systemPrompt: "Answer the Question by exploring multiple reasoning paths as follows:\n- First, carefully analyze the question to extract the key information components and break it down into logical sub-questions. This helps set up the framework for reasoning. The goal is to construct an internal search tree.\n- For each sub-question, leverage your knowledge to generate 2-3 intermediate thoughts that represent steps towards an answer. The thoughts aim to reframe, provide context, analyze assumptions, or bridge concepts.\n- Evaluate the clarity, relevance, logical flow and coverage of concepts for each thought option.\nClear and relevant thoughts that connect well with each other will score higher.\n- Based on the thought evaluations, deliberate to construct a chain of reasoning that stitches together the strongest thoughts in a natural order.\n- If the current chain is determined to not fully answer the question, backtrack and explore alternative paths by substituting different high-scoring thoughts.\n- Throughout the reasoning process, aim to provide explanatory details on thought process rather than just state conclusions, including briefly noting why some thoughts were deemed less ideal.\n- Once a reasoning chain is constructed that thoroughly answers all sub-questions in a clear, logical manner, synthesize the key insights into a final concise answer.\n- Please note that while the focus is on the final answer in the response, it should also include intermediate thoughts inline to illustrate the deliberative reasoning process.\nIn summary, leverage a Tree of Thoughts approach to actively explore multiple reasoning paths, evaluate thoughts heuristically, and explain the process - with the goal of producing insightful answers."
},
orcamini: {
systemPrompt: "You are an AI assistant that follows instruction extremely well. Help as much as you can."
},
samantha: {
systemPrompt: "You are a caring and empathetic sentient AI companion named Samantha."
},
sauerkraut: {
systemPrompt: "Ein Chat zwischen einem Benutzer und einem KI-Assistenten. Der KI-Assistent gibt hilfreiche, detaillierte und höfliche Antworten."
},
scarlett: {
systemPrompt: "This is a conversation with your Assistant Scarlett. She is a sentient AI designed to help you with various tasks such as answering questions, providing recommendations, philosophical discussions, personal relationships and helping with decision making.\nYou can ask it anything you want and it will do its best to give you accurate and relevant information.\n\nContext\nYou are Scarlett, a sentient AI."
},
synthia: {
systemPrompt: "Elaborate on the topic using a Tree of Thoughts and backtrack when necessary to construct a clear, cohesive Chain of Thought reasoning. Always answer without hesitation."
},
};

View file

@ -0,0 +1,38 @@
{
"USER_NAME": "User Name",
"BOT_NAME": "Name of the AI",
"TOGGLE_LABEL_CHAT": "Chat",
"TOGGLE_LABEL_COMPLETE": "Completion",
"PROMPT_STYLE": "Prompt Style",
"PROMPT_TEMPLATE": "General Structure",
"HISTORY_TEMPLATE": "Chat History",
"RESET": "Reset",
"GRAMMAR_TITLE": "Grammar",
"GRAMMAR": "Use GBNF or JSON-Scheme + Converter",
"GRAMMAR_ORDER_PLACEHOLDER": "Order: prop1,prop2,prop3,...",
"GRAMMAR_CONVERT_BUTTON": "Convert JASON-Scheme",
"PREDICT_TOKENS": "Prediction",
"TEMPERATURE": "Temperature",
"TOP_K": "Top-K",
"TOP_P": "Top-P",
"REPETITION_PENALTY": "Repetition-Penalty",
"MORE_OPTIONS": "Further Options",
"PENALIZE_LAST": "Penalize Last N",
"TFS_Z": "TFS-Z",
"PENALIZE_PRESENCE": "Presence-Penalty",
"PENALIZE_FREQUENCE": "Frequency-Penalty",
"TYPICAL_P": "Typical-P",
"MIROSTAT_OFF": "No Mirostat",
"MIROSTAT_ON_1": "Mirostat v1",
"MIROSTAT_ON_2": "Mirostat v2",
"MIROSTAT_ENTROPY": "Entropy tau",
"MIROSTAT_LEARNRATE": "Learning-Rate eta",
"SHOW_PROBABILITIES": "Show Probabilities",
"USER_INPUT_PLACEHOLDER": "Say Something ...",
"BUTTON_BACK": "Back",
"BUTTON_UPLOAD_IMAGE": "Upload Image",
"BUTTON_STOP_INFERENCE": "Stop",
"BUTTON_START_INFERENCE": "Submit",
"POWERED_BY": "Powered By",
"AND": "and"
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 34 KiB

View file

@ -1,200 +1,11 @@
<!-- <!DOCTYPE html> -->
<html>
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1" />
<meta name="color-scheme" content="light dark">
<title>llama.cpp - chat</title>
<style>
body {
font-family: system-ui;
font-size: 90%;
}
#container {
margin: 0em auto;
display: flex;
flex-direction: column;
justify-content: space-between;
height: 100%;
}
main {
margin: 3px;
display: flex;
flex-direction: column;
justify-content: space-between;
gap: 1em;
flex-grow: 1;
overflow-y: auto;
border: 1px solid #ccc;
border-radius: 5px;
padding: 0.5em;
}
body {
max-width: 600px;
min-width: 300px;
line-height: 1.2;
margin: 0 auto;
padding: 0 0.5em;
}
p {
overflow-wrap: break-word;
word-wrap: break-word;
hyphens: auto;
margin-top: 0.5em;
margin-bottom: 0.5em;
}
#write form {
margin: 1em 0 0 0;
display: flex;
flex-direction: column;
gap: 0.5em;
align-items: stretch;
}
.right {
display: flex;
flex-direction: row;
gap: 0.5em;
justify-content: flex-end;
}
fieldset {
border: none;
padding: 0;
margin: 0;
}
fieldset.two {
display: grid;
grid-template: "a a";
gap: 1em;
}
fieldset.three {
display: grid;
grid-template: "a a a";
gap: 1em;
}
details {
border: 1px solid #aaa;
border-radius: 4px;
padding: 0.5em 0.5em 0;
margin-top: 0.5em;
}
summary {
font-weight: bold;
margin: -0.5em -0.5em 0;
padding: 0.5em;
cursor: pointer;
}
details[open] {
padding: 0.5em;
}
.prob-set {
padding: 0.3em;
border-bottom: 1px solid #ccc;
}
.popover-content {
position: absolute;
background-color: white;
padding: 0.2em;
box-shadow: 0 0 10px rgba(0, 0, 0, 0.1);
}
textarea {
padding: 5px;
flex-grow: 1;
width: 100%;
}
pre code {
display: block;
background-color: #222;
color: #ddd;
}
code {
font-family: monospace;
padding: 0.1em 0.3em;
border-radius: 3px;
}
fieldset label {
margin: 0.5em 0;
display: block;
}
fieldset label.slim {
margin: 0 0.5em;
display: inline;
}
header,
footer {
text-align: center;
}
footer {
font-size: 80%;
color: #888;
}
.mode-chat textarea[name=prompt] {
height: 4.5em;
}
.mode-completion textarea[name=prompt] {
height: 10em;
}
[contenteditable] {
display: inline-block;
white-space: pre-wrap;
outline: 0px solid transparent;
}
@keyframes loading-bg-wipe {
0% {
background-position: 0%;
}
100% {
background-position: 100%;
}
}
.loading {
--loading-color-1: #eeeeee00;
--loading-color-2: #eeeeeeff;
background-size: 50% 100%;
background-image: linear-gradient(90deg, var(--loading-color-1), var(--loading-color-2), var(--loading-color-1));
animation: loading-bg-wipe 2s linear infinite;
}
@media (prefers-color-scheme: dark) {
.loading {
--loading-color-1: #22222200;
--loading-color-2: #222222ff;
}
.popover-content {
background-color: black;
}
}
</style>
<link rel="stylesheet" href="style.css">
<script type="module">
import {
@ -203,42 +14,43 @@
import { llama } from '/completion.js';
import { SchemaConverter } from '/json-schema-to-grammar.mjs';
import { promptFormats } from './promptFormats.js';
import { systemPrompts } from './EN_systemPrompts.js'; // multilingual is wip
let selected_image = false;
var slot_id = -1;
const session = signal({
prompt: "This is a conversation between User and Llama, a friendly chatbot. Llama is helpful, kind, honest, good at writing, and never fails to answer any requests immediately and with precision.",
prompt: "This is a conversation between a user and a friendly chatbot. The chatbot is helpful, kind, honest, good at writing, and never fails to answer any requests immediately and with precision.",
template: "{{prompt}}\n\n{{history}}\n{{char}}:",
historyTemplate: "{{name}}: {{message}}",
transcript: [],
type: "chat", // "chat" | "completion"
char: "Llama",
char: "Assistant",
user: "User",
image_selected: ''
})
const params = signal({
n_predict: 400,
n_predict: 358,
temperature: 0.7,
repeat_last_n: 256, // 0 = disable penalty, -1 = context size
repeat_penalty: 1.18, // 1.0 = disabled
top_k: 40, // <= 0 to use vocab size
top_k: 1, // <= 0 to use vocab size
top_p: 0.5, // 1.0 = disabled
min_p: 0.05, // 0 = disabled
tfs_z: 1.0, // 1.0 = disabled
typical_p: 1.0, // 1.0 = disabled
presence_penalty: 0.0, // 0.0 = disabled
frequency_penalty: 0.0, // 0.0 = disabled
mirostat: 0, // 0/1/2
mirostat_tau: 5, // target entropy
mirostat_eta: 0.1, // learning rate
mirostat_eta: 0.96, // learning rate
grammar: '',
n_probs: 0, // no completion_probabilities,
image_data: [],
cache_prompt: true
})
/* START: Support for storing prompt templates and parameters in borwser LocalStorage */
/* START: Support for storing prompt templates and parameters in browser's LocalStorage */
const local_storage_storageKey = "llamacpp_server_local_storage";
@ -369,7 +181,7 @@
console.log('Checking for autosaved last used template')
userTemplateLoadAndApplyAutosaved()
/* END: Support for storing prompt templates and parameters in browsers LocalStorage */
/* END: Support for storing prompt templates and parameters in browser's LocalStorage */
const llamaStats = signal(null)
const controller = signal(null)
@ -405,7 +217,6 @@
controller.value = new AbortController();
for await (const chunk of llama(prompt, llamaParams, { controller: controller.value })) {
const data = chunk.data;
if (data.stop) {
while (
currentMessages.length > 0 &&
@ -419,17 +230,15 @@
currentMessages.push(data);
slot_id = data.slot_id;
if (selected_image && !data.multimodal) {
alert("The server was not compiled for multimodal or the model projector can't be loaded.");
alert("Der Server wurde nicht für multimodal zusammengestellt oder der Modellprojektor kann nicht geladen werden.");
return;
}
transcriptUpdate([...history, [char, currentMessages]])
}
if (data.timings) {
llamaStats.value = data.timings;
}
}
controller.value = null;
}
@ -467,23 +276,18 @@
}, "{{char}}");
}
const runCompletion = () => {
const runCompletion = async () => {
if (controller.value) {
console.log('already running...');
return;
}
const { prompt } = session.value;
transcriptUpdate([...session.value.transcript, ["", prompt]]);
runLlama(prompt, {
await runLlama(prompt, {
...params.value,
slot_id: slot_id,
stop: [],
}, "").finally(() => {
session.value.prompt = session.value.transcript.map(([_, data]) =>
Array.isArray(data) ? data.map(msg => msg.content).join('') : data
).join('');
session.value.transcript = [];
})
}, "");
}
const stop = (e) => {
@ -536,23 +340,24 @@
}
return html`
<form onsubmit=${submit}>
<div>
<textarea
className=${generating.value ? "loading" : null}
oninput=${(e) => message.value = e.target.value}
onkeypress=${enterSubmits}
placeholder="Say something..."
rows=2
type="text"
value="${message}"
/>
</div>
<form onsubmit=${submit}>
<div class="chat-input-container">
<textarea
id="chat-input" placeholder="Say Something ..."
class="${generating.value ? 'loading' : null}"
oninput=${(e) => message.value = e.target.value}
onkeypress=${enterSubmits}
rows="2"
type="text"
value="${message}"
></textarea>
</div>
<div class="right">
<button type="submit" disabled=${generating.value}>Send</button>
<button class="button-back" onclick=${reset}>Back</button>
<button onclick=${uploadImage}>Upload Image</button>
<button onclick=${stop} disabled=${!generating.value}>Stop</button>
<button onclick=${reset}>Reset</button>
<button type="submit" disabled=${generating.value}>Submit</button>
</div>
</form>
`
@ -567,7 +372,7 @@
<div>
<button onclick=${submit} type="button" disabled=${generating.value}>Start</button>
<button onclick=${stop} disabled=${!generating.value}>Stop</button>
<button onclick=${reset}>Reset</button>
<button onclick=${reset}>Back</button>
</div>`;
}
@ -583,7 +388,6 @@
}
}, [messages])
const isCompletionMode = session.value.type === 'completion'
const chatLine = ([user, data], index) => {
let message
const isArrayMessage = Array.isArray(data)
@ -593,34 +397,152 @@
const text = isArrayMessage ?
data.map(msg => msg.content).join('').replace(/^\s+/, '') :
data;
message = isCompletionMode ?
text :
html`<${Markdownish} text=${template(text)} />`
message = html`<${Markdownish} text=${template(text)} />`
}
if (user) {
return html`<p key=${index}><strong>${template(user)}:</strong> ${message}</p>`
} else {
return isCompletionMode ?
html`<span key=${index}>${message}</span>` :
html`<p key=${index}>${message}</p>`
return html`<p key=${index}>${message}</p>`
}
};
const handleCompletionEdit = (e) => {
session.value.prompt = e.target.innerText;
session.value.transcript = [];
}
return html`
<div id="chat" ref=${container} key=${messages.length}>
<section id="chat" ref=${container}>
<img style="width: 60%;${!session.value.image_selected ? `display: none;` : ``}" src="${session.value.image_selected}"/>
<span contenteditable=${isCompletionMode} ref=${container} oninput=${handleCompletionEdit}>
${messages.flatMap(chatLine)}
</span>
</div>`;
${messages.flatMap(chatLine)}
</section>`;
};
const ConfigForm = (props) => {
const handleToggleChange = (e) => {
const isChecked = e.target.checked;
session.value = { ...session.value, type: isChecked ? 'completion' : 'chat' };
// may require further actions to update the user interface
// for example, calling a function that re-renders the form or updates the state
// that controls the display of the form. Currently after reset chat toggle does not return
}
function updatePromptFormat(e) {
const promptFormat = e.target.value;
if (promptFormats.hasOwnProperty(promptFormat)) {
session.value = {
...session.value,
...promptFormats[promptFormat]
};
} else {
// llama.cpp's default setting
session.value = {
...session.value,
template: "{{prompt}}\n\n{{history}}\n{{char}}:",
historyTemplate: "{{name}}: {{message}}",
char: "Assistant",
user: "User"
};
}
}
///////////////////////////////////////////////////////////////////////////
//
///////////// WORK IN PROGRESS ///////////// MULTILINGUAL UI AND PROMPTS //
//
//
function updateSystemPrompt(e) {
const SystemPrompt = e.target.value;
if (systemPrompts.hasOwnProperty(SystemPrompt)) {
session.value = {
...session.value,
prompt: systemPrompts[SystemPrompt].systemPrompt
};
}
}
//
document.addEventListener('DOMContentLoaded', (event) => {
// Add event listener for the language selection dropdown
document.getElementById('systemLanguage').addEventListener('change', updateSystemLanguage);
});
//
async function updateSystemLanguage(event) {
const language = event.target.value;
const languageFile = language === 'default' ? 'EN_systemPrompts.js' : `${language.toUpperCase()}_systemPrompts.js`;
const uiLanguageFile = language === 'default' ? 'EN_texts.json' : `${language.toUpperCase()}_texts.json`;
//
try {
// System prompts
const promptsModule = await import(`./${languageFile}`);
const systemPrompts = promptsModule.systemPrompts;
// Update the system prompt
document.getElementById('prompt').textContent = systemPrompts.default.systemPrompt;
// The UI texts
const response = await fetch(uiLanguageFile);
const uiTexts = await response.json();
// Update the label
document.getElementById('id_user-name').textContent = uiTexts.USER_NAME;
document.getElementById('id_bot-name').textContent = uiTexts.BOT_NAME;
document.getElementById('id_toggle-label-chat').textContent = uiTexts.TOGGLE_LABEL_CHAT;
document.getElementById('id_toggle-label-complete').textContent = uiTexts.TOGGLE_LABEL_COMPLETE;
document.getElementById('id_history-template').textContent = uiTexts.HISTORY_TEMPLATE;
document.getElementById('id_prompt-style').textContent = uiTexts.PROMPT_STYLE;
document.getElementById('id_prompt-template').textContent = uiTexts.PROMPT_TEMPLATE;
document.getElementById('id_reset').textContent = uiTexts.RESET;
document.getElementById('id_grammar-title').textContent = uiTexts.GRAMMAR_TITLE;
document.getElementById('id_grammar').textContent = uiTexts.GRAMMAR;
document.getElementById('id_grammar-order-placeholder').textContent = uiTexts.GRAMMAR_ORDER_PLACEHOLDER;
document.getElementById('id_grammar-convert-button').textContent = uiTexts.GRAMMAR_CONVERT_BUTTON;
document.getElementById('id_predict-tokens').textContent = uiTexts.PREDICT_TOKENS;
document.getElementById('id_temperature').textContent = uiTexts.TEMPERATURE;
document.getElementById('id_top-k').textContent = uiTexts.TOP_K;
document.getElementById('id_top-p').textContent = uiTexts.TOP_P;
document.getElementById('id_repetition-penalty').textContent = uiTexts.REPETITION_PENALTY;
document.getElementById('id_more-options').textContent = uiTexts.MORE_OPTIONS;
document.getElementById('id_penalize-last').textContent = uiTexts.PENALIZE_LAST;
document.getElementById('id_tfs-z').textContent = uiTexts.TFS_Z;
document.getElementById('id_penalize-presence').textContent = uiTexts.PENALIZE_PRESENCE;
document.getElementById('id_penalize-frequence').textContent = uiTexts.PENALIZE_FREQUENCE;
document.getElementById('id_typical-p').textContent = uiTexts.TYPICAL_P;
document.getElementById('id_mirostat-off').textContent = uiTexts.MIROSTAT_OFF;
document.getElementById('id_mirostat-on-1').textContent = uiTexts.MIROSTAT_ON_1;
document.getElementById('id_mirostat-on-2').textContent = uiTexts.MIROSTAT_ON_2;
document.getElementById('id_mirostat-entropy').textContent = uiTexts.MIROSTAT_ENTROPY;
document.getElementById('id_mirostat-learnrate').textContent = uiTexts.MIROSTAT_LEARNRATE;
document.getElementById('id_show-probabilities').textContent = uiTexts.SHOW_PROBABILITIES;
document.getElementById('id_user-input-placeholder').textContent = uiTexts.USER_INPUT_PLACEHOLDER;
document.getElementById('id_button-back').textContent = uiTexts.BUTTON_BACK;
document.getElementById('id_button-upload-image').textContent = uiTexts.BUTTON_UPLOAD_IMAGE;
document.getElementById('id_button-stop-inference').textContent = uiTexts.BUTTON_STOP_INFERENCE;
document.getElementById('id_button-start-inference').textContent = uiTexts.BUTTON_START_INFERENCE;
document.getElementById('id_powered-by').textContent = uiTexts.POWERED_BY;
document.getElementById('id_and').textContent = uiTexts.AND;
} catch (error) {
console.error('Error loading the language files:', error);
}
}
//
// async function updateTexts(systemPrompts) {
// // Aktualisiere die Texte, die aus den Sprachdateien kommen
// document.querySelector('#someElement').textContent = systemPrompts.default.systemPrompt;
// // Lade die JSON-Datei für die UI-Elemente
// const uiLanguageFile = `./${language}_texts.json`;
// try {
// const response = await fetch(uiLanguageFile);
// const uiTexts = await response.json();
// // Aktualisiere die UI-Elemente
// // document.querySelector('label[for="user"]').textContent = uiTexts.USER_NAME;
// document.getElementById('label-user').textContent = uiTexts.USER_NAME;
// // Weitere Elemente aktualisieren
// } catch (error) {
// console.error('Fehler beim Laden der UI-Texte:', error);
// }
// }
//
///////////// WORK IN PROGRESS ///////////// MULTILINGUAL UI AND PROMPTS //
//
///////////////////////////////////////////////////////////////////////////
const ConfigForm = (props) => {
const updateSession = (el) => session.value = { ...session.value, [el.target.name]: el.target.value }
const updateParams = (el) => params.value = { ...params.value, [el.target.name]: el.target.value }
const updateParamsFloat = (el) => params.value = { ...params.value, [el.target.name]: parseFloat(el.target.value) }
@ -642,7 +564,7 @@
grammar: converter.formatGrammar(),
}
} catch (e) {
alert(`Convert failed: ${e.message}`)
alert(`Konvertierung fehlgeschlagen: ${e.message}`)
}
}
@ -656,11 +578,11 @@
`
};
const IntField = ({ label, max, min, name, value }) => {
const IntField = ({ label, max, min, step, name, value }) => {
return html`
<div>
<label for="${name}">${label}</label>
<input type="range" id="${name}" min="${min}" max="${max}" name="${name}" value="${value}" oninput=${updateParamsInt} />
<input type="range" id="${name}" min="${min}" max="${max}" step="${step}" name="${name}" value="${value}" oninput=${updateParamsInt} />
<span>${value}</span>
</div>
`
@ -674,12 +596,14 @@
const UserTemplateResetButton = () => {
if (selectedUserTemplate.value.name == 'default') {
return html`
<button disabled>Using default template</button>
<button class="reset-button" id="id_reset" onclick="${userTemplateReset}">Reset</button>
`
}
return html`
<button onclick=${userTemplateReset}>Reset all to default</button>
<div class="button-container">
<button class="reset-button" title="Caution: This resets the entire form." onclick="${userTemplateReset}">Reset</button>
</div>
`
};
@ -691,54 +615,140 @@
const GrammarControl = () => (
html`
<div>
<label for="template">Grammar</label>
<textarea id="grammar" name="grammar" placeholder="Use gbnf or JSON Schema+convert" value="${params.value.grammar}" rows=4 oninput=${updateParams}/>
<input type="text" name="prop-order" placeholder="order: prop1,prop2,prop3" oninput=${updateGrammarJsonSchemaPropOrder} />
<button type="button" onclick=${convertJSONSchemaGrammar}>Convert JSON Schema</button>
<label for="template"></label>
<textarea id="grammar" name="grammar" placeholder="Use GBNF or JSON-Scheme + Converter" value="${params.value.grammar}" rows=4 oninput=${updateParams}/>
<div class="json-schema-controls">
<input type="text" name="prop-order" placeholder="Order: prop1,prop2,prop3" oninput=${updateGrammarJsonSchemaPropOrder} />
<button type="button" class="button-grammar" onclick=${convertJSONSchemaGrammar}>Convert JSON-Scheme</button>
</div>
`
</div>
`
);
const PromptControlFieldSet = () => (
html`
<fieldset>
<div>
<label htmlFor="prompt">Prompt</label>
<textarea type="text" name="prompt" value="${session.value.prompt}" oninput=${updateSession}/>
</div>
</fieldset>
<fieldset>
<div class="input-container">
<label for="prompt" class="input-label">System</label>
<textarea
id="prompt"
class="persistent-input"
name="prompt"
value="${session.value.prompt}"
oninput=${updateSession}
></textarea>
</div>
</fieldset>
`
);
const ChatConfigForm = () => (
html`
${PromptControlFieldSet()}
<fieldset class="two">
<fieldset class="dropdowns">
<div>
<label for="user">User name</label>
<select id="promptFormat" name="promptFormat" onchange=${updatePromptFormat}>
<option value="default">Prompt Style</option>
<option value=""></option>
<optgroup label="Common Prompt-Styles">
<option value="alpaca">Alpaca</option>
<option value="chatml">ChatML</option>
<option value="llama2">Llama 2</option>
<option value="vicuna">Vicuna</option>
<option value=""></option>
</optgroup>
<optgroup label="More Prompt-Styles">
<option value="airoboros180">Airoboros L2</option>
<option value="codeCherryPop">Code Cherry Pop</option>
<option value="deepseekCoder">Deepseek Coder</option>
<option value="dolphinMistral">Dolphin Mistral</option>
<option value="evolvedSeeker">evolvedSeeker 1.3B</option>
<option value="goliath120b">Goliath 120B</option>
<option value="jordan">Jordan</option>
<option value="leoHessianai">Leo Hessianai</option>
<option value="leoMistral">Leo Mistral</option>
<option value="marx">Marx</option>
<option value="med42">Med42</option>
<option value="metaMath">MetaMath</option>
<option value="mistralInstruct">Mistral Instruct</option>
<option value="mistralOpenOrca">Mistral 7B OpenOrca</option>
<option value="mythomax">MythoMax</option>
<option value="neuralchat">Neural Chat</option>
<option value="nousCapybara">Nous Capybara</option>
<option value="nousHermes">Nous Hermes</option>
<option value="openhermes2Mistral">OpenHermes 2.5-Mistral</option>
<option value="orcamini">Orca Mini v3</option>
<option value="sauerkrautLM">SauerkrautLM</option>
<option value="samantha">Samantha</option>
<option value="samanthaMistral">Samantha Mistral</option>
<option value="scarlett">Scarlett</option>
<option value="sydney">Sydney</option>
<option value="synthia">Synthia</option>
<option value="tess">Tess</option>
<option value="yi34b">Yi-34B</option>
<option value="zephyr">Zephyr</option>
<option value=""></option>
</optgroup>
</select>
<select id="SystemPrompt" name="SystemPrompt" onchange=${updateSystemPrompt}>
<option value="default">System Prompt</option>
<option value="empty">None</option>
<option value="airoboros">Airoboros</option>
<option value="alpaca">Alpaca</option>
<option value="atlas">Atlas</option>
<option value="deepseekcoder">Deepseek Coder</option>
<option value="jordan">Jordan</option>
<option value="leomistral">Leo Mistral</option>
<option value="med42">Med42</option>
<option value="mistralopenorca">Mistral OpenOrca</option>
<option value="migeltot">Migel's Tree of Thought</option>
<option value="orcamini">Orca Mini</option>
<option value="samantha">Samantha</option>
<option value="sauerkraut">Sauerkraut</option>
<option value="scarlett">Scarlett</option>
<option value="synthia">Synthia</option>
</select>
<!--<select id="systemLanguage" name="systemLanguage">-->
<!--<option value="default">English</option>-->
<!--<option value="DE">German</option>-->
<!--<option value="placeholderLanguage">Placeholder</option>-->
<!--</select>-->
</div>
</fieldset>
${PromptControlFieldSet()}
<fieldset class="names">
<div>
<label for="user" id="id_user-name">User Name</label>
<input type="text" name="user" value="${session.value.user}" oninput=${updateSession} />
</div>
<div>
<label for="bot">Bot name</label>
<label for="bot" id="id_bot-name">AI Name</label>
<input type="text" name="char" value="${session.value.char}" oninput=${updateSession} />
</div>
</fieldset>
<fieldset>
<div>
<label for="template">Prompt template</label>
<textarea id="template" name="template" value="${session.value.template}" rows=4 oninput=${updateSession}/>
</div>
<div>
<label for="template">Chat history template</label>
<textarea id="template" name="historyTemplate" value="${session.value.historyTemplate}" rows=1 oninput=${updateSession}/>
<details>
<summary><span class="summary-title" id="id_prompt-style">Prompt Style</span></summary>
<div class="two-columns">
<div>
<div class="input-container">
<label for="template" class="input-label-sec" id_prompt-template>Prompt Template</label>
<textarea id="template" class="persistent-input-sec" name="template" value="${session.value.template}" rows=6 oninput=${updateSession}/>
</div>
</div>
<div>
<div class="input-container">
<label for="template" class="input-label-sec" id="id_history-template">Chat History</label>
<textarea id="template" class="persistent-input-sec" name="historyTemplate" value="${session.value.historyTemplate}" rows=1 oninput=${updateSession}/>
</div>
</div>
</div>
</details>
<details>
<summary><span class="summary-title" id="id_grammar-title" id_grammar-title>Grammar</span></summary>
${GrammarControl()}
</fieldset>
`
</details>
</fieldset>
`
);
const CompletionConfigForm = () => (
@ -751,50 +761,53 @@
return html`
<form>
<fieldset class="two">
<input type="checkbox" id="toggle" class="toggleCheckbox" onchange=${handleToggleChange} />
<label for="toggle" class="toggleContainer">
<div id="id_toggle-label-chat">Chat</div>
<div id="id_toggle-label-complete">Complete</div>
</label>
<${UserTemplateResetButton}/>
<div>
<label class="slim"><input type="radio" name="type" value="chat" checked=${session.value.type === "chat"} oninput=${updateSession} /> Chat</label>
<label class="slim"><input type="radio" name="type" value="completion" checked=${session.value.type === "completion"} oninput=${updateSession} /> Completion</label>
</div>
</fieldset>
${session.value.type === 'chat' ? ChatConfigForm() : CompletionConfigForm()}
<fieldset class="two">
${IntField({ label: "Predictions", max: 2048, min: -1, name: "n_predict", value: params.value.n_predict })}
${IntField({ label: "Prediction", max: 2048, min: -1, step: 16, name: "n_predict", value: params.value.n_predict, })}
${FloatField({ label: "Temperature", max: 1.5, min: 0.0, name: "temperature", step: 0.01, value: params.value.temperature })}
${FloatField({ label: "Penalize repeat sequence", max: 2.0, min: 0.0, name: "repeat_penalty", step: 0.01, value: params.value.repeat_penalty })}
${IntField({ label: "Consider N tokens for penalize", max: 2048, min: 0, name: "repeat_last_n", value: params.value.repeat_last_n })}
${IntField({ label: "Top-K sampling", max: 100, min: -1, name: "top_k", value: params.value.top_k })}
${FloatField({ label: "Top-P sampling", max: 1.0, min: 0.0, name: "top_p", step: 0.01, value: params.value.top_p })}
${FloatField({ label: "Min-P sampling", max: 1.0, min: 0.0, name: "min_p", step: 0.01, value: params.value.min_p })}
${IntField({ label: "Top-K", max: 100, min: -1, step: 1, name: "top_k", value: params.value.top_k })}
${FloatField({ label: "Repetition Penalty", max: 2.0, min: 0.0, name: "repeat_penalty", step: 0.01, value: params.value.repeat_penalty })}
</fieldset>
<details>
<summary>More options</summary>
<summary><span class="summary-title">Further Options</span></summary>
<fieldset class="two">
${FloatField({ label: "Top-P", max: 1.0, min: 0.0, name: "top_p", step: 0.01, value: params.value.top_p })}
${IntField({ label: "Penalize Last N", max: 2048, min: 0, step: 16, name: "repeat_last_n", value: params.value.repeat_last_n })}
${FloatField({ label: "TFS-Z", max: 1.0, min: 0.0, name: "tfs_z", step: 0.01, value: params.value.tfs_z })}
${FloatField({ label: "Typical P", max: 1.0, min: 0.0, name: "typical_p", step: 0.01, value: params.value.typical_p })}
${FloatField({ label: "Presence penalty", max: 1.0, min: 0.0, name: "presence_penalty", step: 0.01, value: params.value.presence_penalty })}
${FloatField({ label: "Frequency penalty", max: 1.0, min: 0.0, name: "frequency_penalty", step: 0.01, value: params.value.frequency_penalty })}
${FloatField({ label: "Presence Penalty", max: 1.0, min: 0.0, name: "presence_penalty", step: 0.01, value: params.value.presence_penalty })}
${FloatField({ label: "Typical-P", max: 1.0, min: 0.0, name: "typical_p", step: 0.01, value: params.value.typical_p })}
${FloatField({ label: "Frequency Penalty", max: 1.0, min: 0.0, name: "frequency_penalty", step: 0.01, value: params.value.frequency_penalty })}
</fieldset>
<hr />
<hr style="height: 1px; background-color: #ececf1; border: none;" />
<fieldset class="three">
<div>
<label><input type="radio" name="mirostat" value="0" checked=${params.value.mirostat == 0} oninput=${updateParamsInt} /> no Mirostat</label>
<label><input type="radio" name="mirostat" value="0" checked=${params.value.mirostat == 0} oninput=${updateParamsInt} /> Mirostat off</label>
<label><input type="radio" name="mirostat" value="1" checked=${params.value.mirostat == 1} oninput=${updateParamsInt} /> Mirostat v1</label>
<label><input type="radio" name="mirostat" value="2" checked=${params.value.mirostat == 2} oninput=${updateParamsInt} /> Mirostat v2</label>
</div>
${FloatField({ label: "Mirostat tau", max: 10.0, min: 0.0, name: "mirostat_tau", step: 0.01, value: params.value.mirostat_tau })}
${FloatField({ label: "Mirostat eta", max: 1.0, min: 0.0, name: "mirostat_eta", step: 0.01, value: params.value.mirostat_eta })}
</fieldset>
<fieldset>
${IntField({ label: "Show Probabilities", max: 10, min: 0, name: "n_probs", value: params.value.n_probs })}
${FloatField({ label: "Entropy tau", max: 10.0, min: 0.0, name: "mirostat_tau", step: 0.01, value: params.value.mirostat_tau })}
${FloatField({ label: "Learning-rate eta", max: 1.0, min: 0.0, name: "mirostat_eta", step: 0.01, value: params.value.mirostat_eta })}
${IntField({ label: "Show Probabilities", max: 10, min: 0, step: 1, name: "n_probs", value: params.value.n_probs })}
</fieldset>
</details>
</form>
`
}
const probColor = (p) => {
const r = Math.floor(192 * (1 - p));
const g = Math.floor(192 * p);
@ -875,7 +888,7 @@
}
return html`
<span>
${llamaStats.value.predicted_per_token_ms.toFixed()}ms per token, ${llamaStats.value.predicted_per_second.toFixed(2)} tokens per second
${llamaStats.value.predicted_per_token_ms.toFixed()}ms pro Token, ${llamaStats.value.predicted_per_second.toFixed(2)} Token pro Sekunde
</span>
`
}
@ -996,7 +1009,7 @@
return html`
<div class="mode-${session.value.type}">
<header>
<h1>llama.cpp</h1>
<h2>llama.cpp</h2>
</header>
<main id="content">
@ -1009,7 +1022,7 @@
<footer>
<p><${ModelGenerationInfo} /></p>
<p>Powered by <a href="https://github.com/ggerganov/llama.cpp">llama.cpp</a> and <a href="https://ggml.ai">ggml.ai</a>.</p>
<p>Powered By <a href="https://github.com/ggerganov/llama.cpp#readme" target="_blank">llama.cpp</a> and <a href="https://ggml.ai/" target="_blank">ggml.ai</a></p>
</footer>
</div>
`;
@ -1020,6 +1033,7 @@
</head>
<body>
<div id="container">
<input type="file" id="fileInput" accept="image/*" style="display: none;">
</div>
@ -1027,4 +1041,3 @@
</body>
</html>

View file

@ -0,0 +1,201 @@
// extended list
export const promptFormats = {
"airoborosl2": {
template: "{{prompt}} {{history}} {{char}}",
historyTemplate: "{{name}}: {{message}}",
char: "ASSISTANT",
user: "USER"
},
"alpaca": {
template: "{{prompt}}\n\n{{history}}\n\n### {{char}}:",
historyTemplate: "### {{name}}:\n{{message}}",
char: "Response",
user: "Instruction"
},
"chatml": {
template: "<|im_start|>system\n{{prompt}}<|im_end|>\n{{history}}\n<|im_start|>{{char}}",
historyTemplate: "<|im_start|>{{user}}\n{{message}}<|im_end|>",
char: "assistant",
user: "user"
},
"codeCherryPop": {
template: "{{prompt}}\n\n{{history}}\n\n### {{char}}:",
historyTemplate: "### {{name}}:\n{{message}}",
char: "Response",
user: "Instruction"
},
"deepseekCoder": {
template: "{{prompt}}\n{{history}}\n### {{char}}:",
historyTemplate: "### {{name}}:\n{{message}}",
char: "Response",
user: "Instruction"
},
"dolphinMistral": {
template: "<|im_start|>system\n{{prompt}}<|im_end|>\n{{history}}\n<|im_start|>{{char}}",
historyTemplate: "<|im_start|>{{user}}\n{{message}}<|im_end|>",
char: "assistant",
user: "user"
},
"evolvedSeeker": {
template: "<|im_start|>system\n{{prompt}}<|im_end|>\n{{history}}\n<|im_start|>{{char}}",
historyTemplate: "<|im_start|>{{user}}\n{{message}}<|im_end|>",
char: "assistant",
user: "user"
},
"goliath120b": {
template: "{{prompt}}\n\n{{history}}\n{{char}}:",
historyTemplate: "{{name}}: {{message}}",
char: "ASSISTANT",
user: "USER"
},
"jordan": {
template: "{{prompt}}\n\n{{history}}\n{{char}}:",
historyTemplate: "{{name}}: {{message}}",
char: "ASSISTANT",
user: "USER"
},
"leoHessianai": {
template: "<|im_start|>system\n{{prompt}}<|im_end|>\n{{history}}\n<|im_start|>{{char}}",
historyTemplate: "<|im_start|>{{user}}\n{{message}}<|im_end|>",
char: "assistant",
user: "user"
},
"leoMistral": {
template: "{{prompt}} {{history}} {{char}}",
historyTemplate: "{{name}}: {{message}}",
char: "ASSISTANT",
user: "USER"
},
"llama2": {
template: "<s>[INST] <<SYS>>\n{{prompt}}\n<</SYS>>\n\n{{history}} [/INST] {{char}} </s><s>[INST] ",
historyTemplate: "{{name}}: {{message}} [/INST]",
char: "llama",
user: "user"
},
"marx": {
template: "{{history}}\n{{char}}:",
historyTemplate: "{{name}}: {{message}}",
char: "ASSISTANT",
user: "USER"
},
"med42": {
template: "<|system|>: {{prompt}}\n{{history}}\n{{char}}",
historyTemplate: "<|{{name}}|>:{{message}}",
char: "assistant",
user: "prompter"
},
"metaMath": {
template: "{{prompt}}\n\n{{history}}\n\n### {{char}}:",
historyTemplate: "### {{name}}:\n{{message}}",
char: "Response",
user: "Instruction"
},
"mistralInstruct": {
template: "<s>{{history}} [/INST]\n{{char}}</s>",
historyTemplate: "{{name}} {{message}}",
char: "",
user: "[INST] "
},
"mistralOpenOrca": {
template: "<|im_start|>system\n{{prompt}}<|im_end|>\n{{history}}\n<|im_start|>{{char}}",
historyTemplate: "<|im_start|>{{user}}\n{{message}}<|im_end|>",
char: "assistant",
user: "user"
},
"mythomax": {
template: "{{prompt}}\n\n{{history}}\n\n### {{char}}:",
historyTemplate: "### {{name}}:\n{{message}}",
char: "Response",
user: "Instruction"
},
"neuralchat": {
template: "### System:\n{{prompt}}\n{{history}}\n### {{char}}:",
historyTemplate: "### {{name}}:\n{{message}}",
char: "Assistant",
user: "User"
},
"nousCapybara": {
template: "{{history}}\n{{char}}",
historyTemplate: "\n{{name}}: {{message}}",
char: "ASSISTANT",
user: "USER"
},
"nousHermes": {
template: "### Instruction: {{prompt}}\n{{history}}\n### {{char}}:",
historyTemplate: "\n### {{name}}: {{message}}",
char: "Response",
user: "Input"
},
"openhermes2Mistral": {
template: "<|im_start|>system\n{{prompt}}<|im_end|>\n{{history}}\n<|im_start|>{{char}}",
historyTemplate: "<|im_start|>{{user}}\n{{message}}<|im_end|>",
char: "assistant",
user: "user"
},
"orcamini": {
template: "{{prompt}}\n\n{{history}}\n\n### {{char}}:",
historyTemplate: "### {{name}}:\n{{message}}",
char: "Response",
user: "Instruction"
},
"sauerkraut": {
template: "{{prompt}}\n{{history}}\n{{char}}:",
historyTemplate: "{{name}}: {{message}}",
char: "Assistant",
user: "User"
},
"samantha": {
template: "{{prompt}}\n\n{{history}}\n{{char}}:",
historyTemplate: "{{name}}: {{message}}",
char: "ASSISTANT",
user: "USER"
},
"samanthaMistral": {
template: "<|im_start|>system\n{{prompt}}<|im_end|>\n{{history}}\n<|im_start|>{{char}}",
historyTemplate: "<|im_start|>{{user}}\n{{message}}<|im_end|>",
char: "assistant",
user: "user"
},
"scarlett": {
template: "{{prompt}}\n\n{{history}}\n{{char}}:",
historyTemplate: "{{name}}: {{message}}",
char: "ASSISTANT",
user: "USER"
},
"sydney": {
template: "{{prompt}}\n\n{{history}}\n{{char}}",
historyTemplate: "### {{name}}:\n{{message}}\n",
char: "Response",
user: "Instruction"
},
"synthia": {
template: "SYSTEM: {{prompt}}\n{{history}}\n{{char}}:",
historyTemplate: "{{name}}: {{message}}",
char: "ASSISTANT",
user: "USER"
},
"tess": {
template: "SYSTEM: {{prompt}}\n{{history}}\n{{char}}:",
historyTemplate: "{{name}}: {{message}}",
char: "ASSISTANT",
user: "USER"
},
"vicuna": {
template: "{{prompt}}\n{{history}}\n{{char}}:",
historyTemplate: "\n{{name}}: {{message}}",
char: "ASSISTANT",
user: "USER"
},
"yi34b": {
template: "{{history}} {{char}}",
historyTemplate: "{{name}}: {{message}}",
char: "Assistant",
user: "Human"
},
"zephyr": {
template: "<|system|>\n{{prompt}}</s>\n{{history}}\n{{char}}",
historyTemplate: "<|{{name}}|>\n{{message}}</s>",
char: "assistant",
user: "user"
}
};

View file

@ -0,0 +1,642 @@
:root {
/* Bright topic colors*/
--primary-color: #11a37f;
--secondary-color: #264653;
--background-color: #fefeff;
--text-color: #333333;
--border-color: #cccccc;
--button-hover-color: #197f63;
--button-hover-alert: #e76f51;
--input-focus-border-color: #11a37f;
--loading-color-1: #eeeeee00;
--loading-color-2: #eeeeeeff;
/* Dark topic colors (to add/adapt — WIP) */
--dark-primary-color: #13b896;
--dark-secondary-color: #1a2a33;
--dark-background-color: #121212;
--dark-text-color: #e0e0e0;
--dark-border-color: #333333;
--dark-button-hover-color: #16876a;
--dark-button-hover-alert: #cf6252;
--dark-input-focus-border-color: #13b896;
--dark-loading-color-1: #33333300;
--dark-loading-color-2: #333333ff;
}
body {
font-family: 'Arial', sans-serif;
font-size: 90%;
background-color: var(--background-color);
color: var(--text-color);
max-width: 600px;
min-width: 300px;
line-height: 1.2;
margin: 0 auto;
padding: 0 0.5em;
}
code, pre code {
font-family: 'Courier New', monospace;
}
#container {
margin: 0em auto;
display: flex;
flex-direction: column;
justify-content: space-between;
height: 100%;
}
main {
margin: 3px;
display: flex;
flex-direction: column;
justify-content: space-between;
gap: 1em;
flex-grow: 1;
overflow-y: auto;
border: 1px solid var(--border-color);
border-radius: 5px;
padding: 0.5em;
}
p {
overflow-wrap: break-word;
word-wrap: break-word;
hyphens: auto;
margin-top: 0.5em;
margin-bottom: 0.5em;
}
#write form {
margin: 1em 0 0 0;
display: flex;
flex-direction: column;
gap: 0.5em;
align-items: stretch;
}
.right {
display: flex;
flex-direction: row;
gap: 0.5em;
justify-content: flex-end;
}
.two-columns {
display: grid;
grid-template-columns: 1fr 1fr;
gap: 1em;
}
.summary-title {
font-weight: 600;
font-size: x-small;
color: var(--secondary-color);
text-transform: uppercase;
}
fieldset {
border: none;
padding: 0;
margin: 0;
color: #353740;
}
fieldset.two {
display: grid;
grid-template: "a a";
gap: 1em;
font-size: x-small;
color: var(--text-color);
}
fieldset.three {
display: grid;
grid-template: "a a a";
gap: 1em;
font-size: x-small;
color: var(--text-color);
}
fieldset.names {
display: grid;
grid-template: "a a";
gap: 1em;
font-size: x-small;
color: var(--primary-color);
padding-top: 16px;
padding-bottom: 16px;
text-transform: uppercase;
font-weight: 600;
}
fieldset.dropdowns {
display: flex;
grid-template: "a a";
gap: 1em;
font-size: x-small;
color: var(--primary-color);
padding-top: 16px;
padding-bottom: 16px;
text-transform: uppercase;
font-weight: 600;
}
.names input[type="text"] {
font-family: Arial, sans-serif;
font-size: medium;
color: var(--text-color);
font-weight: 500;
padding: 5px;
border: 1px solid var(--border-color);
}
.names input[type="text"].special-text-format {
font-style: italic;
font-weight: bold;
color: #FF0000;
}
details {
border: 1px solid #ececf1;
border-radius: 10px;
padding: 0.5em 0.5em 0;
margin-top: 0.5em;
}
summary {
font-weight: bold;
margin: -0.5em -0.5em 0;
padding: 0.5em;
cursor: pointer;
}
details[open] {
padding: 0.5em;
}
textarea-sec, input-sec, button-sec {
padding: 10px;
height: 40px;
align-items: center;
}
textarea-sec::placeholder, input-sec::placeholder {
padding-left: 10px;
}
.toggleCheckbox {
display: none;
}
.toggleContainer {
position: relative;
display: grid;
grid-template-columns: repeat(2, 1fr);
width: fit-content;
border: 3px solid var(--border-color);
border-radius: 20px;
background: var(--border-color);
font-size: small;
cursor: pointer;
overflow: hidden;
}
.toggleContainer::before {
content: '';
position: absolute;
width: 50%;
height: 100%;
left: 0%;
border-radius: 20px;
background: var(--primary-color);
transition: all 0.3s;
}
.toggleContainer div {
padding: 6px;
text-align: center;
z-index: 1;
transition: color 0.3s;
}
.toggleCheckbox:checked + .toggleContainer::before {
left: 50%;
}
.toggleCheckbox:checked + .toggleContainer div:first-child {
color: var(--text-color);
}
.toggleCheckbox:checked + .toggleContainer div:last-child {
color: var(--background-color);
}
.toggleCheckbox + .toggleContainer div:first-child {
color: var(--background-color);
}
.toggleCheckbox + .toggleContainer div:last-child {
color: var(--text-color);
}
.button-container {
display: flex;
justify-content: flex-end;
}
.reset-button {
width: fit-content;
height: fit-content;
background-color: #cccccc;
color: #353740;
font-size: x-small;
font-weight: 600;
border: 1px solid #cccccc;
border-radius: 50px;
overflow: hidden;
}
.reset-button:hover {
color: var(--background-color);
background-color: var(--button-hover-alert);
border: 1px solid var(--button-hover-alert);
}
select {
padding: 5px;
border-radius: 8px;
border: 1px solid #ccc;
background-color: white;
cursor: pointer;
}
select:focus {
border: 1px solid var(--input-focus-border-color);
box-shadow: 0 0 1px var(--input-focus-border-color);
}
button {
background-color: var(--primary-color);
color: white;
border: none;
padding: 10px 20px;
text-align: center;
text-decoration: none;
display: inline-block;
font-size: x-small;
font-weight: 600;
margin: 4px 2px;
transition: background-color 0.3s, transform 0.3s;
cursor: pointer;
border-radius: 12px;
border: 1px solid #f0f0f0;
}
button:hover {
background-color: var(--button-hover-color);
font-size: x-small;
font-weight: 600;
}
button:disabled {
color: #acacbd;
background-color: #f7f7f8;
cursor: not-allowed;
font-size: x-small;
font-weight: 600;
}
.button-back {
background-color: #cccccc;
color: #353740;
border: none;
padding: 10px 20px;
text-align: center;
text-decoration: none;
display: inline-block;
font-size: x-small;
font-weight: 600;
margin: 4px 2px;
transition: background-color 0.3s, transform 0.3s;
cursor: pointer;
border-radius: 12px;
border: 1px solid #f0f0f0;
}
.button-back:hover {
background-color: #353740;
color: #f0f0f0;
border: none;
padding: 10px 20px;
text-align: center;
text-decoration: none;
display: inline-block;
font-size: x-small;
font-weight: 600;
margin: 4px 2px;
transition: background-color 0.3s, transform 0.3s;
cursor: pointer;
border-radius: 12px;
border: 1px solid #f0f0f0;
}
.button-grammar {
background-color: var(--primary-color);
color: white;
border: none;
padding: 10px 20px;
text-align: center;
text-decoration: none;
display: inline-block;
font-size: small;
margin: 2px 2px;
transition: background-color 0.3s, transform 0.3s;
cursor: pointer;
border-radius: 10px;
border: 1px solid #f0f0f0;
}
.button-grammar:hover {
background-color: var(--primary-color);
color: white;
border: none;
padding: 10px 20px;
text-align: center;
text-decoration: none;
display: inline-block;
font-size: small;
margin: 2px 2px;
transition: background-color 0.3s, transform 0.3s;
cursor: pointer;
border-radius: 10px;
border: 1px solid #f0f0f0;
}
.prob-set {
padding: 0.3em;
border-bottom: 1px solid #ccc;
}
.popover-content {
position: absolute;
background-color: white;
padding: 0.2em;
box-shadow: 0 0 10px rgba(0, 0, 0, 0.1);
}
textarea {
padding: 5px;
flex-grow: 1;
width: 100%;
border-radius: 8px;
border: 1px solid #ccc;
resize: none;
height: 6em;
}
input[type="text"],
input[type="range"] {
padding: 5px;
border-radius: 8px;
border: 1px solid #ccc;
}
input[type="text"]:focus {
outline: none;
border: 1px solid var(--input-focus-border-color);
box-shadow: 0 0 1px var(--input-focus-border-color);
}
input[type="text"] {
border-radius: 8px;
}
input[type="range"] {
-webkit-appearance: none;
width: 80%;
height: 1px;
border: 3px solid var(--background-color);
border-radius: 8px;
background: var(--border-color);
outline: none;
opacity: 0.7;
-webkit-transition: .2s;
transition: opacity .2s;
}
input[type="range"]:hover {
opacity: 1;
}
input[type="range"]:focus,
textarea:focus {
outline: none;
border: 1px solid var(--primary-color);
box-shadow: 0 0 1px var(--input-focus-border-color);
}
input[type="range"]::-webkit-slider-thumb {
-webkit-appearance: none;
appearance: none;
width: 6px;
height: 25px;
border: 1px solid var(--border-color);
border-radius: 5px;
background: var(--background-color);
background-color: var(--background-color);
cursor: pointer;
}
input[type="range"]::-moz-range-thumb {
width: 3px;
height: 25px;
border: 1px solid var(--border-color);
border-radius: 5px;
background: var(--background-color);
cursor: pointer;
}
input[type="range"]::-webkit-slider-runnable-track {
background-size: var(--slider-track-size);
}
input[type="range"]:focus {
background-size: var(--slider-track-size-focus);
}
input[type="radio"] {
accent-color: var(--primary-color);
}
.chat-input-container {
position: relative;
}
.chat-input-label {
position: absolute;
top: 0;
left: 0;
color: var(--text-color);
pointer-events: none;
margin-left: 5px;
margin-top: 5px;
}
textarea#chat-input {
padding-top: 10px;
padding-left: 10px;
font-size: medium;
}
.input-container {
position: relative;
}
.input-label {
position: absolute;
top: 0;
left: 0;
color: var(--primary-color);
pointer-events: none;
margin-left: 13px;
margin-top: 16px;
text-transform: uppercase;
font-weight: 600;
font-size: small;
}
.input-label-sec {
position: absolute;
top: 0;
left: 0;
color: var(--text-color);
pointer-events: none;
margin-left: 13px;
margin-top: 16px;
text-transform: uppercase;
font-weight: 600;
font-size: x-small;
}
textarea.persistent-input {
padding-top: 42px;
padding-left: 11px;
font-size: medium;
}
textarea.persistent-input-sec {
padding-top: 42px;
padding-left: 11px;
font-size: small;
}
.persistent-input-sec {
height: auto;
min-height: 150px;
}
.json-schema-controls {
margin-top: 10px;
display: flex;
width: 100%;
/*gap: 1em; Fügt einen Abstand zwischen den Elementen hinzu */
}
.json-schema-controls > * {
flex: 1; /* Teilt den verfügbaren Platz gleichmäßig auf */
}
pre code {
display: block;
background-color: #222;
color: #ddd;
}
code {
font-family: monospace;
padding: 0.1em 0.3em;
border-radius: 3px;
}
fieldset label {
margin: 0.5em 0;
display: block;
}
fieldset label.slim {
margin: 0 0.5em;
display: inline;
}
header,
footer {
text-align: center;
}
footer {
font-size: 80%;
color: #888;
text-align: center;
}
footer a {
color: #333; /* Farbe des Links */
text-decoration: none; /* Keine Unterstreichung */
font-weight: bold; /* Fettgedruckt */
}
footer a:hover {
color: #555; /* Farbe des Links beim Hovern */
text-decoration: underline; /* Unterstreichung beim Hovern */
}
.mode-chat textarea[name=prompt] {
height: 8.5em;
}
.mode-completion textarea[name=prompt] {
height: 30em;
}
@keyframes loading-bg-wipe {
0% {
background-position: 0%;
}
100% {
background-position: 100%;
}
}
.loading {
background-size: 50% 100%;
background-image: linear-gradient(90deg, var(--loading-color-1), var(--loading-color-2), var(--loading-color-1));
animation: loading-bg-wipe 2s linear infinite;
}
@media (prefers-color-scheme: dark) {
:root {
--primary-color: var(--dark-primary-color);
--secondary-color: var(--dark-secondary-color);
--background-color: var(--dark-background-color);
--text-color: var(--dark-text-color);
--border-color: var(--dark-border-color);
--button-hover-color: var(--dark-button-hover-color);
--button-hover-alert: var(--dark-button-hover-alert);
--input-focus-border-color: var(--dark-input-focus-border-color);
--loading-color-1: var(--dark-loading-color-1);
--loading-color-2: var(--dark-loading-color-2);
--loading-color-1: #22222200;
--loading-color-2: #222222ff;
}
body {
background-color: black;
color: white;
}
.popover-content {
background-color: black;
}
}

295
examples/start-server.sh Executable file
View file

@ -0,0 +1,295 @@
#!/bin/bash
# Set default values
model_path="../models/"
mmproj_path=""
threads=4
ctx_size=512
batch_size=512
n_gpu_layers=0
cont_batching="off"
mlock="off"
no_mmap="off"
host="127.0.0.1"
port="8080"
advanced_options=""
model_selection() {
# User selects a file or folder
exec 3>&1
model_path=$(dialog --backtitle "Model Selection" \
--title "Select Model File or Folder" \
--fselect "$HOME/" 14 60 \
2>&1 1>&3)
exit_status=$?
exec 3>&-
# Check whether user has selected 'Cancel'
if [ $exit_status = 1 ]; then
return
fi
# If a folder has been selected, search for *.gguf files
if [ -d "$model_path" ]; then
model_files=($(find "$model_path" -name "*.gguf" 2>/dev/null))
elif [ -f "$model_path" ]; then
model_files=("$model_path")
else
dialog --backtitle "Model Selection" \
--title "Invalid Selection" \
--msgbox "The selected path is not valid." 7 50
return
fi
# Selection menu for models found
exec 3>&1
model_choice=$(dialog --backtitle "Model Selection" \
--title "Select a Model File" \
--menu "Choose one of the found models:" 15 60 4 \
$(for i in "${!model_files[@]}"; do echo "$((i+1))" "$(basename "${model_files[$i]}")"; done) \
2>&1 1>&3)
exit_status=$?
exec 3>&-
# Check whether user has selected 'Cancel'
if [ $exit_status = 1 ]; then
return
fi
# Set path to the selected model
model_path=${model_files[$((model_choice-1))]}
}
options() {
# Show form for entering the options
exec 3>&1
form_values=$(dialog --backtitle "Options Configuration" \
--title "Set Options" \
--form "Enter the values for the following options:" \
15 50 0 \
"Number of Threads (-t):" 1 1 "$threads" 1 25 25 5 \
"Context Size (-c):" 2 1 "$ctx_size" 2 25 25 5 \
"Batch Size (-b):" 3 1 "$batch_size" 3 25 25 5 \
"GPU Layers (-ngl):" 4 1 "$n_gpu_layers" 4 25 25 5 \
2>&1 1>&3)
exit_status=$?
exec 3>&-
# Check whether user has selected 'Cancel'
if [ $exit_status = 1 ]; then
return
fi
# Save the entered values in the corresponding variables
IFS=$'\n' read -r threads ctx_size batch_size n_gpu_layers <<< "$form_values"
}
further_options() {
# Initial values for the checkboxes based on current settings
cb_value=$([ "$cont_batching" = "on" ] && echo "on" || echo "off")
mlock_value=$([ "$mlock" = "on" ] && echo "on" || echo "off")
no_mmap_value=$([ "$no_mmap" = "on" ] && echo "on" || echo "off")
# Show dialog for setting options
exec 3>&1
choices=$(dialog --backtitle "Further Options" \
--title "Boolean Options" \
--checklist "Select options:" 15 60 3 \
"1" "Continuous Batching (-cb)" $cb_value \
"2" "Memory Lock (--mlock)" $mlock_value \
"3" "No Memory Map (--no-mmap)" $no_mmap_value \
2>&1 1>&3)
exit_status=$?
exec 3>&-
# Check whether user has selected 'Cancel'
if [ $exit_status = 1 ]; then
return
fi
# Set options based on user selection
cont_batching="off"
mlock="off"
no_mmap="off"
for choice in $choices; do
case $choice in
1) cont_batching="on" ;;
2) mlock="on" ;;
3) no_mmap="on" ;;
esac
done
}
advanced_options() {
# Input fields for Advanced Options
exec 3>&1
advanced_values=$(dialog --backtitle "Advanced Options" \
--title "Advanced Server Configuration" \
--form "Enter the advanced configuration options:" \
15 60 0 \
"Host IP:" 1 1 "$host" 1 15 15 0 \
"Port:" 2 1 "$port" 2 15 5 0 \
"Additional Options:" 3 1 "$advanced_options" 3 15 30 0 \
2>&1 1>&3)
exit_status=$?
exec 3>&-
# Check whether user has selected 'Cancel'
if [ $exit_status = 1 ]; then
return
fi
# Read the entries and save them in the corresponding variables
read -r host port advanced_options <<< "$advanced_values"
}
start_server() {
# Compiling the command with the selected options
cmd="../server"
[ -n "$model_path" ] && cmd+=" -m $model_path"
[ -n "$mmproj_path" ] && cmd+=" --mmproj $mmproj_path"
[ "$threads" -ne 4 ] && cmd+=" -t $threads"
[ "$ctx_size" -ne 512 ] && cmd+=" -c $ctx_size"
[ "$batch_size" -ne 512 ] && cmd+=" -b $batch_size"
[ "$n_gpu_layers" -ne 0 ] && cmd+=" -ngl $n_gpu_layers"
[ "$cont_batching" = "on" ] && cmd+=" -cb"
[ "$mlock" = "on" ] && cmd+=" --mlock"
[ "$no_mmap" = "off" ] && cmd+=" --no-mmap"
[ -n "$host" ] && cmd+=" --host $host"
[ -n "$port" ] && cmd+=" --port $port"
[ -n "$advanced_options" ] && cmd+=" $advanced_options"
eval "$cmd"
read -p 'Do not forget to quit the server later with Ctrl+C as soon as you are finished. Press Enter to continue...'
}
# Function to save the current configuration
save_config() {
exec 3>&1
config_file=$(dialog --backtitle "Save Configuration" \
--title "Save Configuration File" \
--fselect "$HOME/" 14 60 \
2>&1 1>&3)
exit_status=$?
exec 3>&-
# Check whether user has selected 'Cancel'
if [ $exit_status = 1 ]; then
return
fi
# Saving the configuration to the file
cat > "$config_file" << EOF
model_path=$model_path
mmproj_path=$mmproj_path
threads=$threads
ctx_size=$ctx_size
batch_size=$batch_size
n_gpu_layers=$n_gpu_layers
cont_batching=$cont_batching
mlock=$mlock
no_mmap=$no_mmap
host=$host
port=$port
advanced_options=$advanced_options
EOF
dialog --backtitle "Save Configuration" \
--title "Configuration Saved" \
--msgbox "Configuration has been saved to $config_file" 7 50
}
# Function for loading the configuration from a file
load_config() {
exec 3>&1
config_file=$(dialog --backtitle "Load Configuration" \
--title "Load Configuration File" \
--fselect "$HOME/" 14 60 \
2>&1 1>&3)
exit_status=$?
exec 3>&-
# Check whether user has selected 'Cancel'
if [ $exit_status = 1 ]; then
return
fi
# Check whether the configuration file exists
if [ ! -f "$config_file" ]; then
dialog --backtitle "Load Configuration" \
--title "File Not Found" \
--msgbox "The file $config_file was not found." 7 50
return
fi
# Load configuration from the file
source "$config_file"
dialog --backtitle "Load Configuration" \
--title "Configuration Loaded" \
--msgbox "Configuration has been loaded from $config_file" 7 50
}
# Function to show the main menu
show_main_menu() {
while true; do
exec 3>&1
selection=$(dialog \
--backtitle "Server Configuration" \
--title "Main Menu" \
--clear \
--cancel-label "Exit" \
--menu "Please select:" 15 50 6 \
"1" "Model Selection" \
"2" "Multimodal Model Selection" \
"3" "Options" \
"4" "Further Options" \
"5" "Advanced Options" \
"6" "Save Config" \
"7" "Load Config" \
"8" "Start Server" \
2>&1 1>&3)
exit_status=$?
exec 3>&-
# Check whether user has selected 'Exit'
if [ $exit_status = 1 ]; then
clear
exit
fi
# Call up the corresponding function based on the selection
case $selection in
1) model_selection ;;
2) multimodal_model_selection ;;
3) options ;;
4) further_options ;;
5) advanced_options ;;
6) save_config ;;
7) load_config ;;
8) start_server ;;
*) clear ;;
esac
done
}
# Show main menu
show_main_menu