SimpleChat: Consolidate global vars into gMe, Display to user
This allows the end user to see the settings used by the logic, and also allows users to change/update the settings, if they want to, by using the devel-tools/console
This commit is contained in:
parent
4b29736da5
commit
f0dd91d550
2 changed files with 68 additions and 23 deletions
|
@ -93,8 +93,17 @@ Once inside
|
||||||
|
|
||||||
## Devel note
|
## Devel note
|
||||||
|
|
||||||
gChatRequestOptions maintains the list of options/fields to send along with chat request,
|
Me/gMe consolidates the settings which control the behaviour into one object.
|
||||||
irrespective of whether /chat/completions or /completions endpoint.
|
One can see the current settings, as well as change/update them, using the browser's devel-tools/console.
|
||||||
|
|
||||||
|
bCompletionFreshChatAlways - whether Completion mode collates completion history when communicating
|
||||||
|
with the server.
|
||||||
|
|
||||||
|
bCompletionInsertStandardRolePrefix - whether Completion mode inserts role related prefix wrt the
|
||||||
|
messages that get inserted into prompt field wrt /Completion endpoint.
|
||||||
|
|
||||||
|
chatRequestOptions - maintains the list of options/fields to send along with chat request,
|
||||||
|
irrespective of whether /chat/completions or /completions endpoint.
|
||||||
|
|
||||||
If you want to add additional options/fields to send to the server/ai-model, and or
|
If you want to add additional options/fields to send to the server/ai-model, and or
|
||||||
modify the existing options value, for now you can update this global var using
|
modify the existing options value, for now you can update this global var using
|
||||||
|
|
|
@ -14,6 +14,7 @@ class ApiEP {
|
||||||
}
|
}
|
||||||
|
|
||||||
let gUsageMsg = `
|
let gUsageMsg = `
|
||||||
|
<p class="role-system">Usage</p>
|
||||||
<ul class="ul1">
|
<ul class="ul1">
|
||||||
<li> Set system prompt above, to try to control ai response characteristic, if model supports same.</li>
|
<li> Set system prompt above, to try to control ai response characteristic, if model supports same.</li>
|
||||||
<ul class="ul2">
|
<ul class="ul2">
|
||||||
|
@ -32,11 +33,6 @@ let gUsageMsg = `
|
||||||
</ul>
|
</ul>
|
||||||
`;
|
`;
|
||||||
|
|
||||||
// Add needed fields wrt json object to be sent wrt LLM web services completions endpoint.
|
|
||||||
let gChatRequestOptions = {
|
|
||||||
"temperature": 0.7,
|
|
||||||
"max_tokens": 512
|
|
||||||
};
|
|
||||||
|
|
||||||
class SimpleChat {
|
class SimpleChat {
|
||||||
|
|
||||||
|
@ -92,6 +88,7 @@ class SimpleChat {
|
||||||
} else {
|
} else {
|
||||||
if (bClear) {
|
if (bClear) {
|
||||||
div.innerHTML = gUsageMsg;
|
div.innerHTML = gUsageMsg;
|
||||||
|
gMe.show_info(div);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -103,8 +100,8 @@ class SimpleChat {
|
||||||
* @param {Object} obj
|
* @param {Object} obj
|
||||||
*/
|
*/
|
||||||
request_jsonstr(obj) {
|
request_jsonstr(obj) {
|
||||||
for(let k in gChatRequestOptions) {
|
for(let k in gMe.chatRequestOptions) {
|
||||||
obj[k] = gChatRequestOptions[k];
|
obj[k] = gMe.chatRequestOptions[k];
|
||||||
}
|
}
|
||||||
return JSON.stringify(obj);
|
return JSON.stringify(obj);
|
||||||
}
|
}
|
||||||
|
@ -206,8 +203,6 @@ let gChatURL = {
|
||||||
'chat': `${gBaseURL}/chat/completions`,
|
'chat': `${gBaseURL}/chat/completions`,
|
||||||
'completion': `${gBaseURL}/completions`,
|
'completion': `${gBaseURL}/completions`,
|
||||||
}
|
}
|
||||||
const gbCompletionFreshChatAlways = true;
|
|
||||||
let gbCompletionInsertStandardRolePrefix = false;
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -395,7 +390,7 @@ class MultiChatUI {
|
||||||
// So if user wants to simulate a multi-chat based completion query,
|
// So if user wants to simulate a multi-chat based completion query,
|
||||||
// they will have to enter the full thing, as a suitable multiline
|
// they will have to enter the full thing, as a suitable multiline
|
||||||
// user input/query.
|
// user input/query.
|
||||||
if ((apiEP == ApiEP.Completion) && (gbCompletionFreshChatAlways)) {
|
if ((apiEP == ApiEP.Completion) && (gMe.bCompletionFreshChatAlways)) {
|
||||||
chat.clear();
|
chat.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -413,7 +408,7 @@ class MultiChatUI {
|
||||||
if (apiEP == ApiEP.Chat) {
|
if (apiEP == ApiEP.Chat) {
|
||||||
theBody = chat.request_messages_jsonstr();
|
theBody = chat.request_messages_jsonstr();
|
||||||
} else {
|
} else {
|
||||||
theBody = chat.request_prompt_jsonstr(gbCompletionInsertStandardRolePrefix);
|
theBody = chat.request_prompt_jsonstr(gMe.bCompletionInsertStandardRolePrefix);
|
||||||
}
|
}
|
||||||
|
|
||||||
this.elInUser.value = "working...";
|
this.elInUser.value = "working...";
|
||||||
|
@ -525,17 +520,58 @@ class MultiChatUI {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
let gMultiChat;
|
class Me {
|
||||||
const gChatIds = [ "Default", "Other" ];
|
|
||||||
|
constructor() {
|
||||||
|
this.defaultChatIds = [ "Default", "Other" ];
|
||||||
|
this.multiChat = new MultiChatUI();
|
||||||
|
this.bCompletionFreshChatAlways = true;
|
||||||
|
this.bCompletionInsertStandardRolePrefix = false;
|
||||||
|
// Add needed fields wrt json object to be sent wrt LLM web services completions endpoint.
|
||||||
|
this.chatRequestOptions = {
|
||||||
|
"temperature": 0.7,
|
||||||
|
"max_tokens": 512
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {HTMLDivElement} elDiv
|
||||||
|
*/
|
||||||
|
show_info(elDiv) {
|
||||||
|
|
||||||
|
var p = document.createElement("p");
|
||||||
|
p.innerText = "Settings (gMe)";
|
||||||
|
p.className = "role-system";
|
||||||
|
elDiv.appendChild(p);
|
||||||
|
|
||||||
|
var p = document.createElement("p");
|
||||||
|
p.innerText = `bCompletionFreshChatAlways:${this.bCompletionFreshChatAlways}`;
|
||||||
|
elDiv.appendChild(p);
|
||||||
|
|
||||||
|
p = document.createElement("p");
|
||||||
|
p.innerText = `bCompletionInsertStandardRolePrefix:${this.bCompletionInsertStandardRolePrefix}`;
|
||||||
|
elDiv.appendChild(p);
|
||||||
|
|
||||||
|
p = document.createElement("p");
|
||||||
|
p.innerText = `chatRequestOptions:${JSON.stringify(this.chatRequestOptions)}`;
|
||||||
|
elDiv.appendChild(p);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/** @type {Me} */
|
||||||
|
let gMe;
|
||||||
|
|
||||||
function startme() {
|
function startme() {
|
||||||
console.log("INFO:SimpleChat:StartMe:Starting...");
|
console.log("INFO:SimpleChat:StartMe:Starting...");
|
||||||
gMultiChat = new MultiChatUI();
|
gMe = new Me();
|
||||||
for (let cid of gChatIds) {
|
for (let cid of gMe.defaultChatIds) {
|
||||||
gMultiChat.new_chat_session(cid);
|
gMe.multiChat.new_chat_session(cid);
|
||||||
}
|
}
|
||||||
gMultiChat.setup_ui(gChatIds[0], true);
|
gMe.multiChat.setup_ui(gMe.defaultChatIds[0], true);
|
||||||
gMultiChat.show_sessions();
|
gMe.multiChat.show_sessions();
|
||||||
}
|
}
|
||||||
|
|
||||||
document.addEventListener("DOMContentLoaded", startme);
|
document.addEventListener("DOMContentLoaded", startme);
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue