SimpleChat: GarbageTrim enable/disable, show trimmed part if any

HanishKVC 2024-05-27 04:18:51 +05:30
parent 1db965d00d
commit 42b4fe555e
2 changed files with 28 additions and 2 deletions


@@ -116,7 +116,7 @@ Skeletal logic has been implemented to explore some of the end points and ideas/
Me/gMe consolidates the settings which control the behaviour into one object.
One can see the current settings, as well as change/update them using browsers devel-tool/console.
It is attached to the document object.
It is attached to the document object. Some of these can also be updated using the Settings UI.
bCompletionFreshChatAlways - whether Completion mode collates complete/sliding-window history when
communicating with the server or only sends the latest user query/message.
@@ -124,6 +124,16 @@ It is attached to the document object.
bCompletionInsertStandardRolePrefix - whether Completion mode inserts a role-related prefix before
each message that gets inserted into the prompt field of the /completions endpoint.
bTrimGarbage - whether garbage repetition at the end of the generated ai response should be
trimmed or left as is. If enabled, the garbage is trimmed so that it won't be sent back as part of
the subsequent chat history. At the same time the trimmed text is shown to the user once, when the
response is generated, so the user can check whether any useful info/data was present in it.
One may then be able to request the ai-model to continue from its last response (provided chat
history is enabled through the chat-history-in-context setting), and chances are the ai-model will
continue starting from the trimmed part, thus allowing a long response to be recovered/continued
indirectly, in many cases.
chatRequestOptions - maintains the list of options/fields to send along with the chat request,
irrespective of whether the /chat/completions or /completions endpoint is used.
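
The actual trimming in this commit is delegated to `du.trim_hist_garbage_at_end_loop()` (from the module imported as `du`, which is not part of this diff), so its exact heuristics are not shown here. Purely as an illustration of the idea, here is a minimal sketch using a hypothetical helper and a simplistic fixed-size probe; the real helper is more involved:

```javascript
// Hypothetical sketch, NOT the actual du.trim_hist_garbage_at_end_loop():
// if the text ends with a chunk that exactly repeats the chunk just before it,
// drop the trailing copy, and keep doing so while the repetition continues.
function trim_trailing_repetition(text, probeLen = 8) {
    let trimmed = text;
    while (trimmed.length >= 2 * probeLen) {
        const tail = trimmed.slice(-probeLen);
        const prev = trimmed.slice(-2 * probeLen, -probeLen);
        if (tail !== prev) {
            break;
        }
        trimmed = trimmed.slice(0, -probeLen);
    }
    return trimmed;
}

// The trimmed-off tail is kept around so it can still be shown to the user once.
const raw = "The answer is 42. ok ok ok ok ok ok ";
const kept = trim_trailing_repetition(raw, 3);
const garbage = raw.substring(kept.length);
console.log(JSON.stringify(kept));    // "The answer is 42. ok "
console.log(JSON.stringify(garbage)); // "ok ok ok ok ok "
```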


@@ -438,6 +438,7 @@ class MultiChatUI {
//let respBody = await this.read_json_early(resp);
console.debug(`DBUG:SimpleChat:MCUI:${chatId}:HandleUserSubmit:RespBody:${JSON.stringify(respBody)}`);
let assistantMsg;
let trimmedMsg = "";
if (apiEP == ApiEP.Chat) {
assistantMsg = respBody["choices"][0]["message"]["content"];
} else {
@@ -447,10 +448,17 @@ class MultiChatUI {
assistantMsg = respBody["content"];
}
}
if (gMe.bTrimGarbage) {
let origMsg = assistantMsg;
assistantMsg = du.trim_hist_garbage_at_end_loop(assistantMsg, 8, 16, 72);
trimmedMsg = origMsg.substring(assistantMsg.length);
}
chat.add(Roles.Assistant, assistantMsg);
if (chatId == this.curChatId) {
chat.show(this.elDivChat);
if (trimmedMsg.length > 0) {
ui.el_create_append_p(`TRIMMED:${trimmedMsg}`, this.elDivChat);
}
} else {
console.debug(`DBUG:SimpleChat:MCUI:HandleUserSubmit:ChatId has changed:[${chatId}] [${this.curChatId}]`);
}
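
In other words, only the trimmed `assistantMsg` is persisted in the chat object, while the dropped tail is reconstructed by substring arithmetic and rendered once. A condensed restatement of that flow (assuming, as the code above does, that the trim helper returns a prefix of its input):

```javascript
let origMsg = assistantMsg;                                   // raw response text
assistantMsg = du.trim_hist_garbage_at_end_loop(origMsg, 8, 16, 72);
let trimmedMsg = origMsg.substring(assistantMsg.length);      // exactly the removed tail
chat.add(Roles.Assistant, assistantMsg);                      // history keeps only the trimmed text
if (trimmedMsg.length > 0) {
    ui.el_create_append_p(`TRIMMED:${trimmedMsg}`, this.elDivChat); // shown once, not stored
}
```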
@@ -540,6 +548,7 @@ class Me {
this.multiChat = new MultiChatUI();
this.bCompletionFreshChatAlways = true;
this.bCompletionInsertStandardRolePrefix = false;
this.bTrimGarbage = true;
this.iRecentUserMsgCnt = 2;
this.sRecentUserMsgCnt = {
"Full": -1,
@@ -570,6 +579,8 @@ class Me {
ui.el_create_append_p(`bCompletionInsertStandardRolePrefix:${this.bCompletionInsertStandardRolePrefix}`, elDiv);
ui.el_create_append_p(`bTrimGarbage:${this.bTrimGarbage}`, elDiv);
ui.el_create_append_p(`iRecentUserMsgCnt:${this.iRecentUserMsgCnt}`, elDiv);
ui.el_create_append_p(`chatRequestOptions:${JSON.stringify(this.chatRequestOptions)}`, elDiv);
@@ -592,6 +603,11 @@ class Me {
});
elDiv.appendChild(bb);
bb = ui.el_creatediv_boolbutton("SetTrimGarbage", "TrimGarbage", {true: "[+] yes trim", false: "[-] dont trim"}, this.bTrimGarbage, (val)=>{
this.bTrimGarbage = val;
});
elDiv.appendChild(bb);
let sel = ui.el_creatediv_select("SetChatHistoryInCtxt", "ChatHistoryInCtxt", this.sRecentUserMsgCnt, this.iRecentUserMsgCnt, (val)=>{
this.iRecentUserMsgCnt = this.sRecentUserMsgCnt[val];
});
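
The new Settings UI toggle only flips `gMe.bTrimGarbage`. Since the readme notes that gMe is attached to the document object, the same flag can presumably also be changed from the browser's devel-tool console, for example (assuming it is exposed as `document["gMe"]`):

```javascript
// In the browser console of the SimpleChat page (assumed binding name):
let gMe = document["gMe"];
console.log(gMe.bTrimGarbage);  // inspect the current setting
gMe.bTrimGarbage = false;       // disable trimming for subsequent responses
```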