SimpleChat: model request field for openai/equivalent compat
May help testing with openai/equivalent web services, if they require this field.
This commit is contained in:
parent 85fd2d0d84
commit 0e7880a694
2 changed files with 8 additions and 2 deletions
@@ -30,7 +30,7 @@ NOTE: Wrt options sent with the request, it mainly sets temperature, max_tokens
 However if someone wants they can update the js file or equivalent member in gMe as needed.
 
 NOTE: One may be able to use this to chat with openai api web-service /chat/completions endpoint, in a very
-limited / minimal way. One will need to set openai url and authorization bearer key in settings ui.
+limited / minimal way. One will need to set model, openai url and authorization bearer key in settings ui.
 
 
 ## usage
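For context, the README's suggestion that one "can update the js file or equivalent member in gMe as needed" amounts to tweaks like the sketch below. Only member names visible in this commit (gMe.chatRequestOptions, gMe.headers) are used; that gMe is reachable from the browser devtools console is an assumption.

```js
// Sketch: set the same values the settings ui writes, directly on gMe (assumed global).
gMe.chatRequestOptions["model"] = "gpt-3.5-turbo";       // model field added by this commit
gMe.headers["Authorization"] = "Bearer OPENAI_API_KEY";  // bearer key, matching the placeholder further below
```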
@@ -232,7 +232,7 @@ for a minimal chatting experimentation by setting the below.
 * https://api.openai.com/v1 or similar
 
 * Wrt request body - gMe.chatRequestOptions
-  * model
+  * model (settings ui)
   * any additional fields if required in future
 
 * Wrt request headers - gMe.headers
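Taken together, the body POSTed to the /chat/completions endpoint presumably ends up shaped roughly like the sketch below; the merge with the chat messages happens elsewhere in simplechat.js, and the messages array shown is illustrative rather than taken from this diff.

```js
// Rough shape of the request body assembled from gMe.chatRequestOptions (illustrative).
let body = {
    "model": "gpt-3.5-turbo",   // now editable via the new Model entry in settings ui
    "temperature": 0.7,
    "max_tokens": 1024,
    "n_predict": 1024,          // llama.cpp server option, kept alongside the openai-style fields
    "messages": [ { "role": "user", "content": "Hello" } ],  // assumed chat payload, not from this diff
};
```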
@@ -699,6 +699,7 @@ class Me {
         }
         // Add needed fields wrt json object to be sent wrt LLM web services completions endpoint.
         this.chatRequestOptions = {
+            "model": "gpt-3.5-turbo",
             "temperature": 0.7,
             "max_tokens": 1024,
             "n_predict": 1024,
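The "any additional fields if required in future" note above would presumably be handled the same way as the model field here, e.g. (hypothetical, not part of this commit):

```js
// Hypothetical extra request-body field, following the same pattern as "model" above.
gMe.chatRequestOptions["top_p"] = 0.95;  // top_p chosen only as an illustration
```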
@@ -763,6 +764,11 @@ class Me {
         inp.el.placeholder = "Bearer OPENAI_API_KEY";
         elDiv.appendChild(inp.div);
 
+        inp = ui.el_creatediv_input("SetModel", "Model", "text", this.chatRequestOptions["model"], (val)=>{
+            this.chatRequestOptions["model"] = val;
+        });
+        elDiv.appendChild(inp.div);
+
         let bb = ui.el_creatediv_boolbutton("SetStream", "Stream", {true: "[+] yes stream", false: "[-] do oneshot"}, this.bStream, (val)=>{
             this.bStream = val;
         });
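If further chatRequestOptions entries need exposing in the settings ui later, the same el_creatediv_input pattern presumably extends naturally. A sketch, assuming the callback receives the input's raw string value (temperature is just an example, not part of this commit):

```js
// Hypothetical additional settings field, mirroring the Model field added above.
inp = ui.el_creatediv_input("SetTemperature", "Temperature", "text", this.chatRequestOptions["temperature"], (val)=>{
    this.chatRequestOptions["temperature"] = parseFloat(val);  // convert from string input
});
elDiv.appendChild(inp.div);
```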