From b13a768813d6fcdabce99c811c834e1e3d4bd9d7 Mon Sep 17 00:00:00 2001
From: Concedo <39025047+LostRuins@users.noreply.github.com>
Date: Sat, 25 Mar 2023 10:12:47 +0800
Subject: [PATCH] added softprompt endpoint

---
 llama_for_kobold.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/llama_for_kobold.py b/llama_for_kobold.py
index bea9bfd55..1ef26e546 100644
--- a/llama_for_kobold.py
+++ b/llama_for_kobold.py
@@ -118,6 +118,12 @@ class ServerRequestHandler(http.server.SimpleHTTPRequestHandler):
             global maxctx
             self.wfile.write(json.dumps({"value":maxctx}).encode())
             return
+
+        if self.path.endswith('/api/v1/config/soft_prompt') or self.path.endswith('/api/v1/config/soft_prompt/') or self.path.endswith('/api/latest/config/soft_prompt') or self.path.endswith('/api/latest/config/soft_prompt/'):
+            self.send_response(200)
+            self.end_headers()
+            self.wfile.write(json.dumps({"value":""}).encode())
+            return
 
         self.send_response(404)
         self.end_headers()
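
Note: a quick way to sanity-check the new GET endpoint from a Python client.
This is a minimal sketch, assuming the server is running locally on its usual
default port (5001 here; adjust host/port to match your setup):

    import json
    import urllib.request

    # GET the soft prompt config; per the handler above, the server replies
    # 200 with an empty value, since no soft prompt is currently loaded.
    url = "http://localhost:5001/api/v1/config/soft_prompt"
    with urllib.request.urlopen(url) as resp:
        print(json.loads(resp.read().decode()))  # -> {'value': ''}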