Add flake app to run openai proxy
Works with: `nix run .#llama-server-openai-proxy`. If merged, you can then run both the server and the proxy via two commands: `nix run github:ggerganov/llama.cpp#llama-server` and `nix run github:ggerganov/llama.cpp#llama-server-openai-proxy`.
This commit is contained in:
parent
708e179e85
commit
bc1b0d5351
1 changed files with 9 additions and 0 deletions
|
@ -110,6 +110,15 @@
|
|||
type = "app";
|
||||
program = "${self.packages.${system}.default}/bin/llama-server";
|
||||
};
|
||||
# Flake app that launches the OpenAI-compatible proxy shipped in
# examples/server/api_like_OAI.py, using a Python interpreter that
# bundles the script's runtime dependencies (flask, requests).
apps.llama-server-openai-proxy =
  let
    # Python 3 with the packages api_like_OAI.py imports.
    proxyPython = pkgs.python3.withPackages (ps: with ps; [ flask requests ]);
    # Wrapper script so the app has a single executable entry point.
    proxyScript = pkgs.writeShellScriptBin "run-openai-proxy" ''
      ${proxyPython}/bin/python3 ${self}/examples/server/api_like_OAI.py
    '';
  in {
    type = "app";
    program = "${proxyScript}/bin/run-openai-proxy";
  };
|
||||
apps.llama-embedding = {
|
||||
type = "app";
|
||||
program = "${self.packages.${system}.default}/bin/embedding";
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue