Add flake app to run the OpenAI API proxy

Works with:

nix run .#llama-server-openai-proxy

If merged, both the server and the proxy can be run via two commands:

nix run github:ggerganov/llama.cpp#llama-server
nix run github:ggerganov/llama.cpp#llama-server-openai-proxy
This commit is contained in:
paretoOptimalDev 2023-12-23 16:50:23 -06:00
parent 708e179e85
commit bc1b0d5351

View file

@ -110,6 +110,15 @@
type = "app";
program = "${self.packages.${system}.default}/bin/llama-server";
};
apps.llama-server-openai-proxy = {
  type = "app";
  # Wrap the OpenAI-compatible proxy script in a shell launcher so it can
  # be invoked as a flake app. The Python environment bundles the script's
  # runtime dependencies (flask, requests).
  program =
    let
      proxyPython = pkgs.python3.withPackages (ps: with ps; [ flask requests ]);
      proxyLauncher = pkgs.writeShellScriptBin "run-openai-proxy" ''
        ${proxyPython}/bin/python3 ${self}/examples/server/api_like_OAI.py
      '';
    in
    "${proxyLauncher}/bin/run-openai-proxy";
};
apps.llama-embedding = {
type = "app";
program = "${self.packages.${system}.default}/bin/embedding";