From bc1b0d5351a2a03de9c92bd118fb46ff3bfa8ce7 Mon Sep 17 00:00:00 2001
From: paretoOptimalDev
Date: Sat, 23 Dec 2023 16:50:23 -0600
Subject: [PATCH] Add flake app to run openai proxy

Works with:

nix run .#llama-server-openai-proxy

If merged, then you can run both the server and proxy via two commands:

nix run github:ggerganov/llama.cpp#llama-server
nix run github:ggerganov/llama.cpp#llama-server-openai-proxy
---
 flake.nix | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/flake.nix b/flake.nix
index 4cf28d5c11c0f..a774d315337fb 100644
--- a/flake.nix
+++ b/flake.nix
@@ -110,6 +110,15 @@
           type = "app";
           program = "${self.packages.${system}.default}/bin/llama-server";
         };
+        apps.llama-server-openai-proxy = {
+          type = "app";
+          program = "${
+            (let pythonWithPkgs = pkgs.python3.withPackages (ps: with ps; [ flask requests ]);
+             in pkgs.writeShellScriptBin "run-openai-proxy" ''
+               ${pythonWithPkgs}/bin/python3 ${self}/examples/server/api_like_OAI.py
+             '')
+          }/bin/run-openai-proxy";
+        };
         apps.llama-embedding = {
           type = "app";
           program = "${self.packages.${system}.default}/bin/embedding";