diff --git a/.devops/nix/devshells.nix b/.devops/nix/devshells.nix index 9895d803f..858c5bea9 100644 --- a/.devops/nix/devshells.nix +++ b/.devops/nix/devshells.nix @@ -4,7 +4,6 @@ { devShells = lib.concatMapAttrs (name: package: { ${name} = package.passthru.shell; - ${name + "-extra"} = package.passthru.shell-extra; }) config.packages; }; } diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix index ff0f2188b..824908fbb 100644 --- a/.devops/nix/package.nix +++ b/.devops/nix/package.nix @@ -6,6 +6,7 @@ mkShell, runCommand, cmake, + gcc, ninja, pkg-config, git, @@ -74,44 +75,6 @@ let executableSuffix = effectiveStdenv.hostPlatform.extensions.executable; mapToPythonPackages = ps: packages: map (package: ps.${package}) packages; - # TODO: package the Python in this repository in a Nix-like way. - # It'd be nice to migrate to buildPythonPackage, as well as ensure this repo - # is PEP 517-compatible, and ensure the correct .dist-info is generated. - # https://peps.python.org/pep-0517/ - # - # TODO: Package up each Python script or service appropriately, by making - # them into "entrypoints" - llama-python = python3.withPackages (ps: [ - ps.numpy - ps.sentencepiece - gguf-py - ]); - - # TODO(Green-Sky): find a better way to opt-into the heavy ml python runtime - llama-python-extra = python3.withPackages (ps: [ - ps.numpy - ps.sentencepiece - ps.tiktoken - ps.torchWithoutCuda - ps.transformers - - # server bench - ps.matplotlib - - # server tests - ps.openai - ps.behave - ps.prometheus-client - - # for examples/pydantic-models-to-grammar-examples.py - ps.docstring-parser - ps.pydantic - - # for scripts/compare-llama-bench.py - ps.gitpython - ps.tabulate - ]); - xcrunHost = runCommand "xcrunHost" { } '' mkdir -p $out/bin ln -s /usr/bin/xcrun $out/bin @@ -268,21 +231,8 @@ effectiveStdenv.mkDerivation (finalAttrs: { name = "shell-${finalAttrs.finalPackage.name}"; description = "contains numpy and sentencepiece"; buildInputs = [ - python3.withPackages - (ps: 
mapToPythonPackages ps llama-python-base-deps) - ]; - inputsFrom = [ finalAttrs.finalPackage ]; - shellHook = '' - addToSearchPath "LD_LIBRARY_PATH" "${lib.getLib effectiveStdenv.cc.cc}/lib" - ''; - }; - - shell-extra = mkShell { - name = "shell-extra-${finalAttrs.finalPackage.name}"; - description = "contains numpy, sentencepiece, torchWithoutCuda, and transformers"; - buildInputs = [ - python3.withPackages - (ps: mapToPythonPackages ps llama-python-full-deps) + cmake + gcc ]; inputsFrom = [ finalAttrs.finalPackage ]; }; diff --git a/.devops/nix/python-scripts.nix b/.devops/nix/python-scripts.nix new file mode 100644 index 000000000..fa0d60654 --- /dev/null +++ b/.devops/nix/python-scripts.nix @@ -0,0 +1,42 @@ +{ + lib, + stdenv, + buildPythonPackage, + poetry-core, + breakpointHook, + mkShell, + python3Packages, + gguf-py, +}@inputs: + +let + llama-python-deps = with python3Packages; [ + numpy + sentencepiece + transformers + protobuf + torchWithoutCuda + gguf-py + ]; +in + +buildPythonPackage ({ + pname = "llama-scripts"; + src = ../../.; + version = "0.0.0"; + pyproject = true; + nativeBuildInputs = [ poetry-core ]; + projectDir = ../../.; + propagatedBuildInputs = llama-python-deps; + + passthru = { + shell = mkShell { + name = "shell-python-scripts"; + description = "contains numpy, sentencepiece, transformers, protobuf, torchWithoutCuda, and gguf-py"; + buildInputs = llama-python-deps; + shellHook = '' + addToSearchPath "LD_LIBRARY_PATH" "${lib.getLib stdenv.cc.cc}/lib" + ''; + }; + }; +}) diff --git a/flake.nix b/flake.nix index 4c75cbbcc..26a258816 100644 --- a/flake.nix +++ b/flake.nix @@ -159,6 +159,7 @@ default = config.legacyPackages.llamaPackages.llama-cpp; vulkan = config.packages.default.override { useVulkan = true; }; windows = config.legacyPackages.llamaPackagesWindows.llama-cpp; + python-scripts = config.legacyPackages.llamaPackages.python-scripts; } // lib.optionalAttrs pkgs.stdenv.isLinux { cuda = config.legacyPackages.llamaPackagesCuda.llama-cpp;