dev(nix): Break up python/C devShells

Author: ditsuke, 2024-02-27 00:51:12 +05:30
parent 51056d932c
commit ef2dae9249
4 changed files with 46 additions and 54 deletions


@@ -4,7 +4,6 @@
    {
      devShells = lib.concatMapAttrs (name: package: {
        ${name} = package.passthru.shell;
        ${name + "-extra"} = package.passthru.shell-extra;
      }) config.packages;
    };
}
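
For context on the hunk above: lib.concatMapAttrs maps every package in config.packages to a small attribute set and merges the results, so each package's passthru.shell becomes a devShell of the same name. A minimal, self-contained sketch of that behaviour, assuming nothing beyond nixpkgs lib; the package names and shell values are illustrative, not taken from the repository:

let
  lib = (import <nixpkgs> { }).lib;
  # Stand-ins for config.packages; only passthru.shell matters for this sketch.
  packages = {
    llama-cpp = { passthru.shell = "devshell-for-llama-cpp"; };
    python-scripts = { passthru.shell = "devshell-for-python-scripts"; };
  };
in
lib.concatMapAttrs (name: package: {
  ${name} = package.passthru.shell;
}) packages
# evaluates to:
# { llama-cpp = "devshell-for-llama-cpp"; python-scripts = "devshell-for-python-scripts"; }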


@@ -6,6 +6,7 @@
  mkShell,
  runCommand,
  cmake,
  gcc,
  ninja,
  pkg-config,
  git,
@@ -74,44 +75,6 @@ let
  executableSuffix = effectiveStdenv.hostPlatform.extensions.executable;
  mapToPythonPackages = ps: packages: map (package: ps.${package}) packages;
  # TODO: package the Python in this repository in a Nix-like way.
  # It'd be nice to migrate to buildPythonPackage, as well as ensure this repo
  # is PEP 517-compatible, and ensure the correct .dist-info is generated.
  # https://peps.python.org/pep-0517/
  #
  # TODO: Package up each Python script or service appropriately, by making
  # them into "entrypoints"
  llama-python = python3.withPackages (ps: [
    ps.numpy
    ps.sentencepiece
    gguf-py
  ]);
  # TODO(Green-Sky): find a better way to opt-into the heavy ml python runtime
  llama-python-extra = python3.withPackages (ps: [
    ps.numpy
    ps.sentencepiece
    ps.tiktoken
    ps.torchWithoutCuda
    ps.transformers
    # server bench
    ps.matplotlib
    # server tests
    ps.openai
    ps.behave
    ps.prometheus-client
    # for examples/pydantic-models-to-grammar-examples.py
    ps.docstring-parser
    ps.pydantic
    # for scripts/compare-llama-bench.py
    ps.gitpython
    ps.tabulate
  ]);
  xcrunHost = runCommand "xcrunHost" { } ''
    mkdir -p $out/bin
    ln -s /usr/bin/xcrun $out/bin
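
For readers unfamiliar with the helper being removed in the hunk above: python3.withPackages builds a wrapped interpreter whose environment contains the selected packages, which is what llama-python and llama-python-extra provided before this split. A minimal hedged sketch against plain nixpkgs; the package selection is trimmed and illustrative:

{ pkgs ? import <nixpkgs> { } }:
# Produces a python3 derivation with numpy and sentencepiece on its search path;
# building this file and running result/bin/python gives the combined environment.
pkgs.python3.withPackages (ps: [
  ps.numpy
  ps.sentencepiece
])
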
@@ -268,21 +231,8 @@ effectiveStdenv.mkDerivation (finalAttrs: {
name = "shell-${finalAttrs.finalPackage.name}";
description = "contains numpy and sentencepiece";
buildInputs = [
python3.withPackages
(ps: mapToPythonPackages ps llama-python-base-deps)
];
inputsFrom = [ finalAttrs.finalPackage ];
shellHook = ''
addToSearchPath "LD_LIBRARY_PATH" "${lib.getLib effectiveStdenv.cc.cc}/lib"
'';
};
shell-extra = mkShell {
name = "shell-extra-${finalAttrs.finalPackage.name}";
description = "contains numpy, sentencepiece, torchWithoutCuda, and transformers";
buildInputs = [
python3.withPackages
(ps: mapToPythonPackages ps llama-python-full-deps)
cmake
gcc
];
inputsFrom = [ finalAttrs.finalPackage ];
};
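
The passthru shells above follow a common mkShell pattern: inputsFrom re-uses the dependencies of the package itself, buildInputs layers a Python environment on top, and the shellHook makes the compiler's runtime libraries visible to Python extensions. A standalone hedged sketch of that pattern, with an arbitrary nixpkgs package standing in for the llama.cpp derivation:

{ pkgs ? import <nixpkgs> { } }:
pkgs.mkShell {
  name = "shell-sketch";
  # Pull in the build environment of an existing derivation (stand-in: pkgs.hello).
  inputsFrom = [ pkgs.hello ];
  # Layer a Python environment on top of it.
  buildInputs = [ (pkgs.python3.withPackages (ps: [ ps.numpy ])) ];
  # addToSearchPath comes from stdenv's setup; it prepends the C++ runtime
  # library directory so Python extensions can find libstdc++ at run time.
  shellHook = ''
    addToSearchPath "LD_LIBRARY_PATH" "${pkgs.lib.getLib pkgs.stdenv.cc.cc}/lib"
  '';
}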


@@ -0,0 +1,42 @@
{
  lib,
  stdenv,
  buildPythonPackage,
  poetry-core,
  breakpointHook,
  mkShell,
  python3Packages,
  gguf-py,
}@inputs:
let
  llama-python-deps = with python3Packages; [
    numpy
    sentencepiece
    transformers
    protobuf
    torchWithoutCuda
    gguf-py
  ];
in
buildPythonPackage ({
  pname = "llama-scripts";
  src = ../../.;
  version = "0.0.0";
  pyproject = true;
  nativeBuildInputs = [ poetry-core ];
  projectDir = ../../.;
  propagatedBuildInputs = llama-python-deps;
  passthru = {
    shell = mkShell {
      name = "shell-python-scripts";
      description = "contains numpy and sentencepiece";
      buildInputs = llama-python-deps;
      shellHook = ''
        addToSearchPath "LD_LIBRARY_PATH" "${lib.getLib stdenv.cc.cc}/lib"
      '';
    };
  };
})
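
The last hunk below refers to legacyPackages.llamaPackages.python-scripts, so this new file is presumably wired into the llamaPackages scope with callPackage somewhere not shown in this diff. A hedged sketch of what such wiring could look like; the scope layout and every file name other than python-scripts.nix are assumptions, not part of this commit:

# Hypothetical excerpt of the package scope; not taken from this diff.
# Assumes the scope exposes buildPythonPackage, gguf-py and the other
# arguments declared at the top of python-scripts.nix.
lib.makeScope newScope (self: {
  llama-cpp = self.callPackage ./package.nix { };
  python-scripts = self.callPackage ./python-scripts.nix { };
})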


@@ -159,6 +159,7 @@
  default = config.legacyPackages.llamaPackages.llama-cpp;
  vulkan = config.packages.default.override { useVulkan = true; };
  windows = config.legacyPackages.llamaPackagesWindows.llama-cpp;
  python-scripts = config.legacyPackages.llamaPackages.python-scripts;
}
// lib.optionalAttrs pkgs.stdenv.isLinux {
  cuda = config.legacyPackages.llamaPackagesCuda.llama-cpp;