style: format with nixfmt/rfc101-style

ditsuke 2024-02-22 23:55:55 +05:30
parent c6d4cb4655
commit 0126788271
5 changed files with 213 additions and 224 deletions
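
For orientation, this is the kind of rewrite the formatter applies throughout the hunks below (an illustration lifted from this diff, not authoritative nixfmt documentation): a trailing lambda or list argument is absorbed into the call instead of every argument getting its own line.

    # before
    llama-python = python3.withPackages (
      ps: [
        ps.numpy
        ps.sentencepiece
      ]
    );

    # after
    llama-python = python3.withPackages (ps: [
      ps.numpy
      ps.sentencepiece
    ]);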

@@ -2,12 +2,9 @@
   perSystem =
     { config, lib, ... }:
     {
-      devShells =
-        lib.concatMapAttrs
-          (name: package: {
+      devShells = lib.concatMapAttrs (name: package: {
         ${name} = package.passthru.shell;
         ${name + "-extra"} = package.passthru.shell-extra;
-          })
-          config.packages;
+      }) config.packages;
     };
 }

@@ -26,16 +26,14 @@
         config.cudaSupport = true;
         config.allowUnfreePredicate =
           p:
-          builtins.all
-            (
+          builtins.all (
             license:
             license.free
             || builtins.elem license.shortName [
               "CUDA EULA"
               "cuDNN EULA"
             ]
-            )
-            (p.meta.licenses or [ p.meta.license ]);
+          ) (p.meta.licenses or [ p.meta.license ]);
       };
       # Ensure dependencies use ROCm consistently
       pkgsRocm = import inputs.nixpkgs {

@@ -20,12 +20,14 @@
   vulkan-loader,
   curl,
   shaderc,
-  useBlas ? builtins.all (x: !x) [
+  useBlas ?
+    builtins.all (x: !x) [
       useCuda
       useMetalKit
       useRocm
       useVulkan
-  ] && blas.meta.available,
+    ]
+    && blas.meta.available,
   useCuda ? config.cudaSupport,
   useMetalKit ? stdenv.isAarch64 && stdenv.isDarwin,
   useMpi ? false, # Increases the runtime closure size by ~700M
@@ -38,7 +40,7 @@
   # otherwise we get libstdc++ errors downstream.
   effectiveStdenv ? if useCuda then cudaPackages.backendStdenv else stdenv,
   enableStatic ? effectiveStdenv.hostPlatform.isStatic,
-  precompileMetalShaders ? false
+  precompileMetalShaders ? false,
 }@inputs:

 let
@@ -63,9 +65,9 @@ let
   pnameSuffix =
     strings.optionalString (suffices != [ ])
       "-${strings.concatMapStringsSep "-" strings.toLower suffices}";
-  descriptionSuffix =
-    strings.optionalString (suffices != [ ])
-      ", accelerated with ${strings.concatStringsSep ", " suffices}";
+  descriptionSuffix = strings.optionalString (
+    suffices != [ ]
+  ) ", accelerated with ${strings.concatStringsSep ", " suffices}";

   executableSuffix = effectiveStdenv.hostPlatform.extensions.executable;
@@ -76,16 +78,13 @@
   #
   # TODO: Package up each Python script or service appropriately, by making
   # them into "entrypoints"
-  llama-python = python3.withPackages (
-    ps: [
+  llama-python = python3.withPackages (ps: [
     ps.numpy
     ps.sentencepiece
-    ]
-  );
+  ]);

   # TODO(Green-Sky): find a better way to opt-into the heavy ml python runtime
-  llama-python-extra = python3.withPackages (
-    ps: [
+  llama-python-extra = python3.withPackages (ps: [
     ps.numpy
     ps.sentencepiece
     ps.tiktoken
@@ -107,8 +106,7 @@ let
     # for scripts/compare-llama-bench.py
     ps.gitpython
     ps.tabulate
-    ]
-  );
+  ]);

   xcrunHost = runCommand "xcrunHost" { } ''
     mkdir -p $out/bin
@@ -145,8 +143,7 @@ let
   ];
 in

-effectiveStdenv.mkDerivation (
-  finalAttrs: {
+effectiveStdenv.mkDerivation (finalAttrs: {
   pname = "llama-cpp${pnameSuffix}";
   version = llamaVersion;
@@ -193,13 +190,11 @@ effectiveStdenv.mkDerivation (
     ]
     ++ optionals useCuda [
       cudaPackages.cuda_nvcc
       autoAddDriverRunpath
     ]
-    ++ optionals (effectiveStdenv.hostPlatform.isGnu && enableStatic) [
-      glibc.static
-    ] ++ optionals (effectiveStdenv.isDarwin && useMetalKit && precompileMetalShaders) [
-      xcrunHost
-    ];
+    ++ optionals (effectiveStdenv.hostPlatform.isGnu && enableStatic) [ glibc.static ]
+    ++ optionals (effectiveStdenv.isDarwin && useMetalKit && precompileMetalShaders) [ xcrunHost ];

   buildInputs =
     optionals effectiveStdenv.isDarwin darwinBuildInputs
@@ -318,5 +313,4 @@ effectiveStdenv.mkDerivation (
     # Extend `badPlatforms` instead
     platforms = lib.platforms.all;
   };
-  }
-)
+})
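
An aside on the parameters reformatted above (a hedged sketch, not part of this commit): since package.nix is instantiated through callPackage in the scope shown in the next file, feature flags such as useVulkan or useMpi can be toggled per consumer with .override, roughly:

    # Hypothetical usage; the attribute names are the parameters listed above.
    llama-cpp-vulkan = llamaPackages.llama-cpp.override {
      useVulkan = true;
      useMpi = false; # avoids the ~700M runtime-closure increase noted above
    };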

@@ -8,12 +8,10 @@
 # because it allows users to apply overlays later using `overrideScope'`.
 # Cf. https://noogle.dev/f/lib/makeScope

-lib.makeScope newScope (
-  self: {
+lib.makeScope newScope (self: {
   inherit llamaVersion;
   llama-cpp = self.callPackage ./package.nix { };
   docker = self.callPackage ./docker.nix { };
   docker-min = self.callPackage ./docker.nix { interactive = false; };
   sif = self.callPackage ./sif.nix { };
-  }
-)
+})
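
The comment kept in this hunk explains the choice of makeScope: the resulting scope can be extended after the fact via overrideScope'. A minimal sketch of that, purely illustrative:

    # Not part of the commit; replaces llama-cpp inside the scope so anything
    # in the scope that refers to it sees the overridden derivation.
    llamaPackages' = llamaPackages.overrideScope' (
      final: prev: {
        llama-cpp = prev.llama-cpp.override { enableStatic = true; };
      }
    );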

@@ -145,7 +145,9 @@
       # the same path you would with an overlay.
       legacyPackages = {
         llamaPackages = pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
-        llamaPackagesWindows = pkgs.pkgsCross.mingwW64.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+        llamaPackagesWindows = pkgs.pkgsCross.mingwW64.callPackage .devops/nix/scope.nix {
+          inherit llamaVersion;
+        };
         llamaPackagesCuda = pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
         llamaPackagesRocm = pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
       };
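
Because these scopes are exposed under legacyPackages (mirroring the path an overlay would provide, as the comment in this hunk notes), a downstream flake can reach them along the same attribute path. A rough sketch, assuming an input named llama-cpp pointing at this repository and an x86_64-linux host:

    packages.x86_64-linux.llama-cpp-cuda =
      inputs.llama-cpp.legacyPackages.x86_64-linux.llamaPackagesCuda.llama-cpp;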