nix: migrate to pname+version

This commit is contained in:
Someone Serge 2023-12-25 17:02:36 +00:00
parent a28c9acca3
commit a629371245
No known key found for this signature in database
GPG key ID: 7B0E3B1390D61DA4
3 changed files with 40 additions and 21 deletions

View file

@ -24,6 +24,7 @@
useMetalKit ? stdenv.isAarch64 && stdenv.isDarwin && !useOpenCL,
useOpenCL ? false,
useRocm ? config.rocmSupport,
llamaVersion ? "0.0.0", # Arbitrary version, substituted by the flake
}@inputs:
let
@ -31,6 +32,7 @@ let
cmakeBool
cmakeFeature
optionals
strings
versionOlder
;
@ -39,18 +41,19 @@ let
stdenv = throw "Use effectiveStdenv instead";
effectiveStdenv = if useCuda then cudaPackages.backendStdenv else inputs.stdenv;
# Give each flavor a slightly different description to tell them apart.
suffices =
lib.optionals useOpenCL [ "OpenCL" ]
++ lib.optionals useCuda [ "CUDA" ]
++ lib.optionals useRocm [ "ROCm" ]
++ lib.optionals useMetalKit [ "MetalKit" ]
++ lib.optionals useBlas [ "BLAS" ];
pnameSuffix =
strings.optionalString (suffices != [ ])
"-${strings.concatMapStringsSep "-" strings.toLower suffices}";
descriptionSuffix =
if useOpenCL then
" (OpenCL accelerated)"
else if useCuda then
" (CUDA accelerated)"
else if useRocm then
" (ROCm accelerated)"
else if useMetalKit then
" (MetalKit accelerated)"
else
"";
strings.optionalString (suffices != [ ])
", accelerated with ${strings.concatStringsSep ", " suffices}";
# TODO: package the Python in this repository in a Nix-like way.
# It'd be nice to migrate to buildPythonPackage, as well as ensure this repo
@ -99,7 +102,9 @@ in
effectiveStdenv.mkDerivation (
finalAttrs: {
name = "llama.cpp";
pname = "llama-cpp${pnameSuffix}";
version = llamaVersion;
src = ../../.;
postPatch = ''
@ -171,14 +176,14 @@ effectiveStdenv.mkDerivation (
;
shell = mkShell {
name = "default${descriptionSuffix}";
name = "shell-${finalAttrs.finalPackage.name}";
description = "contains numpy and sentencepiece";
buildInputs = [ llama-python ];
inputsFrom = [ finalAttrs.finalPackage ];
};
shell-extra = mkShell {
name = "extra${descriptionSuffix}";
name = "shell-extra-${finalAttrs.finalPackage.name}";
description = "contains numpy, sentencepiece, torchWithoutCuda, and transformers";
buildInputs = [ llama-python-extra ];
inputsFrom = [ finalAttrs.finalPackage ];

View file

@ -1,3 +1,12 @@
{ lib, newScope }:
{
lib,
newScope,
llamaVersion ? "0.0.0",
}:
lib.makeScope newScope (self: { llama-cpp = self.callPackage ./package.nix { }; })
lib.makeScope newScope (
self: {
inherit llamaVersion;
llama-cpp = self.callPackage ./package.nix { };
}
)

View file

@ -16,7 +16,10 @@
# { program = "/nix/store/00000000000000000000000000000000-llama.cpp/bin/quantize"; type = "app"; }
# ```
outputs =
{ flake-parts, ... }@inputs:
{ self, flake-parts, ... }@inputs:
let
llamaVersion = self.dirtyShortRev or self.shortRev;
in
flake-parts.lib.mkFlake { inherit inputs; }
{
@ -48,7 +51,9 @@
#
# Cf. https://nixos.org/manual/nix/unstable/command-ref/new-cli/nix3-flake.html?highlight=flake#flake-format
flake.overlays.default =
(final: prev: { llamaPackages = final.callPackage .devops/nix/scope.nix { }; });
(final: prev: {
llamaPackages = final.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
});
systems = [
"aarch64-darwin"
@ -69,10 +74,10 @@
# We don't use the overlay here so as to avoid making too many instances of nixpkgs,
# cf. https://zimbatm.com/notes/1000-instances-of-nixpkgs
packages = {
default = (pkgs.callPackage .devops/nix/scope.nix { }).llama-cpp;
default = (pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
opencl = config.packages.default.override { useOpenCL = true; };
cuda = (pkgsCuda.callPackage .devops/nix/scope.nix { }).llama-cpp;
rocm = (pkgsRocm.callPackage .devops/nix/scope.nix { }).llama-cpp;
cuda = (pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
rocm = (pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
};
};
};