From a41c736c2fe03058c70cc59c7d91f7d234d38afd Mon Sep 17 00:00:00 2001 From: Philip Taron Date: Tue, 16 Jan 2024 09:23:53 -0800 Subject: [PATCH] nix: remove nixConfig from flake.nix Why? I've demoed `nix run ggerganov/llama.cpp` to three people now, and the prompts caused by this setting have been a fly in the ointment every time, where I have to hand-wave away what's being asked... and because it happens on every single command run, it's a major pain in the ass. It's fine to leave it in the flake for those who read it, but the Nix ecosystem isn't yet ready to have flakes with nixConfig, in my judgement. --- flake.nix | 57 ++++++++++++++++++++++++++++++++++--------------------- 1 file changed, 35 insertions(+), 22 deletions(-) diff --git a/flake.nix b/flake.nix index 488ed6c59..ec62c773a 100644 --- a/flake.nix +++ b/flake.nix @@ -6,28 +6,41 @@ flake-parts.url = "github:hercules-ci/flake-parts"; }; - # Optional binary cache - nixConfig = { - extra-substituters = [ - # Populated by the CI in ggerganov/llama.cpp - "https://llama-cpp.cachix.org" - - # A development cache for nixpkgs imported with `config.cudaSupport = true`. - # Populated by https://hercules-ci.com/github/SomeoneSerge/nixpkgs-cuda-ci. - # This lets one skip building e.g. the CUDA-enabled openmpi. - # TODO: Replace once nix-community obtains an official one. - "https://cuda-maintainers.cachix.org" - ]; - - # Verify these are the same keys as published on - # - https://app.cachix.org/cache/llama-cpp - # - https://app.cachix.org/cache/cuda-maintainers - extra-trusted-public-keys = [ - "llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc=" - "cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E=" - ]; - }; - + # There's an optional binary cache available. The details are below, but they're commented out. + # + # Why? The terrible experience of being prompted to accept them on every single Nix command run.
+ # Plus, there are warnings shown about not being a trusted user on a default Nix install + # if you *do* say yes to the prompts. + # + # This experience makes having `nixConfig` in a flake a persistent UX problem. + # + # To make use of the binary cache, please add the relevant settings to your `nix.conf`. + # It's located at `/etc/nix/nix.conf` on non-NixOS systems. On NixOS, adjust the `nix.settings` + # option in your NixOS configuration to add `extra-substituters` and `extra-trusted-public-keys`, + # as shown below. + # + # ``` + # nixConfig = { + # extra-substituters = [ + # # Populated by the CI in ggerganov/llama.cpp + # "https://llama-cpp.cachix.org" + # + # # A development cache for nixpkgs imported with `config.cudaSupport = true`. + # # Populated by https://hercules-ci.com/github/SomeoneSerge/nixpkgs-cuda-ci. + # # This lets one skip building e.g. the CUDA-enabled openmpi. + # # TODO: Replace once nix-community obtains an official one. + # "https://cuda-maintainers.cachix.org" + # ]; + # + # # Verify these are the same keys as published on + # # - https://app.cachix.org/cache/llama-cpp + # # - https://app.cachix.org/cache/cuda-maintainers + # extra-trusted-public-keys = [ + # "llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc=" + # "cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E=" + # ]; + # }; + # ``` # For inspection, use `nix flake show github:ggerganov/llama.cpp` or the nix repl: #