{ description = "A Nix-flake-based Rust development environment"; nixConfig = { extra-substituters = [ "https://nixcache.vlt81.de" "https://llama-cpp.cachix.org" "https://cuda-maintainers.cachix.org" ]; extra-trusted-public-keys = [ "nixcache.vlt81.de:nw0FfUpePtL6P3IMNT9X6oln0Wg9REZINtkkI9SisqQ=" "llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc=" "cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E=" ]; }; inputs = { nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; rust-overlay.url = "github:oxalica/rust-overlay"; flake-utils.url = "github:numtide/flake-utils"; flake-parts.url = "github:hercules-ci/flake-parts"; devshell.url = "github:numtide/devshell"; llama-cpp = { url = "github:ggerganov/llama.cpp/b5216"; inputs.nixpkgs.follows = "nixpkgs"; inputs.flake-parts.follows = "flake-parts"; }; ik_llama-cpp = { url = "github:ikawrakow/ik_llama.cpp/main"; inputs.nixpkgs.follows = "nixpkgs"; inputs.flake-parts.follows = "flake-parts"; }; }; outputs = { self , nixpkgs , rust-overlay , flake-utils , devshell , llama-cpp , ik_llama-cpp , ... }: flake-utils.lib.eachDefaultSystem (system: let overlays = [ rust-overlay.overlays.default devshell.overlays.default (final: prev: { customRustToolchain = prev.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml; }) (final: prev: { prev.rocmPackages.clr = prev.rocmPackages.clr.overrideDerivation (oldAttrs: { passthru = { gpuTargets = rocmTargets; updateScript = oldAttrs.passthru.updateScript; impureTests = oldAttrs.passthru.impureTests; }; }); }) ]; config = { allowUnfree = true; rocmSupport = true; }; pkgs = import nixpkgs { inherit system overlays config; }; buildInputs = with pkgs; [ aider-chat harfbuzz openssl pango sqlite mariadb zlib clang libclang gzip coreutils gdb glib glibc wayland-utils waylandpp kdePackages.wayland libxkbcommon webkitgtk_4_1 libsoup_3 gtk3 libGL wayland ]; rocmTargets = [ "gfx1030" "gfx1100" "gfx1102" "gfx1103" ]; lib = pkgs.lib; myOverride = oldAttrs: { # speeds up builts by only building for a needed rocmTargets... cmakeFlags = [ (lib.cmakeBool "LLAMA_BUILD_SERVER" true) (lib.cmakeBool "BUILD_SHARED_LIBS" true) (lib.cmakeBool "CMAKE_SKIP_BUILD_RPATH" true) (lib.cmakeBool "LLAMA_CURL" true) (lib.cmakeBool "GGML_NATIVE" true) (lib.cmakeBool "GGML_BLAS" false) (lib.cmakeBool "GGML_CUDA" false) (lib.cmakeBool "GGML_HIP" true) # new one ? kinda undocumented ? 
(lib.cmakeBool "GGML_HIPBLAS" true) # seems to be depr (lib.cmakeBool "GGML_METAL" false) (lib.cmakeBool "GGML_VULKAN" false) (lib.cmakeBool "GGML_STATIC" false) (lib.cmakeBool "GGML_FMA" true) (lib.cmakeBool "GGML_F16C" true) (lib.cmakeBool "GGML_AVX2" true) (lib.cmakeBool "GGML_AVX512" false) (lib.cmakeFeature "CMAKE_HIP_COMPILER" "${pkgs.rocmPackages.llvm.clang}/bin/clang") (lib.cmakeFeature "CMAKE_HIP_ARCHITECTURES" (builtins.concatStringsSep ";" rocmTargets)) (lib.cmakeFeature "AMDGPU_TARGETS" (builtins.concatStringsSep ";" rocmTargets)) ]; }; in { packages = (import ./.nix { inherit pkgs lib config; }) // { myikllamacpp = ik_llama-cpp.legacyPackages.${system}.llamaPackages.llama-cpp; myikllamacpp-rocm = ik_llama-cpp.legacyPackages.${system}.llamaPackagesRocm.llama-cpp.overrideDerivation myOverride; myllamacpp = llama-cpp.legacyPackages.${system}.llamaPackages.llama-cpp; myllamacpp-rocm = llama-cpp.legacyPackages.${system}.llamaPackagesRocm.llama-cpp.overrideDerivation myOverride; }; devShells.default = pkgs.mkShell { packages = with pkgs; [ customRustToolchain # self.packages.${system}.myllamacpp self.packages.${system}.myikllamacpp-rocm aider-chat bacon binaryen cacert trunk cargo-bloat cargo-docset cargo-machete cargo-limit cargo-deny cargo-edit cargo-watch cargo-make cargo-generate cargo-udeps wasm-bindgen-cli_0_2_100 cargo-outdated cargo-release rust-script calc # jre8 # needed for xmlls dart-sass # trunk fish inotify-tools mold # nodejs_20 pkg-config rustywind sccache sqlx-cli unzip rocmPackages.rocminfo ] ++ buildInputs; buildInputs = buildInputs; shellHook = '' # export NIX_LD_LIBRARY_PATH=${pkgs.lib.makeLibraryPath buildInputs}:$NIX_LD_LIBRARY_PATH export LD_LIBRARY_PATH="${pkgs.lib.makeLibraryPath buildInputs}" export MALLOC_CONF=thp:always,metadata_thp:always ''; }; }); }