Add ikllamacpp
commit 75a38bad8a
parent f799554173
2 changed files with 41 additions and 9 deletions
flake.lock (generated): 25 changed lines
@@ -54,6 +54,30 @@
         "type": "github"
       }
     },
+    "ik_llama-cpp": {
+      "inputs": {
+        "flake-parts": [
+          "flake-parts"
+        ],
+        "nixpkgs": [
+          "nixpkgs"
+        ]
+      },
+      "locked": {
+        "lastModified": 1746018689,
+        "narHash": "sha256-N1/mQPOZVe0L8nJU8U7aDVRdpUJle97AE6k+I3RVb6Y=",
+        "owner": "ikawrakow",
+        "repo": "ik_llama.cpp",
+        "rev": "98d1626469879d35faba9cb7e9d0b1ddaf853eee",
+        "type": "github"
+      },
+      "original": {
+        "owner": "ikawrakow",
+        "ref": "main",
+        "repo": "ik_llama.cpp",
+        "type": "github"
+      }
+    },
     "llama-cpp": {
       "inputs": {
         "flake-parts": [
@@ -146,6 +170,7 @@
       "devshell": "devshell",
       "flake-parts": "flake-parts",
       "flake-utils": "flake-utils",
+      "ik_llama-cpp": "ik_llama-cpp",
       "llama-cpp": "llama-cpp",
       "nixpkgs": "nixpkgs_2",
       "rust-overlay": "rust-overlay"
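The new lock entry pins ik_llama.cpp to a fixed revision of its main branch and reuses the flake's existing flake-parts and nixpkgs inputs via follows. As a sketch (the exact spelling depends on the Nix version), the pin can later be refreshed with:

    nix flake lock --update-input ik_llama-cpp   # older Nix CLI
    nix flake update ik_llama-cpp                # Nix 2.19 and newer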
flake.nix: 25 changed lines
@@ -23,6 +23,11 @@
       inputs.nixpkgs.follows = "nixpkgs";
       inputs.flake-parts.follows = "flake-parts";
     };
+    ik_llama-cpp = {
+      url = "github:ikawrakow/ik_llama.cpp/main";
+      inputs.nixpkgs.follows = "nixpkgs";
+      inputs.flake-parts.follows = "flake-parts";
+    };
   };

   outputs =
@@ -32,6 +37,7 @@
     , flake-utils
     , devshell
     , llama-cpp
+    , ik_llama-cpp
     , ...
     }:
     flake-utils.lib.eachDefaultSystem
@@ -39,6 +45,7 @@
       let
         overlays = [
           llama-cpp.overlays.default
+          ik_llama-cpp.overlays.default
           rust-overlay.overlays.default
           devshell.overlays.default
           (final: prev: {
@@ -95,14 +102,7 @@
           "gfx1103"
         ];
         lib = pkgs.lib;
-      in
-      {
-        apps.devshell = self.outputs.devShells.${system}.default.flakeApp;
-        packages =
-          (import ./.nix { inherit pkgs lib config; })
-          // {
-            myllamacpp = with pkgs;
-              llamaPackages.llama-cpp.overrideDerivation (oldAttrs: {
+        myOverride = (oldAttrs: {
           # speeds up builts by only building for a needed rocmTargets...
           cmakeFlags = [
             (lib.cmakeBool "LLAMA_BUILD_SERVER" true)
@@ -121,11 +121,18 @@
             (lib.cmakeBool "GGML_F16C" true)
             (lib.cmakeBool "GGML_AVX2" true)
             (lib.cmakeBool "GGML_AVX512" false)
-            (lib.cmakeFeature "CMAKE_HIP_COMPILER" "${rocmPackages.llvm.clang}/bin/clang")
+            (lib.cmakeFeature "CMAKE_HIP_COMPILER" "${pkgs.rocmPackages.llvm.clang}/bin/clang")
             (lib.cmakeFeature "CMAKE_HIP_ARCHITECTURES" (builtins.concatStringsSep ";" rocmTargets))
             (lib.cmakeFeature "AMDGPU_TARGETS" (builtins.concatStringsSep ";" rocmTargets))
           ];
         });
+      in
+      {
+        packages =
+          (import ./.nix { inherit pkgs lib config; })
+          // {
+            myikllamacpp = ik_llama-cpp.packages.${system}.default;
+            myllamacpp = pkgs.llamaPackages.llama-cpp.overrideDerivation myOverride;
           };
         devShells.default = pkgs.mkShell {
           packages = with pkgs;
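The hunks above wire the new input in three places: as a flake input, as an overlay, and as the myikllamacpp package output, while the llama.cpp override is refactored into a reusable myOverride function. A minimal usage sketch, assuming a local checkout of this flake (attribute names taken from the diff above):

    nix build .#myikllamacpp   # ik_llama.cpp from the new input
    nix build .#myllamacpp     # llama.cpp with the overridden cmakeFlags
    nix develop                # the devShell declared after the packages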