 6 |  6 |     outputs = { self, nixpkgs, flake-utils }:
 7 |  7 |       flake-utils.lib.eachDefaultSystem (system:
 8 |  8 |         let
   |  9 | +         name = "llama.cpp";
   | 10 | +         src = ./.;
   | 11 | +         meta.mainProgram = "llama";
 9 | 12 |           inherit (pkgs.stdenv) isAarch32 isAarch64 isDarwin;
10 | 13 |           buildInputs = with pkgs; [ openmpi ];
11 | 14 |           osSpecific = with pkgs; buildInputs ++

31 | 34 |               with pkgs; [ openblas ]
32 | 35 |           );
33 | 36 |           pkgs = import nixpkgs { inherit system; };
34 |    | -         nativeBuildInputs = with pkgs; [ cmake pkgconfig ];
   | 37 | +         nativeBuildInputs = with pkgs; [ cmake ninja pkgconfig ];
35 | 38 |           llama-python =
36 | 39 |             pkgs.python3.withPackages (ps: with ps; [ numpy sentencepiece ]);
37 | 40 |           postPatch = ''

44 | 47 |             mv $out/bin/server $out/bin/llama-server
45 | 48 |           '';
46 | 49 |           cmakeFlags = [ "-DLLAMA_BUILD_SERVER=ON" "-DLLAMA_MPI=ON" "-DBUILD_SHARED_LIBS=ON" "-DCMAKE_SKIP_BUILD_RPATH=ON" ];
47 |    | -       in {
   | 50 | +       in
   | 51 | +       {
48 | 52 |           packages.default = pkgs.stdenv.mkDerivation {
49 |    | -           name = "llama.cpp";
50 |    | -           src = ./.;
51 |    | -           postPatch = postPatch;
52 |    | -           nativeBuildInputs = nativeBuildInputs;
53 |    | -           buildInputs = osSpecific;
   | 53 | +           inherit name src meta postPatch nativeBuildInputs buildInputs postInstall;
54 | 54 |             cmakeFlags = cmakeFlags
55 | 55 |               ++ (if isAarch64 && isDarwin then [
56 |    | -               "-DCMAKE_C_FLAGS=-D__ARM_FEATURE_DOTPROD=1"
57 |    | -               "-DLLAMA_METAL=ON"
58 |    | -             ] else [
59 |    | -               "-DLLAMA_BLAS=ON"
60 |    | -               "-DLLAMA_BLAS_VENDOR=OpenBLAS"
   | 56 | +               "-DCMAKE_C_FLAGS=-D__ARM_FEATURE_DOTPROD=1"
   | 57 | +               "-DLLAMA_METAL=ON"
   | 58 | +             ] else [
   | 59 | +               "-DLLAMA_BLAS=ON"
   | 60 | +               "-DLLAMA_BLAS_VENDOR=OpenBLAS"
61 | 61 |               ]);
62 |    | -           postInstall = postInstall;
63 |    | -           meta.mainProgram = "llama";
64 | 62 |           };
65 | 63 |           packages.opencl = pkgs.stdenv.mkDerivation {
66 |    | -           name = "llama.cpp";
67 |    | -           src = ./.;
68 |    | -           postPatch = postPatch;
69 |    | -           nativeBuildInputs = nativeBuildInputs;
   | 64 | +           inherit name src meta postPatch nativeBuildInputs postInstall;
70 | 65 |             buildInputs = with pkgs; buildInputs ++ [ clblast ];
71 | 66 |             cmakeFlags = cmakeFlags ++ [
72 | 67 |               "-DLLAMA_CLBLAST=ON"
73 | 68 |             ];
74 |    | -           postInstall = postInstall;
75 |    | -           meta.mainProgram = "llama";
   | 69 | +         };
   | 70 | +         packages.rocm = pkgs.stdenv.mkDerivation {
   | 71 | +           inherit name src meta postPatch nativeBuildInputs postInstall;
   | 72 | +           buildInputs = with pkgs; buildInputs ++ [ hip hipblas rocblas ];
   | 73 | +           cmakeFlags = cmakeFlags ++ [
   | 74 | +             "-DLLAMA_HIPBLAS=1"
   | 75 | +             "-DCMAKE_C_COMPILER=hipcc"
   | 76 | +             "-DCMAKE_CXX_COMPILER=hipcc"
   | 77 | +             "-DCMAKE_POSITION_INDEPENDENT_CODE=ON"
   | 78 | +           ];
76 | 79 |           };
77 | 80 |           apps.llama-server = {
78 | 81 |             type = "app";
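For reference, a minimal way to exercise the outputs defined above, assuming a flakes-enabled Nix and a local checkout of this repository (these are the generic flake commands, not part of the change itself; the ROCm build additionally requires a nixpkgs that provides hip, hipblas, and rocblas):

    # build the default package (OpenBLAS, or Metal on aarch64-darwin)
    nix build .#default

    # build the ROCm variant added in this diff
    nix build .#rocm

    # run the renamed server binary via the flake app
    nix run .#llama-server

The attribute paths follow from eachDefaultSystem plus the package/app names declared in the flake (packages.default, packages.opencl, packages.rocm, apps.llama-server).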