Skip to content

Instantly share code, notes, and snippets.

@CyberShadow
Created February 25, 2024 19:06
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save CyberShadow/f0871a3d0813e6f6695d2810acd5c02f to your computer and use it in GitHub Desktop.
Run llama.cpp / .llamafiles with Nix
let
  # Pinned nixpkgs snapshot, evaluated with CUDA support switched on so
  # llama.cpp is built against the CUDA toolkit.
  nixpkgsSrc = fetchTarball
    "https://github.com/NixOS/nixpkgs/archive/d64c6c31e50878fc6cd11d143a8bbb235bdfcc45.tar.gz";

  pkgs = import nixpkgsSrc { config.cudaSupport = true; };

  # llama.cpp built from the same pinned tree (picks up cudaSupport above).
  llamaCpp = pkgs.callPackage (import "${nixpkgsSrc}/pkgs/by-name/ll/llama-cpp/package.nix") { };

  # nixGL provides driver wrapper scripts (here the auto-detected NVIDIA
  # one) so GPU binaries can locate the host driver on non-NixOS systems.
  nixGLSrc = fetchTarball
    "https://github.com/nix-community/nixGL/archive/489d6b095ab9d289fe11af0219a9ff00fe87c7c5.zip";
  nixGLPkgs = pkgs.callPackage (import "${nixGLSrc}/nixGL.nix") { };

  # Wrapper script: "run-llamafile <subcommand> <args...>" forwards to the
  # matching llama-cpp-<subcommand> binary, launched through the
  # nixGLNvidia shim (the glob matches its driver-versioned name).
  mkRunner =
    { writeShellScript, llama-cpp, nixGL }:
    writeShellScript "run-llamafile" ''
      if [ $# -eq 0 ] ; then
      echo "Usage: $0 server -m wizardcoder-python-13b-v1.0.Q4_K_M.gguf -ngl 9999"
      exit 2
      fi
      program=$1
      shift
      ${nixGL.auto.nixGLNvidia}/bin/nixGLNvidia-* ${llama-cpp}/bin/llama-cpp-"$program" "$@"
    '';
in
pkgs.callPackage mkRunner { llama-cpp = llamaCpp; nixGL = nixGLPkgs; }
# Note: this is broken, though run-llamafile-rocm.nix works...
# NOTE(review): the author flags this variant as broken without stating why.
# One visible inconsistency: nixpkgs is evaluated with rocmSupport = true,
# but the binary is launched through the Intel/Mesa nixGL wrapper
# (nixGLIntel) — presumably a backend mismatch; TODO confirm the intended
# GPU stack before fixing. Code below is left byte-identical.
let
# Build function; callPackage fills `writeShellScript` from pkgs and the
# remaining two arguments from the explicit overrides passed at the bottom.
package = {
writeShellScript
, llama-cpp
, nixGL
}:
# Wrapper: "run-llamafile <subcommand> <args...>" forwards to the matching
# llama-cpp-<subcommand> binary under the nixGLIntel shim. `ulimit -v
# unlimited` lifts any inherited virtual-memory cap first.
writeShellScript "run-llamafile" ''
if [ $# -eq 0 ] ; then
echo "Usage: $0 server -m wizardcoder-python-13b-v1.0.Q4_K_M.gguf -ngl 9999"
exit 2
fi
program=$1
shift
ulimit -v unlimited
${nixGL.nixGLIntel}/bin/nixGLIntel ${llama-cpp}/bin/llama-cpp-"$program" "$@"
'';
# Pinned nixpkgs revision, evaluated with ROCm (not CUDA) support.
nixpkgs = fetchTarball "https://github.com/NixOS/nixpkgs/archive/d64c6c31e50878fc6cd11d143a8bbb235bdfcc45.tar.gz";
# nixpkgs = ../work/extern/nixpkgs;
pkgs = import nixpkgs {
config = {
cudaSupport = false;
rocmSupport = true;
};
};
# llama.cpp built from the pinned tree; inherits rocmSupport from the
# config above.
llama-cpp = pkgs.callPackage (import "${nixpkgs}/pkgs/by-name/ll/llama-cpp/package.nix") {};
# nixGL supplies GPU driver wrapper scripts for non-NixOS hosts.
nixGL-source = fetchTarball "https://github.com/nix-community/nixGL/archive/489d6b095ab9d289fe11af0219a9ff00fe87c7c5.zip";
nixGL = pkgs.callPackage (import "${nixGL-source}/nixGL.nix") {};
in
pkgs.callPackage package {
inherit llama-cpp nixGL;
}
let
  # Build function; callPackage supplies every argument from `pkgs`.
  package =
    { writeShellScript
    , symlinkJoin
    , clang
    , coreutils
    , gzip
    , bash
    , rocmPackages
    }:
    let
      # nixpkgs splits ROCm into many outputs; merge them into one
      # FHS-like tree so headers and libraries share a single prefix.
      # (The HIPCC_* variables below suggest the llamafile invokes hipcc
      # at runtime and expects a monolithic ROCm install — TODO confirm.)
      rocmtoolkit_joined = symlinkJoin {
        name = "rocm-merged";
        paths = with rocmPackages; [
          rocm-core clr rccl miopen miopengemm rocrand rocblas
          rocsparse hipsparse rocthrust rocprim hipcub roctracer
          rocfft rocsolver hipfft hipsolver hipblas
          rocminfo rocm-thunk rocm-comgr rocm-device-libs
          rocm-runtime clr.icd hipify
        ];
      };
    in
    # Usage: run-llamafile <file.llamafile> [args...].  Sets up PATH and
    # ROCm environment, lifts the virtual-memory cap, then runs the
    # llamafile via sh (llamafiles are self-extracting shell archives).
    writeShellScript "run-llamafile" ''
      if [ $# -eq 0 ] ; then
      echo "Usage: $0 wizardcoder.llamafile -ngl 9999"
      exit 2
      fi
      export PATH=${rocmtoolkit_joined}/bin
      export PATH=$PATH:${clang}/bin
      export PATH=$PATH:${coreutils}/bin
      export PATH=$PATH:${gzip}/bin
      export HIPCC_COMPILE_FLAGS_APPEND="-I${rocmtoolkit_joined}/include -I${rocmtoolkit_joined}/include/rocblas"
      export HIPCC_LINK_FLAGS_APPEND="-L${rocmtoolkit_joined}/lib"
      export ROCM_PATH=${rocmtoolkit_joined}
      export ROCM_SOURCE_DIR=${rocmtoolkit_joined}
      ulimit -v unlimited
      exec ${bash}/bin/sh "$@"
    '';
  # FIX: use the injected `bash` parameter instead of the original
  # `pkgs.bash`, which reached into the outer let-binding via lexical
  # capture and silently bypassed callPackage injection/overrides (the
  # `bash` parameter was declared but never used).  Also `exec` the final
  # command so the wrapper shell does not linger; the exported
  # environment and ulimit are inherited across exec.

  # Pinned nixpkgs snapshot; no cudaSupport/rocmSupport config is set
  # here since nothing GPU-specific is built at eval time.
  nixpkgs = fetchTarball "https://github.com/NixOS/nixpkgs/archive/0ef56bec7281e2372338f2dfe7c13327ce96f6bb.tar.gz";
  pkgs = import nixpkgs {};
in
pkgs.callPackage package {}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment