
Commit 28d0c6d

build(nix): Package python scripts independently of main derivation
1 parent 48ab2fd commit 28d0c6d

15 files changed: +429 −266 lines

.devops/nix/package.nix

Lines changed: 1 addition & 2 deletions
@@ -30,7 +30,6 @@
   useRocm ? config.rocmSupport,
   useVulkan ? false,
   llamaVersion ? "0.0.0", # Arbitrary version, substituted by the flake
-  gguf-py,
 }@inputs:
 
 let
@@ -151,7 +150,7 @@ effectiveStdenv.mkDerivation (finalAttrs: {
     # TODO: Package up each Python script or service appropriately.
     # If we were to migrate to buildPythonPackage and prepare the `pyproject.toml`,
     # we could make those *.py into setuptools' entrypoints
-    substituteInPlace ./*.py --replace "/usr/bin/env python" "${llama-python-base-with-gguf}/bin/python"
+    # substituteInPlace ./*.py --replace "/usr/bin/env python" "${llama-python-base-with-gguf}/bin/python"
   '';
 
   nativeBuildInputs =

.devops/nix/python-scripts.nix

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+{
+  buildPythonPackage,
+  poetry-core,
+  breakpointHook,
+  python3Packages,
+  gguf-py
+}@inputs:
+
+buildPythonPackage {
+  pname = "llama-scripts";
+  src = ../../.;
+  version = "0.0.0";
+  pyproject = true;
+  nativeBuildInputs = [ poetry-core ];
+  projectDir = ../../.;
+  propagatedBuildInputs = with python3Packages; [
+    numpy
+    sentencepiece
+    transformers
+    protobuf
+    torchWithoutCuda
+    gguf-py
+  ];
+}
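
Once .devops/nix/scope.nix (below) wires this derivation into the package scope, it should be buildable independently of the main llama-cpp derivation. Following the comment in flake.nix further down ("access them as `nix build .#llamaPackages.${scriptName}`"), the invocation would presumably be:

    nix build .#llamaPackages.python-scripts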

.devops/nix/scope.nix

Lines changed: 4 additions & 1 deletion
@@ -3,6 +3,7 @@
   newScope,
   python3,
   llamaVersion ? "0.0.0",
+  poetry2nix,
 }:
 
 let
@@ -19,7 +20,6 @@ in
 
 lib.makeScope newScope (self: {
   inherit llamaVersion;
-  pp = python3.pkgs;
   gguf-py = self.callPackage ./package-gguf-py.nix {
     inherit
       buildPythonPackage
@@ -28,6 +28,9 @@ lib.makeScope newScope (self: {
       pytestCheckHook
       ;
   };
+  python-scripts = self.callPackage ./python-scripts.nix {
+    inherit buildPythonPackage poetry-core poetry2nix;
+  };
   llama-cpp = self.callPackage ./package.nix { };
   docker = self.callPackage ./docker.nix { };
   docker-min = self.callPackage ./docker.nix { interactive = false; };
File renamed without changes.

convert-hf-to-gguf.py renamed to bin/convert_hf_to_gguf.py

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@
 sys.path.insert(1, str(Path(__file__).parent / 'gguf-py'))
 import gguf
 
-from convert import HfVocab
+from bin.convert import HfVocab
 
 
 ###### MODEL DEFINITIONS ######

convert-llama-ggml-to-gguf.py renamed to bin/convert_llama_ggml_to_gguf.py

Lines changed: 1 addition & 1 deletion
@@ -352,7 +352,7 @@ def add_tensors(self, gguf_writer):
 
 
 def handle_metadata(cfg, hp):
-    import convert
+    import bin.convert as convert
     assert cfg.model_metadata_dir.is_dir(), 'Metadata dir is not a directory'
     hf_config_path = cfg.model_metadata_dir / "config.json"
     orig_config_path = cfg.model_metadata_dir / "params.json"
File renamed without changes.
File renamed without changes.
File renamed without changes.

flake.lock

Lines changed: 129 additions & 1 deletion
Some generated files are not rendered by default.

flake.nix

Lines changed: 17 additions & 4 deletions
@@ -109,11 +109,14 @@
       # Cf. https://nixos.org/manual/nix/unstable/command-ref/new-cli/nix3-flake.html?highlight=flake#flake-format
       flake.overlays.default = (
         final: prev: {
-          llamaPackages = final.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+          llamaPackages = final.callPackage .devops/nix/scope.nix {
+            inherit llamaVersion;
+          };
           inherit (final.llamaPackages) llama-cpp;
         }
       );
 
+      debug = true;
       systems = [
         "aarch64-darwin"
         "aarch64-linux"
@@ -132,6 +135,7 @@
           ...
         }:
         {
+          debug = true;
           # Unlike `.#packages`, legacyPackages may contain values of
           # arbitrary types (including nested attrsets) and may even throw
           # exceptions. This attribute isn't recursed into by `nix flake
@@ -141,9 +145,18 @@
           # access them as `nix build .#llamaPackages.${scriptName}` using
           # the same path you would with an overlay.
           legacyPackages = {
-            llamaPackages = pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
-            llamaPackagesCuda = pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
-            llamaPackagesRocm = pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+            llamaPackages = pkgs.callPackage .devops/nix/scope.nix {
+              inherit llamaVersion;
+              poetry2nix = import inputs.poetry2nix { pkgs = pkgs; };
+            };
+            llamaPackagesCuda = pkgsCuda.callPackage .devops/nix/scope.nix {
+              inherit llamaVersion;
+              poetry2nix = import inputs.poetry2nix { pkgs = pkgs; };
+            };
+            llamaPackagesRocm = pkgsRocm.callPackage .devops/nix/scope.nix {
+              inherit llamaVersion;
+              poetry2nix = import inputs.poetry2nix { pkgs = pkgs; };
+            };
           };
 
           # We don't use the overlay here so as to avoid making too many instances of nixpkgs,
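
For reference, a minimal sketch of instantiating the updated scope outside the flake, mirroring the legacyPackages wiring above. The pkgs and poetry2nixSrc bindings are assumptions for illustration, not part of this commit:

    # Hypothetical consumer sketch: pkgs is assumed to be a nixpkgs instance,
    # poetry2nixSrc an assumed path to a poetry2nix checkout.
    let
      llamaPackages = pkgs.callPackage ./.devops/nix/scope.nix {
        llamaVersion = "0.0.0";
        poetry2nix = import poetry2nixSrc { inherit pkgs; };
      };
    in
    # The new attribute added by scope.nix in this commit:
    llamaPackages.python-scripts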
