Compare commits

...

2 Commits

Author SHA1 Message Date
3ac6d24e22 Merge pull request 'update flake lock, add flake diff, add evaluation json' (#4) from feature/rd into main
Reviewed-on: #4
2025-03-07 22:28:54 -05:00
e2c127a012 update flake lock, add flake diff, add evaluation json
Signed-off-by: ahuston-0 <aliceghuston@gmail.com>
2025-03-07 22:27:55 -05:00
8 changed files with 150 additions and 55 deletions

3
.gitignore vendored
View File

@@ -191,3 +191,6 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
pre.json
post.json

66
flake.lock generated
View File

@@ -21,11 +21,11 @@
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
@@ -39,11 +39,11 @@
"systems": "systems_2"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"lastModified": 1726560853,
"narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
"type": "github"
},
"original": {
@@ -81,11 +81,11 @@
]
},
"locked": {
"lastModified": 1703863825,
"narHash": "sha256-rXwqjtwiGKJheXB43ybM8NwWB8rO2dSRrEqes0S7F5Y=",
"lastModified": 1729742964,
"narHash": "sha256-B4mzTcQ0FZHdpeWcpDYPERtyjJd/NIuaQ9+BV1h+MpA=",
"owner": "nix-community",
"repo": "nix-github-actions",
"rev": "5163432afc817cf8bd1f031418d1869e4c9d5547",
"rev": "e04df33f62cdcf93d73e9a04142464753a16db67",
"type": "github"
},
"original": {
@@ -96,11 +96,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1722589669,
"narHash": "sha256-rxDnGBZK+Sp3df20nCCRrtQzRrJKxY7KtcYNPo5yfg8=",
"lastModified": 1741323510,
"narHash": "sha256-zQL0iErtVTxywxyWc7ajRmRNCncny95uD+2wmBHYOzc=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "42015a129a2ae1cd43a44490e8235d2b24c8a2e2",
"rev": "f104cca31ba6c0403b678ad9428726476b503782",
"type": "github"
},
"original": {
@@ -110,22 +110,6 @@
"type": "github"
}
},
"nixpkgs-stable": {
"locked": {
"lastModified": 1720386169,
"narHash": "sha256-NGKVY4PjzwAa4upkGtAMz1npHGoRzWotlSnVlqI40mo=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "194846768975b7ad2c4988bdb82572c00222c0d7",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-24.05",
"repo": "nixpkgs",
"type": "github"
}
},
"poetry2nix": {
"inputs": {
"flake-utils": "flake-utils_2",
@@ -137,11 +121,11 @@
"treefmt-nix": "treefmt-nix"
},
"locked": {
"lastModified": 1722515463,
"narHash": "sha256-6FVPz1WzHak65xJQg8tRjVyFEWMesGxfskKaCxDUnRk=",
"lastModified": 1741163974,
"narHash": "sha256-QehWX1ik9vqkylkbvdpO5XZKaqf2fsE92iMEyRBW2qI=",
"owner": "nix-community",
"repo": "poetry2nix",
"rev": "8c25e871bba3f472e1569bbf6c0f52dcc34bf2a4",
"rev": "98293f0b368f24c48e05aaa2359dcc0de15e976f",
"type": "github"
},
"original": {
@@ -156,15 +140,14 @@
"gitignore": "gitignore",
"nixpkgs": [
"nixpkgs"
],
"nixpkgs-stable": "nixpkgs-stable"
]
},
"locked": {
"lastModified": 1721042469,
"narHash": "sha256-6FPUl7HVtvRHCCBQne7Ylp4p+dpP3P/OYuzjztZ4s70=",
"lastModified": 1741379162,
"narHash": "sha256-srpAbmJapkaqGRE3ytf3bj4XshspVR5964OX5LfjDWc=",
"owner": "cachix",
"repo": "pre-commit-hooks.nix",
"rev": "f451c19376071a90d8c58ab1a953c6e9840527fd",
"rev": "b5a62751225b2f62ff3147d0a334055ebadcd5cc",
"type": "github"
},
"original": {
@@ -221,8 +204,9 @@
"type": "github"
},
"original": {
"id": "systems",
"type": "indirect"
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"treefmt-nix": {
@@ -233,11 +217,11 @@
]
},
"locked": {
"lastModified": 1719749022,
"narHash": "sha256-ddPKHcqaKCIFSFc/cvxS14goUhCOAwsM1PbMr0ZtHMg=",
"lastModified": 1730120726,
"narHash": "sha256-LqHYIxMrl/1p3/kvm2ir925tZ8DkI0KA10djk8wecSk=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "8df5ff62195d4e67e2264df0b7f5e8c9995fd0bd",
"rev": "9ef337e492a5555d8e17a51c911ff1f02635be15",
"type": "github"
},
"original": {

View File

@@ -71,7 +71,7 @@
inherit pkgs;
packages = {
flupdt = pkgs.poetry2nix.mkPoetryApplication poetryConfig // {
develop = true;
develop = false;
};
default = self.packages.${system}.flupdt;
};

View File

@@ -13,4 +13,32 @@ def parse_inputs() -> argparse.Namespace:
parser.add_argument("--keep-hydra", action="store_true", help="retain Hydra jobs")
parser.add_argument("--build", action="store_true", help="allow building Hydra jobs")
parser.add_argument("--evaluate", action="store_true", help="allow evaluating Hydra jobs")
return parser.parse_args()
parser.add_argument(
"--json", metavar="json-path", help="whether or not to output evaluations to a json"
)
parser.add_argument(
"--compare-drvs",
action="store_true",
help="whether to compare two drv sets, must provide two evaluation jsons to compare",
)
parser.add_argument(
"--compare-pre-json",
metavar="pre-json-path",
default=None,
help="location of pre.json for comparison. defaults to <flake_path>/pre.json",
)
parser.add_argument(
"--compare-post-json",
metavar="post-json-path",
default=None,
help="location of post.json for comparison. defaults to <flake_path>/post.json",
)
args = parser.parse_args()
if args.compare_pre_json is None:
args.compare_pre_json = args.flake_path + "/pre.json"
if args.compare_post_json is None:
args.compare_post_json = args.flake_path + "/post.json"
return args

View File

@@ -4,11 +4,14 @@ from __future__ import annotations
import logging
import re
from tempfile import mkdtemp
from flupdt.common import bash_wrapper
drv_re = re.compile(r".*(/nix/store/.*\.drv).*")
OUTPUT_DIR = mkdtemp(prefix="flupdt-outputs-")
def build_output(path: str, output: str) -> str | None:
"""Builds a given output in a flake.
@@ -18,7 +21,8 @@ def build_output(path: str, output: str) -> str | None:
:returns the .drv path on success or None on failure
"""
logging.info(f"build {output}")
out = bash_wrapper(f"nix build {path}#{output} -o {output}.nixoutput")
logging.debug(f"outputting to {OUTPUT_DIR}/{output}.nixoutput")
out = bash_wrapper(f"nix build {path}#{output} -o {OUTPUT_DIR}/{output}.nixoutput")
logging.debug("output")
logging.debug(out[0])
logging.debug("error")

30
flupdt/flake_diff.py Normal file
View File

@@ -0,0 +1,30 @@
"""Utility to diff nix derivations."""
import logging
import shutil
from flupdt.common import bash_wrapper
def compare_derivations(
    path_to_flake: str, path_to_pre_drv: str, path_to_post_drv: str
) -> list[str]:
    """Diff two derivation snapshots of a flake using the ``nvd`` tool.

    :param path_to_flake: path to the flake, used as the working directory for the diff
    :param path_to_pre_drv: derivation path from the pre-update snapshot
    :param path_to_post_drv: derivation path from the post-update snapshot
    :returns the output of the nvd diff invocation
    :raises RuntimeError: fails if nvd is not present in the PATH
    """
    # Resolve the nvd binary up front so we can fail with a clear message
    # instead of a cryptic shell error.
    nvd_bin = shutil.which("nvd")
    if nvd_bin is None:
        status_msg = "nvd is not available in the PATH, please verify that it is installed"
        raise RuntimeError(status_msg)

    # NOTE(review): bash_wrapper appears to return (stdout, stderr, returncode)
    # based on the three indexed accesses below — confirm against flupdt.common.
    result = bash_wrapper(
        f"{nvd_bin} diff {path_to_pre_drv} {path_to_post_drv}", path=path_to_flake
    )
    logging.debug(result[0])
    logging.debug(result[1])
    logging.debug(result[2])
    return result

View File

@@ -32,3 +32,5 @@ def evaluate_output(path: str, output: str) -> str | None:
raise RuntimeError(out_msg)
drv = drv_match.group(1)
logging.debug(f"derivation evaluated to {drv}")
return drv

View File

@@ -4,10 +4,12 @@
import logging
from argparse import Namespace
from pathlib import Path
from flupdt.cli import parse_inputs
from flupdt.common import configure_logger, partition
from flupdt.flake_build import build_output
from flupdt.flake_diff import compare_derivations
from flupdt.flake_eval import evaluate_output
from flupdt.flake_show import get_derivations
@@ -20,11 +22,17 @@ def batch_eval(args: Namespace, flake_path: str, derivations: list[str]) -> None
:params derivations: list of derivations to run against
:returns None
"""
drv_map = {}
for d in derivations:
if args.evaluate:
evaluate_output(flake_path, d)
drv_map[d] = evaluate_output(flake_path, d)
if args.build:
build_output(flake_path, d)
if args.json:
with Path.open(args.json, "w+") as f:
from json import dump
dump(drv_map, f)
def main() -> None:
@@ -35,18 +43,54 @@ def main() -> None:
"""
configure_logger("DEBUG")
args = parse_inputs()
flake_path = args.flake_path
derivations, hydra_jobs = partition(
lambda s: s.startswith("hydraJobs"), get_derivations(flake_path)
)
derivations, hydra_jobs = list(derivations), list(hydra_jobs)
logging.info(f"derivations: {list(derivations)}")
batch_eval(args, flake_path, derivations)
if args.compare_drvs:
pre_json_dict = {}
post_json_dict = {}
from json import load
with (
Path.open(args.compare_pre_json, "r") as pre,
Path.open(args.compare_post_json, "r") as post,
):
pre_json_dict = load(pre)
post_json_dict = load(post)
logging.debug(f"pre-snapshot derivations: {pre_json_dict}")
logging.debug(f"post-snapshot derivations: {post_json_dict}")
pre_json_keys = set(pre_json_dict.keys())
post_json_keys = set(post_json_dict.keys())
common_keys_to_eval = pre_json_keys.union(post_json_keys)
missing_post_keys = pre_json_keys.difference(common_keys_to_eval)
missing_pre_keys = post_json_keys.difference(common_keys_to_eval)
if missing_pre_keys:
logging.warning(f"Following outputs are missing from pre-snapshot: {missing_pre_keys}")
if missing_post_keys:
logging.warning(f"Following outputs are missing from post-snapshot: {missing_post_keys}")
logging.info(f"Evaluating the following outputs for differences: {common_keys_to_eval}")
for output_key in common_keys_to_eval:
compare_derivations(args.flake_path, pre_json_dict[output_key], post_json_dict[output_key])
if not args.keep_hydra:
logging.info("--keep-hydra flag is not specified, removing Hydra jobs")
else:
batch_eval(args, flake_path, hydra_jobs)
flake_path = args.flake_path
derivations, hydra_jobs = partition(
lambda s: s.startswith("hydraJobs"), get_derivations(flake_path)
)
derivations, hydra_jobs = list(derivations), list(hydra_jobs)
logging.info(f"derivations: {list(derivations)}")
batch_eval(args, flake_path, derivations)
if not args.keep_hydra:
logging.info("--keep-hydra flag is not specified, removing Hydra jobs")
else:
batch_eval(args, flake_path, hydra_jobs)
if __name__ == "__main__":