#!/usr/bin/env python3
"""Default processing of flake outputs for evaluating flake updates."""

import logging
from argparse import Namespace

from flupdt.cli import parse_inputs
from flupdt.common import configure_logger, partition
from flupdt.flake_build import build_output
from flupdt.flake_eval import evaluate_output
from flupdt.flake_show import get_derivations


def batch_eval(args: Namespace, flake_path: str, derivations: list[str]) -> None:
    """Bulk run evaluations or builds on a derivation set.

    :param args: argument namespace to check against
    :param flake_path: path to the flake to be evaluated
    :param derivations: list of derivations to run against
    :returns: None
    """
    for d in derivations:
        if args.evaluate:
            evaluate_output(flake_path, d)
        if args.build:
            build_output(flake_path, d)


def main() -> None:
    """Set up logging, parse arguments, and run the evaluation routine.

    :returns: None
    """
    configure_logger("DEBUG")
    args = parse_inputs()
    flake_path = args.flake_path

    # Split the flake's outputs into regular derivations and hydraJobs outputs.
    derivations, hydra_jobs = partition(
        lambda s: s.startswith("hydraJobs"), get_derivations(flake_path)
    )
    derivations, hydra_jobs = list(derivations), list(hydra_jobs)
    logging.info(f"derivations: {derivations}")

    batch_eval(args, flake_path, derivations)

    if not args.keep_hydra:
        logging.info("--keep-hydra flag is not specified, skipping hydraJobs outputs")
    else:
        batch_eval(args, flake_path, hydra_jobs)


if __name__ == "__main__":
    main()
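
# Note on `partition`: the unpacking above assumes flupdt.common.partition follows
# the (items failing the predicate, items passing it) convention of the itertools
# recipes. A minimal sketch of that assumed behaviour, for reference only; this is
# hypothetical and not the actual flupdt implementation:
#
#     from itertools import filterfalse, tee
#
#     def partition(predicate, iterable):
#         """Split `iterable` into (non-matching, matching) iterators."""
#         no, yes = tee(iterable)
#         return filterfalse(predicate, no), filter(predicate, yes)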