diff --git a/.gitignore b/.gitignore index f8bf5718..ddcbadc4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,48 +1,8 @@ -/.pls_cache -*.o *~ -Makefile -Makefile.in -.deps -.hydra-data -/config.guess -/config.log -/config.status -/config.sub -/configure -/depcomp -/libtool -/ltmain.sh -/autom4te.cache -/aclocal.m4 -/missing -/install-sh +.test_info.* /src/sql/hydra-postgresql.sql /src/sql/hydra-sqlite.sql /src/sql/tmp.sqlite -/src/hydra-eval-jobs/hydra-eval-jobs -/src/root/static/bootstrap -/src/root/static/js/flot -/tests -/doc/manual/images -/doc/manual/manual.html -/doc/manual/manual.pdf -/t/.bzr* -/t/.git* -/t/.hg* -/t/nix -/t/data -/t/jobs/config.nix -t/jobs/declarative/project.json -/inst -hydra-config.h -hydra-config.h.in result result-* outputs -config -stamp-h1 -src/hydra-evaluator/hydra-evaluator -src/hydra-queue-runner/hydra-queue-runner -src/root/static/fontawesome/ -src/root/static/bootstrap*/ diff --git a/.yath.rc b/.yath.rc deleted file mode 100644 index 19bb35af..00000000 --- a/.yath.rc +++ /dev/null @@ -1,2 +0,0 @@ -[test] --I=rel(t/lib) diff --git a/Makefile.am b/Makefile.am deleted file mode 100644 index a28e3f33..00000000 --- a/Makefile.am +++ /dev/null @@ -1,12 +0,0 @@ -SUBDIRS = src doc -if CAN_DO_CHECK - SUBDIRS += t -endif - -BOOTCLEAN_SUBDIRS = $(SUBDIRS) -DIST_SUBDIRS = $(SUBDIRS) -EXTRA_DIST = nixos-modules/hydra.nix - -install-data-local: nixos-modules/hydra.nix - $(INSTALL) -d $(DESTDIR)$(datadir)/nix - $(INSTALL_DATA) nixos-modules/hydra.nix $(DESTDIR)$(datadir)/nix/hydra-module.nix diff --git a/README.md b/README.md index 58373052..8ac18ac9 100644 --- a/README.md +++ b/README.md @@ -39,16 +39,16 @@ In order to evaluate and build anything you need to create _projects_ that conta #### Creating A Project Log in as administrator, click "_Admin_" and select "_Create project_". Fill the form as follows: -- **Identifier**: `hello` +- **Identifier**: `hello-project` - **Display name**: `hello` - **Description**: `hello project` Click "_Create project_". #### Creating A Jobset -After creating a project you are forwarded to the project page. Click "_Actions_" and choose "_Create jobset_". Fill the form with the following values: +After creating a project you are forwarded to the project page. Click "_Actions_" and choose "_Create jobset_". Change **Type** to Legacy for the example below. Fill the form with the following values: -- **Identifier**: `hello` +- **Identifier**: `hello-project` - **Nix expression**: `examples/hello.nix` in `hydra` - **Check interval**: 60 - **Scheduling shares**: 1 @@ -57,7 +57,7 @@ We have to add two inputs for this jobset. 
One for _nixpkgs_ and one for _hydra_ - **Input name**: `nixpkgs` - **Type**: `Git checkout` -- **Value**: `https://github.com/nixos/nixpkgs-channels nixos-20.03` +- **Value**: `https://github.com/NixOS/nixpkgs nixos-24.05` - **Input name**: `hydra` - **Type**: `Git checkout` diff --git a/configure.ac b/configure.ac deleted file mode 100644 index e5c57d14..00000000 --- a/configure.ac +++ /dev/null @@ -1,91 +0,0 @@ -AC_INIT([Hydra], [m4_esyscmd([echo -n $(cat ./version.txt)$VERSION_SUFFIX])]) -AC_CONFIG_AUX_DIR(config) -AM_INIT_AUTOMAKE([foreign serial-tests]) - -AC_LANG([C++]) - -AC_PROG_CC -AC_PROG_INSTALL -AC_PROG_LN_S -AC_PROG_LIBTOOL -AC_PROG_CXX - -AC_PATH_PROG([XSLTPROC], [xsltproc]) - -AC_ARG_WITH([docbook-xsl], - [AS_HELP_STRING([--with-docbook-xsl=PATH], - [path of the DocBook XSL stylesheets])], - [docbookxsl="$withval"], - [docbookxsl="/docbook-xsl-missing"]) -AC_SUBST([docbookxsl]) - - -AC_DEFUN([NEED_PROG], -[ -AC_PATH_PROG($1, $2) -if test -z "$$1"; then - AC_MSG_ERROR([$2 is required]) -fi -]) - -NEED_PROG(perl, perl) - -NEED_PROG([NIX_STORE_PROGRAM], [nix-store]) - -AC_MSG_CHECKING([whether $NIX_STORE_PROGRAM is recent enough]) -if test -n "$NIX_STORE" -a -n "$TMPDIR" -then - # This may be executed from within a build chroot, so pacify - # `nix-store' instead of letting it choke while trying to mkdir - # /nix/var. - NIX_STATE_DIR="$TMPDIR" - export NIX_STATE_DIR -fi -if NIX_REMOTE=daemon PAGER=cat "$NIX_STORE_PROGRAM" --timeout 123 -q; then - AC_MSG_RESULT([yes]) -else - AC_MSG_RESULT([no]) - AC_MSG_ERROR([`$NIX_STORE_PROGRAM' doesn't support `--timeout'; please use a newer version.]) -fi - -PKG_CHECK_MODULES([NIX], [nix-main nix-expr nix-store]) - -testPath="$(dirname $(type -p expr))" -AC_SUBST(testPath) - -CXXFLAGS+=" -include nix/config.h" - -AC_CONFIG_FILES([ - Makefile - doc/Makefile - doc/manual/Makefile - src/Makefile - src/hydra-evaluator/Makefile - src/hydra-eval-jobs/Makefile - src/hydra-queue-runner/Makefile - src/sql/Makefile - src/ttf/Makefile - src/lib/Makefile - src/root/Makefile - src/script/Makefile -]) - -# Tests might be filtered out -AM_CONDITIONAL([CAN_DO_CHECK], [test -f "$srcdir/t/api-test.t"]) -AM_COND_IF( - [CAN_DO_CHECK], - [ - jobsPath="$(realpath ./t/jobs)" - AC_SUBST(jobsPath) - AC_CONFIG_FILES([ - t/Makefile - t/jobs/config.nix - t/jobs/declarative/project.json - ]) - ]) - -AC_CONFIG_COMMANDS([executable-scripts], []) - -AC_CONFIG_HEADER([hydra-config.h]) - -AC_OUTPUT diff --git a/default.nix b/default.nix index d4c7ec29..b81119c3 100644 --- a/default.nix +++ b/default.nix @@ -1,6 +1,6 @@ # The `default.nix` in flake-compat reads `flake.nix` and `flake.lock` from `src` and # returns an attribute set of the shape `{ defaultNix, shellNix }` -(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) { +(import (fetchTarball "https://github.com/edolstra/flake-compat/archive/master.tar.gz") { src = ./.; }).defaultNix diff --git a/doc/Makefile.am b/doc/Makefile.am deleted file mode 100644 index 9ac91d24..00000000 --- a/doc/Makefile.am +++ /dev/null @@ -1,4 +0,0 @@ -SUBDIRS = manual -BOOTCLEAN_SUBDIRS = $(SUBDIRS) -DIST_SUBDIRS = $(SUBDIRS) - diff --git a/doc/manual/Makefile.am b/doc/manual/Makefile.am deleted file mode 100644 index ec732166..00000000 --- a/doc/manual/Makefile.am +++ /dev/null @@ -1,6 +0,0 @@ -MD_FILES = src/*.md - -EXTRA_DIST = $(MD_FILES) - -install: $(MD_FILES) - mdbook build . 
-d $(docdir) diff --git a/doc/manual/meson.build b/doc/manual/meson.build new file mode 100644 index 00000000..11178809 --- /dev/null +++ b/doc/manual/meson.build @@ -0,0 +1,36 @@ +srcs = files( + 'src/SUMMARY.md', + 'src/about.md', + 'src/api.md', + 'src/configuration.md', + 'src/hacking.md', + 'src/installation.md', + 'src/introduction.md', + 'src/jobs.md', + 'src/monitoring/README.md', + 'src/notifications.md', + 'src/plugins/README.md', + 'src/plugins/RunCommand.md', + 'src/plugins/declarative-projects.md', + 'src/projects.md', + 'src/webhooks.md', +) + +manual = custom_target( + 'manual', + command: [ + mdbook, + 'build', + '@SOURCE_ROOT@/doc/manual', + '-d', meson.current_build_dir() / 'html' + ], + depend_files: srcs, + output: ['html'], + build_by_default: true, +) + +install_subdir( + manual.full_path(), + install_dir: get_option('datadir') / 'doc/hydra', + strip_directory: true, +) diff --git a/doc/manual/src/configuration.md b/doc/manual/src/configuration.md index 4954040c..d370312a 100644 --- a/doc/manual/src/configuration.md +++ b/doc/manual/src/configuration.md @@ -208,7 +208,8 @@ Example configuration: # Make all users in the hydra_admin group Hydra admins hydra_admin = admin - # Allow all users in the dev group to restart jobs and cancel builds + # Allow all users in the dev group to eval jobsets, restart jobs and cancel builds + dev = eval-jobset dev = restart-jobs dev = cancel-build diff --git a/doc/manual/src/hacking.md b/doc/manual/src/hacking.md index 49c17395..ec96b8c6 100644 --- a/doc/manual/src/hacking.md +++ b/doc/manual/src/hacking.md @@ -15,12 +15,18 @@ and dependencies can be found: $ nix-shell ``` +or, when flakes are enabled: + +```console +$ nix develop +``` + To build Hydra, you should then do: ```console [nix-shell]$ autoreconfPhase [nix-shell]$ configurePhase -[nix-shell]$ make +[nix-shell]$ make -j$(nproc) ``` You start a local database, the webserver, and other components with @@ -30,6 +36,8 @@ foreman: $ foreman start ``` +The Hydra interface will be available on port 63333, with an admin user named "alice" and password "foobar". + You can run just the Hydra web server in your source tree as follows: ```console diff --git a/doc/manual/src/plugins/README.md b/doc/manual/src/plugins/README.md index 6e46224f..93aa80b4 100644 --- a/doc/manual/src/plugins/README.md +++ b/doc/manual/src/plugins/README.md @@ -42,7 +42,7 @@ Sets CircleCI status. ## Compress build logs -Compresses build logs after a build with bzip2. +Compresses build logs after a build with bzip2 or zstd. ### Configuration options @@ -50,6 +50,14 @@ Compresses build logs after a build with bzip2. Enable log compression +- `compress_build_logs_compression` + +Which compression format to use. Valid values are bzip2 (default) and zstd. + +- `compress_build_logs_silent` + +Whether to compress logs silently. + ### Example ```xml diff --git a/doc/manual/src/webhooks.md b/doc/manual/src/webhooks.md index 2b26cd61..674e1064 100644 --- a/doc/manual/src/webhooks.md +++ b/doc/manual/src/webhooks.md @@ -1,9 +1,12 @@ # Webhooks -Hydra can be notified by github's webhook to trigger a new evaluation when a +Hydra can be notified by GitHub or Gitea webhooks to trigger a new evaluation when a jobset has a github repo in its input. -To set up a github webhook go to `https://github.com/<owner>/<repo>/settings` and in the `Webhooks` tab -click on `Add webhook`. + +## GitHub + +To set up a webhook for a GitHub repository go to `https://github.com/<owner>/<repo>/settings` +and in the `Webhooks` tab click on `Add webhook`.
- In `Payload URL` fill in `https://<your-hydra-domain>/api/push-github`. - In `Content type` switch to `application/json`. @@ -11,3 +14,14 @@ click on `Add webhook`. - For `Which events would you like to trigger this webhook?` keep the default option for events on `Just the push event.`. Then add the hook with `Add webhook`. + +## Gitea + +To set up a webhook for a Gitea repository go to the settings of the repository in your Gitea instance +and in the `Webhooks` tab click on `Add Webhook` and choose `Gitea` in the drop-down. + +- In `Target URL` fill in `https://<your-hydra-domain>/api/push-gitea`. +- Keep HTTP method `POST`, POST Content Type `application/json` and Trigger On `Push Events`. +- Change the branch filter to match the git branch Hydra builds. + +Then add the hook with `Add webhook`. diff --git a/examples/hello.nix b/examples/hello.nix index 5a5d2585..84707025 100644 --- a/examples/hello.nix +++ b/examples/hello.nix @@ -1,5 +1,5 @@ # -# jobset example file. This file canbe referenced as Nix expression +# jobset example file. This file can be referenced as Nix expression # in a jobset configuration along with inputs for nixpkgs and the # repository containing this file. # diff --git a/flake.lock b/flake.lock index 38b9aded..8055275b 100644 --- a/flake.lock +++ b/flake.lock @@ -1,34 +1,18 @@ { "nodes": { - "flake-compat": { - "flake": false, - "locked": { - "lastModified": 1673956053, - "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", - "owner": "edolstra", - "repo": "flake-compat", - "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", - "type": "github" - }, - "original": { - "owner": "edolstra", - "repo": "flake-compat", - "type": "github" - } - }, "flake-parts": { "inputs": { "nixpkgs-lib": [ - "nix", + "nix-eval-jobs", "nixpkgs" ] }, "locked": { - "lastModified": 1712014858, - "narHash": "sha256-sB4SWl2lX95bExY2gMFG5HIzvva5AVMJd4Igm+GpZNw=", + "lastModified": 1722555600, + "narHash": "sha256-XOQkdLafnb/p9ij77byFQjDf5m5QYl9b2REiVClC+x4=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "9126214d0a59633752a136528f5f3b9aa8565b7d", + "rev": "8471fe90ad337a8074e957b69ca4d0089218391d", "type": "github" }, "original": { @@ -37,131 +21,120 @@ "type": "github" } }, - "flake-utils": { - "locked": { - "lastModified": 1667395993, - "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, "libgit2": { "flake": false, "locked": { - "lastModified": 1697646580, - "narHash": "sha256-oX4Z3S9WtJlwvj0uH9HlYcWv+x1hqp8mhXl7HsLu2f0=", + "lastModified": 1715853528, + "narHash": "sha256-J2rCxTecyLbbDdsyBWn9w7r3pbKRMkI9E7RvRgAqBdY=", "owner": "libgit2", "repo": "libgit2", - "rev": "45fd9ed7ae1a9b74b957ef4f337bc3c8b3df01b5", + "rev": "36f7e21ad757a3dacc58cf7944329da6bc1d6e96", "type": "github" }, "original": { "owner": "libgit2", + "ref": "v1.8.1", "repo": "libgit2", "type": "github" } }, "nix": { "inputs": { - "flake-compat": "flake-compat", - "flake-parts": "flake-parts", - "libgit2": "libgit2", + "flake-compat": [], + "flake-parts": [], + "git-hooks-nix": [], + "libgit2": [ + "libgit2" + ], "nixpkgs": [ "nixpkgs" ], - "nixpkgs-regression": "nixpkgs-regression", - "pre-commit-hooks": "pre-commit-hooks" + "nixpkgs-23-11": [], + "nixpkgs-regression": [] }, "locked": { - "lastModified": 1717016009, - "narHash": "sha256-RJ0k7fvKjuUGt439F9uFtUw44SM87A+XKBf6sH6qPy8=", + "lastModified": 1726787955,
+ "narHash": "sha256-XFznzb8L4SdUm9u+w3DPpMWJhffuv+/6+aiVl00slns=", "owner": "NixOS", "repo": "nix", - "rev": "ef5c846e257e1e284ad47ed6be4308d190fe6531", + "rev": "a7fdef6858dd45b9d7bda7c92324c63faee7f509", "type": "github" }, "original": { "owner": "NixOS", + "ref": "2.24-maintenance", "repo": "nix", "type": "github" } }, + "nix-eval-jobs": { + "inputs": { + "flake-parts": "flake-parts", + "nix-github-actions": [], + "nixpkgs": [ + "nixpkgs" + ], + "treefmt-nix": "treefmt-nix" + }, + "locked": { + "lastModified": 1733814344, + "narHash": "sha256-3wwtKpS5tUBdjaGeSia7CotonbiRB6K5Kp0dsUt3nzU=", + "owner": "nix-community", + "repo": "nix-eval-jobs", + "rev": "889ea1406736b53cf165b6c28398aae3969418d1", + "type": "github" + }, + "original": { + "owner": "nix-community", + "ref": "release-2.24", + "repo": "nix-eval-jobs", + "type": "github" + } + }, "nixpkgs": { "locked": { - "lastModified": 1712848736, - "narHash": "sha256-CzZwhqyLlebljv1zFS2KWVH/3byHND0LfaO1jKsGuVo=", + "lastModified": 1726688310, + "narHash": "sha256-Xc9lEtentPCEtxc/F1e6jIZsd4MPDYv4Kugl9WtXlz0=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "1d6a23f11e44d0fb64b3237569b87658a9eb5643", + "rev": "dbebdd67a6006bb145d98c8debf9140ac7e651d0", "type": "github" }, "original": { "owner": "NixOS", - "ref": "nixos-23.11-small", + "ref": "nixos-24.05-small", "repo": "nixpkgs", "type": "github" } }, - "nixpkgs-regression": { - "locked": { - "lastModified": 1643052045, - "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - }, - "original": { - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - } - }, - "pre-commit-hooks": { - "inputs": { - "flake-compat": [ - "nix" - ], - "flake-utils": "flake-utils", - "gitignore": [ - "nix" - ], - "nixpkgs": [ - "nix", - "nixpkgs" - ], - "nixpkgs-stable": [ - "nix", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1712897695, - "narHash": "sha256-nMirxrGteNAl9sWiOhoN5tIHyjBbVi5e2tgZUgZlK3Y=", - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "rev": "40e6053ecb65fcbf12863338a6dcefb3f55f1bf8", - "type": "github" - }, - "original": { - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "type": "github" - } - }, "root": { "inputs": { + "libgit2": "libgit2", "nix": "nix", + "nix-eval-jobs": "nix-eval-jobs", "nixpkgs": "nixpkgs" } + }, + "treefmt-nix": { + "inputs": { + "nixpkgs": [ + "nix-eval-jobs", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1723303070, + "narHash": "sha256-krGNVA30yptyRonohQ+i9cnK+CfCpedg6z3qzqVJcTs=", + "owner": "numtide", + "repo": "treefmt-nix", + "rev": "14c092e0326de759e16b37535161b3cb9770cea3", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "treefmt-nix", + "type": "github" + } } }, "root": "root", diff --git a/flake.nix b/flake.nix index 63253c31..f12b8c2f 100644 --- a/flake.nix +++ b/flake.nix @@ -1,22 +1,30 @@ { description = "A Nix-based continuous build system"; - inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11-small"; - inputs.nix.url = "github:NixOS/nix"; - inputs.nix.inputs.nixpkgs.follows = "nixpkgs"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.05-small"; - outputs = { self, nixpkgs, nix }: + inputs.libgit2 = { url = "github:libgit2/libgit2/v1.8.1"; flake = false; }; + inputs.nix.url = "github:NixOS/nix/2.24-maintenance"; + inputs.nix.inputs.nixpkgs.follows = "nixpkgs"; + inputs.nix.inputs.libgit2.follows = "libgit2"; + + 
inputs.nix-eval-jobs.url = "github:nix-community/nix-eval-jobs/release-2.24"; + inputs.nix-eval-jobs.inputs.nixpkgs.follows = "nixpkgs"; + + # hide nix dev tooling from our lock file + inputs.nix.inputs.flake-parts.follows = ""; + inputs.nix.inputs.git-hooks-nix.follows = ""; + inputs.nix.inputs.nixpkgs-regression.follows = ""; + inputs.nix.inputs.nixpkgs-23-11.follows = ""; + inputs.nix.inputs.flake-compat.follows = ""; + + # hide nix-eval-jobs dev tooling from our lock file + inputs.nix-eval-jobs.inputs.nix-github-actions.follows = ""; + + outputs = { self, nixpkgs, nix, nix-eval-jobs, ... }: let systems = [ "x86_64-linux" "aarch64-linux" ]; forEachSystem = nixpkgs.lib.genAttrs systems; - - overlayList = [ self.overlays.default nix.overlays.default ]; - - pkgsBySystem = forEachSystem (system: import nixpkgs { - inherit system; - overlays = overlayList; - }); - in rec { @@ -24,12 +32,13 @@ overlays.default = final: prev: { hydra = final.callPackage ./package.nix { inherit (nixpkgs.lib) fileset; + nix-eval-jobs = nix-eval-jobs.packages.${final.system}.default; rawSrc = self; + nix-perl-bindings = final.nixComponents.nix-perl-bindings; }; }; hydraJobs = { - build = forEachSystem (system: packages.${system}.hydra); buildNoTests = forEachSystem (system: @@ -38,19 +47,21 @@ }) ); - manual = forEachSystem (system: - let pkgs = pkgsBySystem.${system}; in - pkgs.runCommand "hydra-manual-${pkgs.hydra.version}" { } + manual = forEachSystem (system: let + pkgs = nixpkgs.legacyPackages.${system}; + hydra = self.packages.${pkgs.hostPlatform.system}.hydra; + in + pkgs.runCommand "hydra-manual-${hydra.version}" { } '' mkdir -p $out/share - cp -prvd ${pkgs.hydra}/share/doc $out/share/ + cp -prvd ${hydra.doc}/share/doc $out/share/ mkdir $out/nix-support echo "doc manual $out/share/doc/hydra" >> $out/nix-support/hydra-build-products ''); tests = import ./nixos-tests.nix { - inherit forEachSystem nixpkgs pkgsBySystem nixosModules; + inherit forEachSystem nixpkgs nixosModules; }; container = nixosConfigurations.container.config.system.build.toplevel; @@ -63,12 +74,18 @@ }); packages = forEachSystem (system: { - hydra = pkgsBySystem.${system}.hydra; - default = pkgsBySystem.${system}.hydra; + hydra = nixpkgs.legacyPackages.${system}.callPackage ./package.nix { + inherit (nixpkgs.lib) fileset; + nix-eval-jobs = nix-eval-jobs.packages.${system}.default; + rawSrc = self; + nix = nix.packages.${system}.nix; + nix-perl-bindings = nix.hydraJobs.perlBindings.${system}; + }; + default = self.packages.${system}.hydra; }); nixosModules = import ./nixos-modules { - overlays = overlayList; + inherit self; }; nixosConfigurations.container = nixpkgs.lib.nixosSystem { @@ -76,7 +93,6 @@ modules = [ self.nixosModules.hydra - self.nixosModules.overlayNixpkgsForThisHydra self.nixosModules.hydraTest self.nixosModules.hydraProxy { diff --git a/hydra-api.yaml b/hydra-api.yaml index 623c9082..a2fdea28 100644 --- a/hydra-api.yaml +++ b/hydra-api.yaml @@ -70,7 +70,7 @@ paths: $ref: '#/components/examples/projects-success' /api/push: - put: + post: summary: trigger jobsets parameters: - in: query diff --git a/meson.build b/meson.build new file mode 100644 index 00000000..b9327d51 --- /dev/null +++ b/meson.build @@ -0,0 +1,40 @@ +project('hydra', 'cpp', + version: files('version.txt'), + license: 'GPL-3.0', + default_options: [ + 'debug=true', + 'optimization=2', + 'cpp_std=c++20', + ], +) + +nix_store_dep = dependency('nix-store', required: true) +nix_main_dep = dependency('nix-main', required: true) +nix_expr_dep = 
dependency('nix-expr', required: true) +nix_flake_dep = dependency('nix-flake', required: true) +nix_cmd_dep = dependency('nix-cmd', required: true) + +# Nix needs extra flags not provided in its pkg-config files. +nix_dep = declare_dependency( + dependencies: [ + nix_store_dep, + nix_main_dep, + nix_expr_dep, + nix_flake_dep, + nix_cmd_dep, + ], + compile_args: ['-include', 'nix/config.h'], +) + +pqxx_dep = dependency('libpqxx', required: true) + +prom_cpp_core_dep = dependency('prometheus-cpp-core', required: true) +prom_cpp_pull_dep = dependency('prometheus-cpp-pull', required: true) + +mdbook = find_program('mdbook', native: true) +perl = find_program('perl', native: true) + +subdir('doc/manual') +subdir('nixos-modules') +subdir('src') +subdir('t') diff --git a/nixos-modules/default.nix b/nixos-modules/default.nix index dac705c3..62b18406 100644 --- a/nixos-modules/default.nix +++ b/nixos-modules/default.nix @@ -1,11 +1,10 @@ -{ overlays }: +{ self }: { - hydra = import ./hydra.nix; - - overlayNixpkgsForThisHydra = { pkgs, ... }: { - nixpkgs = { inherit overlays; }; - services.hydra.package = pkgs.hydra; + hydra = { pkgs, lib, ... }: { + _file = ./default.nix; + imports = [ ./hydra.nix ]; + services.hydra-dev.package = lib.mkDefault self.packages.${pkgs.hostPlatform.system}.hydra; }; hydraTest = { pkgs, ... }: { diff --git a/nixos-modules/hydra.nix b/nixos-modules/hydra.nix index 7a0486c1..4fc2d311 100644 --- a/nixos-modules/hydra.nix +++ b/nixos-modules/hydra.nix @@ -68,8 +68,6 @@ in package = mkOption { type = types.path; - default = pkgs.hydra_unstable; - defaultText = literalExpression "pkgs.hydra"; description = "The Hydra package."; }; @@ -340,6 +338,7 @@ in systemd.services.hydra-queue-runner = { wantedBy = [ "multi-user.target" ]; requires = [ "hydra-init.service" ]; + wants = [ "network-online.target" ]; after = [ "hydra-init.service" "network.target" "network-online.target" ]; path = [ cfg.package pkgs.nettools pkgs.openssh pkgs.bzip2 config.nix.package ]; restartTriggers = [ hydraConf ]; @@ -408,6 +407,7 @@ in requires = [ "hydra-init.service" ]; after = [ "hydra-init.service" ]; restartTriggers = [ hydraConf ]; + path = [ pkgs.zstd ]; environment = env // { PGPASSFILE = "${baseDir}/pgpass-queue-runner"; # grrr HYDRA_DBI = "${env.HYDRA_DBI};application_name=hydra-notify"; @@ -458,10 +458,17 @@ in # logs automatically after a step finishes, but this doesn't work # if the queue runner is stopped prematurely.
systemd.services.hydra-compress-logs = - { path = [ pkgs.bzip2 ]; + { path = [ pkgs.bzip2 pkgs.zstd ]; script = '' - find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r bzip2 -v -f + set -eou pipefail + compression=$(sed -nr 's/compress_build_logs_compression = ()/\1/p' ${baseDir}/hydra.conf) + if [[ $compression == "" ]]; then + compression="bzip2" + elif [[ $compression == zstd ]]; then + compression="zstd --rm" + fi + find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r "$compression" --force --quiet ''; startAt = "Sun 01:45"; }; diff --git a/nixos-modules/meson.build b/nixos-modules/meson.build new file mode 100644 index 00000000..95c47e9f --- /dev/null +++ b/nixos-modules/meson.build @@ -0,0 +1,4 @@ +install_data('hydra.nix', + install_dir: get_option('datadir') / 'nix', + rename: ['hydra-module.nix'], +) diff --git a/nixos-tests.nix b/nixos-tests.nix index 19a9ba35..9efe68c8 100644 --- a/nixos-tests.nix +++ b/nixos-tests.nix @@ -1,13 +1,12 @@ -{ forEachSystem, nixpkgs, pkgsBySystem, nixosModules }: +{ forEachSystem, nixpkgs, nixosModules }: let # NixOS configuration used for VM tests. hydraServer = - { config, pkgs, ... }: + { pkgs, ... }: { imports = [ nixosModules.hydra - nixosModules.overlayNixpkgsForThisHydra nixosModules.hydraTest ]; @@ -44,11 +43,10 @@ in }); notifications = forEachSystem (system: - let pkgs = pkgsBySystem.${system}; in with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }; simpleTest { name = "hydra-notifications"; - nodes.machine = { pkgs, ... }: { + nodes.machine = { imports = [ hydraServer ]; services.hydra-dev.extraConfig = '' @@ -89,7 +87,7 @@ in # Setup the project and jobset machine.succeed( - "su - hydra -c 'perl -I ${pkgs.hydra.perlDeps}/lib/perl5/site_perl ${./t/setup-notifications-jobset.pl}' >&2" + "su - hydra -c 'perl -I ${config.services.hydra-dev.package.perlDeps}/lib/perl5/site_perl ${./t/setup-notifications-jobset.pl}' >&2" ) # Wait until hydra has build the job and @@ -103,7 +101,7 @@ in }); gitea = forEachSystem (system: - let pkgs = pkgsBySystem.${system}; in + let pkgs = nixpkgs.legacyPackages.${system}; in with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }; makeTest { name = "hydra-gitea"; @@ -298,7 +296,7 @@ in }); validate-openapi = forEachSystem (system: - let pkgs = pkgsBySystem.${system}; in + let pkgs = nixpkgs.legacyPackages.${system}; in pkgs.runCommand "validate-openapi" { buildInputs = [ pkgs.openapi-generator-cli ]; } '' diff --git a/package.nix b/package.nix index f8b1849f..e0046024 100644 --- a/package.nix +++ b/package.nix @@ -9,10 +9,12 @@ , perlPackages , nix +, nix-perl-bindings , git , makeWrapper -, autoreconfHook +, meson +, ninja , nukeReferences , pkg-config , mdbook @@ -48,6 +50,7 @@ , xz , gnutar , gnused +, nix-eval-jobs , rpm , dpkg @@ -59,7 +62,7 @@ let name = "hydra-perl-deps"; paths = lib.closePropagation ([ - nix.perl-bindings + nix-perl-bindings git ] ++ (with perlPackages; [ AuthenSASL @@ -90,6 +93,7 @@ let DigestSHA1 EmailMIME EmailSender + FileCopyRecursive FileLibMagic FileSlurper FileWhich @@ -137,28 +141,24 @@ stdenv.mkDerivation (finalAttrs: { src = fileset.toSource { root = ./.; fileset = fileset.unions ([ - ./version.txt - ./configure.ac - ./Makefile.am - ./src ./doc - ./nixos-modules/hydra.nix - # These are always needed to appease Automake - ./t/Makefile.am - ./t/jobs/config.nix.in - ./t/jobs/declarative/project.json.in - ] ++ lib.optionals finalAttrs.doCheck [ + ./meson.build + ./nixos-modules + 
./src ./t + ./version.txt ./.perlcriticrc - ./.yath.rc ]); }; + outputs = [ "out" "doc" ]; + strictDeps = true; nativeBuildInputs = [ makeWrapper - autoreconfHook + meson + ninja nukeReferences pkg-config mdbook @@ -191,6 +191,7 @@ stdenv.mkDerivation (finalAttrs: { openldap postgresql_13 pixz + nix-eval-jobs ]; checkInputs = [ @@ -219,15 +220,22 @@ stdenv.mkDerivation (finalAttrs: { darcs gnused breezy + nix-eval-jobs ] ++ lib.optionals stdenv.isLinux [ rpm dpkg cdrkit ] ); OPENLDAP_ROOT = openldap; + mesonBuildType = "release"; + + postPatch = '' + patchShebangs . + ''; + shellHook = '' pushd $(git rev-parse --show-toplevel) >/dev/null - PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-eval-jobs:$(pwd)/src/hydra-queue-runner:$PATH + PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-queue-runner:$PATH PERL5LIB=$(pwd)/src/lib:$PERL5LIB export HYDRA_HOME="$(pwd)/src/" mkdir -p .hydra-data @@ -237,14 +245,11 @@ stdenv.mkDerivation (finalAttrs: { popd >/dev/null ''; - NIX_LDFLAGS = [ "-lpthread" ]; - - enableParallelBuilding = true; - doCheck = true; + mesonCheckFlags = [ "--verbose" ]; + preCheck = '' - patchShebangs . export LOGNAME=''${LOGNAME:-foo} # set $HOME for bzr so it can create its trace file export HOME=$(mktemp -d) @@ -261,7 +266,8 @@ stdenv.mkDerivation (finalAttrs: { --prefix PATH ':' $out/bin:$hydraPath \ --set HYDRA_RELEASE ${version} \ --set HYDRA_HOME $out/libexec/hydra \ - --set NIX_RELEASE ${nix.name or "unknown"} + --set NIX_RELEASE ${nix.name or "unknown"} \ + --set NIX_EVAL_JOBS_RELEASE ${nix-eval-jobs.name or "unknown"} done ''; diff --git a/src/Makefile.am b/src/Makefile.am deleted file mode 100644 index a28780b6..00000000 --- a/src/Makefile.am +++ /dev/null @@ -1,3 +0,0 @@ -SUBDIRS = hydra-evaluator hydra-eval-jobs hydra-queue-runner sql script lib root ttf -BOOTCLEAN_SUBDIRS = $(SUBDIRS) -DIST_SUBDIRS = $(SUBDIRS) diff --git a/src/hydra-eval-jobs/Makefile.am b/src/hydra-eval-jobs/Makefile.am deleted file mode 100644 index 90742a30..00000000 --- a/src/hydra-eval-jobs/Makefile.am +++ /dev/null @@ -1,5 +0,0 @@ -bin_PROGRAMS = hydra-eval-jobs - -hydra_eval_jobs_SOURCES = hydra-eval-jobs.cc -hydra_eval_jobs_LDADD = $(NIX_LIBS) -lnixcmd -hydra_eval_jobs_CXXFLAGS = $(NIX_CFLAGS) -I ../libhydra diff --git a/src/hydra-eval-jobs/hydra-eval-jobs.cc b/src/hydra-eval-jobs/hydra-eval-jobs.cc deleted file mode 100644 index 5adea42b..00000000 --- a/src/hydra-eval-jobs/hydra-eval-jobs.cc +++ /dev/null @@ -1,579 +0,0 @@ -#include -#include -#include -#include - -#include "shared.hh" -#include "store-api.hh" -#include "eval.hh" -#include "eval-inline.hh" -#include "eval-settings.hh" -#include "signals.hh" -#include "terminal.hh" -#include "util.hh" -#include "get-drvs.hh" -#include "globals.hh" -#include "common-eval-args.hh" -#include "flake/flakeref.hh" -#include "flake/flake.hh" -#include "attr-path.hh" -#include "derivations.hh" -#include "local-fs-store.hh" - -#include "hydra-config.hh" - -#include -#include -#include - -#include - -void check_pid_status_nonblocking(pid_t check_pid) -{ - // Only check 'initialized' and known PID's - if (check_pid <= 0) { return; } - - int wstatus = 0; - pid_t pid = waitpid(check_pid, &wstatus, WNOHANG); - // -1 = failure, WNOHANG: 0 = no change - if (pid <= 0) { return; } - - std::cerr << "child process (" << pid << ") "; - - if (WIFEXITED(wstatus)) { - std::cerr << "exited with status=" << WEXITSTATUS(wstatus) << std::endl; - } else if (WIFSIGNALED(wstatus)) { - std::cerr << "killed by signal=" << 
WTERMSIG(wstatus) << std::endl; - } else if (WIFSTOPPED(wstatus)) { - std::cerr << "stopped by signal=" << WSTOPSIG(wstatus) << std::endl; - } else if (WIFCONTINUED(wstatus)) { - std::cerr << "continued" << std::endl; - } -} - -using namespace nix; - -static Path gcRootsDir; -static size_t maxMemorySize; - -struct MyArgs : MixEvalArgs, MixCommonArgs, RootArgs -{ - Path releaseExpr; - bool flake = false; - bool dryRun = false; - - MyArgs() : MixCommonArgs("hydra-eval-jobs") - { - addFlag({ - .longName = "gc-roots-dir", - .description = "garbage collector roots directory", - .labels = {"path"}, - .handler = {&gcRootsDir} - }); - - addFlag({ - .longName = "dry-run", - .description = "don't create store derivations", - .handler = {&dryRun, true} - }); - - addFlag({ - .longName = "flake", - .description = "build a flake", - .handler = {&flake, true} - }); - - expectArg("expr", &releaseExpr); - } -}; - -static MyArgs myArgs; - -static std::string queryMetaStrings(EvalState & state, PackageInfo & drv, const std::string & name, const std::string & subAttribute) -{ - Strings res; - std::function rec; - - rec = [&](Value & v) { - state.forceValue(v, noPos); - if (v.type() == nString) - res.emplace_back(v.string_view()); - else if (v.isList()) - for (unsigned int n = 0; n < v.listSize(); ++n) - rec(*v.listElems()[n]); - else if (v.type() == nAttrs) { - auto a = v.attrs()->find(state.symbols.create(subAttribute)); - if (a != v.attrs()->end()) - res.push_back(std::string(state.forceString(*a->value, a->pos, "while evaluating meta attributes"))); - } - }; - - Value * v = drv.queryMeta(name); - if (v) rec(*v); - - return concatStringsSep(", ", res); -} - -static void worker( - EvalState & state, - Bindings & autoArgs, - AutoCloseFD & to, - AutoCloseFD & from) -{ - Value vTop; - - if (myArgs.flake) { - using namespace flake; - - auto flakeRef = parseFlakeRef(myArgs.releaseExpr); - - auto vFlake = state.allocValue(); - - auto lockedFlake = lockFlake(state, flakeRef, - LockFlags { - .updateLockFile = false, - .useRegistries = false, - .allowUnlocked = false, - }); - - callFlake(state, lockedFlake, *vFlake); - - auto vOutputs = vFlake->attrs()->get(state.symbols.create("outputs"))->value; - state.forceValue(*vOutputs, noPos); - - auto aHydraJobs = vOutputs->attrs()->get(state.symbols.create("hydraJobs")); - if (!aHydraJobs) - aHydraJobs = vOutputs->attrs()->get(state.symbols.create("checks")); - if (!aHydraJobs) - throw Error("flake '%s' does not provide any Hydra jobs or checks", flakeRef); - - vTop = *aHydraJobs->value; - - } else { - state.evalFile(lookupFileArg(state, myArgs.releaseExpr), vTop); - } - - auto vRoot = state.allocValue(); - state.autoCallFunction(autoArgs, vTop, *vRoot); - - while (true) { - /* Wait for the master to send us a job name. */ - writeLine(to.get(), "next"); - - auto s = readLine(from.get()); - if (s == "exit") break; - if (!hasPrefix(s, "do ")) abort(); - std::string attrPath(s, 3); - - debug("worker process %d at '%s'", getpid(), attrPath); - - /* Evaluate it and send info back to the master. */ - nlohmann::json reply; - - try { - auto vTmp = findAlongAttrPath(state, attrPath, autoArgs, *vRoot).first; - - auto v = state.allocValue(); - state.autoCallFunction(autoArgs, *vTmp, *v); - - if (auto drv = getDerivation(state, *v, false)) { - - // CA derivations do not have static output paths, so we - // have to defensively not query output paths in case we - // encounter one. 
- PackageInfo::Outputs outputs = drv->queryOutputs( - !experimentalFeatureSettings.isEnabled(Xp::CaDerivations)); - - if (drv->querySystem() == "unknown") - state.error("derivation must have a 'system' attribute").debugThrow(); - - auto drvPath = state.store->printStorePath(drv->requireDrvPath()); - - nlohmann::json job; - - job["nixName"] = drv->queryName(); - job["system"] =drv->querySystem(); - job["drvPath"] = drvPath; - job["description"] = drv->queryMetaString("description"); - job["license"] = queryMetaStrings(state, *drv, "license", "shortName"); - job["homepage"] = drv->queryMetaString("homepage"); - job["maintainers"] = queryMetaStrings(state, *drv, "maintainers", "email"); - job["schedulingPriority"] = drv->queryMetaInt("schedulingPriority", 100); - job["timeout"] = drv->queryMetaInt("timeout", 36000); - job["maxSilent"] = drv->queryMetaInt("maxSilent", 7200); - job["isChannel"] = drv->queryMetaBool("isHydraChannel", false); - - /* If this is an aggregate, then get its constituents. */ - auto a = v->attrs()->get(state.symbols.create("_hydraAggregate")); - if (a && state.forceBool(*a->value, a->pos, "while evaluating the `_hydraAggregate` attribute")) { - auto a = v->attrs()->get(state.symbols.create("constituents")); - if (!a) - state.error("derivation must have a ‘constituents’ attribute").debugThrow(); - - NixStringContext context; - state.coerceToString(a->pos, *a->value, context, "while evaluating the `constituents` attribute", true, false); - for (auto & c : context) - std::visit(overloaded { - [&](const NixStringContextElem::Built & b) { - job["constituents"].push_back(b.drvPath->to_string(*state.store)); - }, - [&](const NixStringContextElem::Opaque & o) { - }, - [&](const NixStringContextElem::DrvDeep & d) { - }, - }, c.raw); - - state.forceList(*a->value, a->pos, "while evaluating the `constituents` attribute"); - for (unsigned int n = 0; n < a->value->listSize(); ++n) { - auto v = a->value->listElems()[n]; - state.forceValue(*v, noPos); - if (v->type() == nString) - job["namedConstituents"].push_back(v->string_view()); - } - } - - /* Register the derivation as a GC root. !!! This - registers roots for jobs that we may have already - done. */ - auto localStore = state.store.dynamic_pointer_cast(); - if (gcRootsDir != "" && localStore) { - Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath)); - if (!pathExists(root)) - localStore->addPermRoot(localStore->parseStorePath(drvPath), root); - } - - nlohmann::json out; - for (auto & [outputName, optOutputPath] : outputs) { - if (optOutputPath) { - out[outputName] = state.store->printStorePath(*optOutputPath); - } else { - // See the `queryOutputs` call above; we should - // not encounter missing output paths otherwise. 
- assert(experimentalFeatureSettings.isEnabled(Xp::CaDerivations)); - out[outputName] = nullptr; - } - } - job["outputs"] = std::move(out); - reply["job"] = std::move(job); - } - - else if (v->type() == nAttrs) { - auto attrs = nlohmann::json::array(); - StringSet ss; - for (auto & i : v->attrs()->lexicographicOrder(state.symbols)) { - std::string name(state.symbols[i->name]); - if (name.find(' ') != std::string::npos) { - printError("skipping job with illegal name '%s'", name); - continue; - } - attrs.push_back(name); - } - reply["attrs"] = std::move(attrs); - } - - else if (v->type() == nNull) - ; - - else state.error("attribute '%s' is %s, which is not supported", attrPath, showType(*v)).debugThrow(); - - } catch (EvalError & e) { - auto msg = e.msg(); - // Transmits the error we got from the previous evaluation - // in the JSON output. - reply["error"] = filterANSIEscapes(msg, true); - // Don't forget to print it into the STDERR log, this is - // what's shown in the Hydra UI. - printError(msg); - } - - writeLine(to.get(), reply.dump()); - - /* If our RSS exceeds the maximum, exit. The master will - start a new process. */ - struct rusage r; - getrusage(RUSAGE_SELF, &r); - if ((size_t) r.ru_maxrss > maxMemorySize * 1024) break; - } - - writeLine(to.get(), "restart"); -} - -int main(int argc, char * * argv) -{ - /* Prevent undeclared dependencies in the evaluation via - $NIX_PATH. */ - unsetenv("NIX_PATH"); - - return handleExceptions(argv[0], [&]() { - - auto config = std::make_unique(); - - auto nrWorkers = config->getIntOption("evaluator_workers", 1); - maxMemorySize = config->getIntOption("evaluator_max_memory_size", 4096); - - initNix(); - initGC(); - - myArgs.parseCmdline(argvToStrings(argc, argv)); - - auto pureEval = config->getBoolOption("evaluator_pure_eval", myArgs.flake); - - /* FIXME: The build hook in conjunction with import-from-derivation is causing "unexpected EOF" during eval */ - settings.builders = ""; - - /* Prevent access to paths outside of the Nix search path and - to the environment. */ - evalSettings.restrictEval = true; - - /* When building a flake, use pure evaluation (no access to - 'getEnv', 'currentSystem' etc. */ - evalSettings.pureEval = pureEval; - - if (myArgs.dryRun) settings.readOnlyMode = true; - - if (myArgs.releaseExpr == "") throw UsageError("no expression specified"); - - if (gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified"); - - struct State - { - std::set todo{""}; - std::set active; - nlohmann::json jobs; - std::exception_ptr exc; - }; - - std::condition_variable wakeup; - - Sync state_; - - /* Start a handler thread per worker process. */ - auto handler = [&]() - { - pid_t pid = -1; - try { - AutoCloseFD from, to; - - while (true) { - - /* Start a new worker process if necessary. */ - if (pid == -1) { - Pipe toPipe, fromPipe; - toPipe.create(); - fromPipe.create(); - pid = startProcess( - [&, - to{std::make_shared(std::move(fromPipe.writeSide))}, - from{std::make_shared(std::move(toPipe.readSide))} - ]() - { - try { - EvalState state(myArgs.lookupPath, openStore()); - Bindings & autoArgs = *myArgs.getAutoArgs(state); - worker(state, autoArgs, *to, *from); - } catch (Error & e) { - nlohmann::json err; - auto msg = e.msg(); - err["error"] = filterANSIEscapes(msg, true); - printError(msg); - writeLine(to->get(), err.dump()); - // Don't forget to print it into the STDERR log, this is - // what's shown in the Hydra UI. 
- writeLine(to->get(), "restart"); - } - }, - ProcessOptions { .allowVfork = false }); - from = std::move(fromPipe.readSide); - to = std::move(toPipe.writeSide); - debug("created worker process %d", pid); - } - - /* Check whether the existing worker process is still there. */ - auto s = readLine(from.get()); - if (s == "restart") { - pid = -1; - continue; - } else if (s != "next") { - auto json = nlohmann::json::parse(s); - throw Error("worker error: %s", (std::string) json["error"]); - } - - /* Wait for a job name to become available. */ - std::string attrPath; - - while (true) { - checkInterrupt(); - auto state(state_.lock()); - if ((state->todo.empty() && state->active.empty()) || state->exc) { - writeLine(to.get(), "exit"); - return; - } - if (!state->todo.empty()) { - attrPath = *state->todo.begin(); - state->todo.erase(state->todo.begin()); - state->active.insert(attrPath); - break; - } else - state.wait(wakeup); - } - - /* Tell the worker to evaluate it. */ - writeLine(to.get(), "do " + attrPath); - - /* Wait for the response. */ - auto response = nlohmann::json::parse(readLine(from.get())); - - /* Handle the response. */ - StringSet newAttrs; - - if (response.find("job") != response.end()) { - auto state(state_.lock()); - state->jobs[attrPath] = response["job"]; - } - - if (response.find("attrs") != response.end()) { - for (auto & i : response["attrs"]) { - std::string path = i; - if (path.find(".") != std::string::npos){ - path = "\"" + path + "\""; - } - auto s = (attrPath.empty() ? "" : attrPath + ".") + (std::string) path; - newAttrs.insert(s); - } - } - - if (response.find("error") != response.end()) { - auto state(state_.lock()); - state->jobs[attrPath]["error"] = response["error"]; - } - - /* Add newly discovered job names to the queue. */ - { - auto state(state_.lock()); - state->active.erase(attrPath); - for (auto & s : newAttrs) - state->todo.insert(s); - wakeup.notify_all(); - } - } - } catch (...) { - check_pid_status_nonblocking(pid); - auto state(state_.lock()); - state->exc = std::current_exception(); - wakeup.notify_all(); - } - }; - - std::vector threads; - for (size_t i = 0; i < nrWorkers; i++) - threads.emplace_back(std::thread(handler)); - - for (auto & thread : threads) - thread.join(); - - auto state(state_.lock()); - - if (state->exc) - std::rethrow_exception(state->exc); - - /* For aggregate jobs that have named consistuents - (i.e. constituents that are a job name rather than a - derivation), look up the referenced job and add it to the - dependencies of the aggregate derivation. 
*/ - auto store = openStore(); - - for (auto i = state->jobs.begin(); i != state->jobs.end(); ++i) { - auto jobName = i.key(); - auto & job = i.value(); - - auto named = job.find("namedConstituents"); - if (named == job.end()) continue; - - std::unordered_map brokenJobs; - auto getNonBrokenJobOrRecordError = [&brokenJobs, &jobName, &state]( - const std::string & childJobName) -> std::optional { - auto childJob = state->jobs.find(childJobName); - if (childJob == state->jobs.end()) { - printError("aggregate job '%s' references non-existent job '%s'", jobName, childJobName); - brokenJobs[childJobName] = "does not exist"; - return std::nullopt; - } - if (childJob->find("error") != childJob->end()) { - std::string error = (*childJob)["error"]; - printError("aggregate job '%s' references broken job '%s': %s", jobName, childJobName, error); - brokenJobs[childJobName] = error; - return std::nullopt; - } - return *childJob; - }; - - if (myArgs.dryRun) { - for (std::string jobName2 : *named) { - auto job2 = getNonBrokenJobOrRecordError(jobName2); - if (!job2) { - continue; - } - std::string drvPath2 = (*job2)["drvPath"]; - job["constituents"].push_back(drvPath2); - } - } else { - auto drvPath = store->parseStorePath((std::string) job["drvPath"]); - auto drv = store->readDerivation(drvPath); - - for (std::string jobName2 : *named) { - auto job2 = getNonBrokenJobOrRecordError(jobName2); - if (!job2) { - continue; - } - auto drvPath2 = store->parseStorePath((std::string) (*job2)["drvPath"]); - auto drv2 = store->readDerivation(drvPath2); - job["constituents"].push_back(store->printStorePath(drvPath2)); - drv.inputDrvs.map[drvPath2].value = {drv2.outputs.begin()->first}; - } - - if (brokenJobs.empty()) { - std::string drvName(drvPath.name()); - assert(hasSuffix(drvName, drvExtension)); - drvName.resize(drvName.size() - drvExtension.size()); - - auto hashModulo = hashDerivationModulo(*store, drv, true); - if (hashModulo.kind != DrvHash::Kind::Regular) continue; - auto h = hashModulo.hashes.find("out"); - if (h == hashModulo.hashes.end()) continue; - auto outPath = store->makeOutputPath("out", h->second, drvName); - drv.env["out"] = store->printStorePath(outPath); - drv.outputs.insert_or_assign("out", DerivationOutput::InputAddressed { .path = outPath }); - auto newDrvPath = store->printStorePath(writeDerivation(*store, drv)); - - debug("rewrote aggregate derivation %s -> %s", store->printStorePath(drvPath), newDrvPath); - - job["drvPath"] = newDrvPath; - job["outputs"]["out"] = store->printStorePath(outPath); - } - } - - job.erase("namedConstituents"); - - /* Register the derivation as a GC root. !!! This - registers roots for jobs that we may have already - done. 
*/ - auto localStore = store.dynamic_pointer_cast(); - if (gcRootsDir != "" && localStore) { - auto drvPath = job["drvPath"].get(); - Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath)); - if (!pathExists(root)) - localStore->addPermRoot(localStore->parseStorePath(drvPath), root); - } - - if (!brokenJobs.empty()) { - std::stringstream ss; - for (const auto& [jobName, error] : brokenJobs) { - ss << jobName << ": " << error << "\n"; - } - job["error"] = ss.str(); - } - } - - std::cout << state->jobs.dump(2) << "\n"; - }); -} diff --git a/src/hydra-evaluator/Makefile.am b/src/hydra-evaluator/Makefile.am deleted file mode 100644 index 73638cfe..00000000 --- a/src/hydra-evaluator/Makefile.am +++ /dev/null @@ -1,5 +0,0 @@ -bin_PROGRAMS = hydra-evaluator - -hydra_evaluator_SOURCES = hydra-evaluator.cc -hydra_evaluator_LDADD = $(NIX_LIBS) -lpqxx -hydra_evaluator_CXXFLAGS = $(NIX_CFLAGS) -Wall -I ../libhydra -Wno-deprecated-declarations diff --git a/src/hydra-evaluator/meson.build b/src/hydra-evaluator/meson.build new file mode 100644 index 00000000..b3dc542e --- /dev/null +++ b/src/hydra-evaluator/meson.build @@ -0,0 +1,9 @@ +hydra_evaluator = executable('hydra-evaluator', + 'hydra-evaluator.cc', + dependencies: [ + libhydra_dep, + nix_dep, + pqxx_dep, + ], + install: true, +) diff --git a/src/hydra-queue-runner/Makefile.am b/src/hydra-queue-runner/Makefile.am deleted file mode 100644 index 117112f6..00000000 --- a/src/hydra-queue-runner/Makefile.am +++ /dev/null @@ -1,8 +0,0 @@ -bin_PROGRAMS = hydra-queue-runner - -hydra_queue_runner_SOURCES = hydra-queue-runner.cc queue-monitor.cc dispatcher.cc \ - builder.cc build-result.cc build-remote.cc \ - hydra-build-result.hh counter.hh state.hh db.hh \ - nar-extractor.cc nar-extractor.hh -hydra_queue_runner_LDADD = $(NIX_LIBS) -lpqxx -lprometheus-cpp-pull -lprometheus-cpp-core -hydra_queue_runner_CXXFLAGS = $(NIX_CFLAGS) -Wall -I ../libhydra -Wno-deprecated-declarations diff --git a/src/hydra-queue-runner/dispatcher.cc b/src/hydra-queue-runner/dispatcher.cc index 0f5a65e7..cbf982bf 100644 --- a/src/hydra-queue-runner/dispatcher.cc +++ b/src/hydra-queue-runner/dispatcher.cc @@ -2,6 +2,7 @@ #include #include #include +#include #include "state.hh" diff --git a/src/hydra-queue-runner/meson.build b/src/hydra-queue-runner/meson.build new file mode 100644 index 00000000..1c20299f --- /dev/null +++ b/src/hydra-queue-runner/meson.build @@ -0,0 +1,22 @@ +srcs = files( + 'builder.cc', + 'build-remote.cc', + 'build-result.cc', + 'dispatcher.cc', + 'hydra-queue-runner.cc', + 'nar-extractor.cc', + 'queue-monitor.cc', +) + +hydra_queue_runner = executable('hydra-queue-runner', + 'hydra-queue-runner.cc', + srcs, + dependencies: [ + libhydra_dep, + nix_dep, + pqxx_dep, + prom_cpp_core_dep, + prom_cpp_pull_dep, + ], + install: true, +) diff --git a/src/hydra-queue-runner/nar-extractor.cc b/src/hydra-queue-runner/nar-extractor.cc index 61299ecd..8729dd39 100644 --- a/src/hydra-queue-runner/nar-extractor.cc +++ b/src/hydra-queue-runner/nar-extractor.cc @@ -54,32 +54,40 @@ struct Extractor : FileSystemObjectSink }; NarMemberDatas & members; - Path prefix; + std::filesystem::path prefix; + + Path toKey(const CanonPath & path) + { + std::filesystem::path p = prefix; + // Conditional to avoid trailing slash + if (!path.isRoot()) p /= path.rel(); + return p; + } Extractor(NarMemberDatas & members, const Path & prefix) : members(members), prefix(prefix) { } - void createDirectory(const Path & path) override + void createDirectory(const CanonPath & path) override { - 
members.insert_or_assign(prefix + path, NarMemberData { .type = SourceAccessor::Type::tDirectory }); + members.insert_or_assign(toKey(path), NarMemberData { .type = SourceAccessor::Type::tDirectory }); } - void createRegularFile(const Path & path, std::function func) override + void createRegularFile(const CanonPath & path, std::function func) override { NarMemberConstructor nmc { - members.insert_or_assign(prefix + path, NarMemberData { + members.insert_or_assign(toKey(path), NarMemberData { .type = SourceAccessor::Type::tRegular, .fileSize = 0, - .contents = filesToKeep.count(path) ? std::optional("") : std::nullopt, + .contents = filesToKeep.count(path.abs()) ? std::optional("") : std::nullopt, }).first->second, }; func(nmc); } - void createSymlink(const Path & path, const std::string & target) override + void createSymlink(const CanonPath & path, const std::string & target) override { - members.insert_or_assign(prefix + path, NarMemberData { .type = SourceAccessor::Type::tSymlink }); + members.insert_or_assign(toKey(path), NarMemberData { .type = SourceAccessor::Type::tSymlink }); } }; diff --git a/src/hydra-queue-runner/state.hh b/src/hydra-queue-runner/state.hh index e2fb0c9c..30e01c74 100644 --- a/src/hydra-queue-runner/state.hh +++ b/src/hydra-queue-runner/state.hh @@ -22,6 +22,7 @@ #include "nar-extractor.hh" #include "serve-protocol.hh" #include "serve-protocol-impl.hh" +#include "serve-protocol-connection.hh" #include "machines.hh" diff --git a/src/lib/Hydra/Config.pm b/src/lib/Hydra/Config.pm index af686fca..6aae5a5e 100644 --- a/src/lib/Hydra/Config.pm +++ b/src/lib/Hydra/Config.pm @@ -95,6 +95,7 @@ sub get_legacy_ldap_config { "hydra_bump-to-front" => [ "bump-to-front" ], "hydra_cancel-build" => [ "cancel-build" ], "hydra_create-projects" => [ "create-projects" ], + "hydra_eval-jobset" => [ "eval-jobset" ], "hydra_restart-jobs" => [ "restart-jobs" ], }, }; @@ -159,6 +160,7 @@ sub valid_roles { "bump-to-front", "cancel-build", "create-projects", + "eval-jobset", "restart-jobs", ]; } diff --git a/src/lib/Hydra/Controller/API.pm b/src/lib/Hydra/Controller/API.pm index 8ebed599..9f8b7cba 100644 --- a/src/lib/Hydra/Controller/API.pm +++ b/src/lib/Hydra/Controller/API.pm @@ -239,6 +239,8 @@ sub triggerJobset { sub push : Chained('api') PathPart('push') Args(0) { my ($self, $c) = @_; + requirePost($c); + $c->{stash}->{json}->{jobsetsTriggered} = []; my $force = exists $c->request->query_params->{force}; @@ -246,19 +248,24 @@ sub push : Chained('api') PathPart('push') Args(0) { foreach my $s (@jobsets) { my ($p, $j) = parseJobsetName($s); my $jobset = $c->model('DB::Jobsets')->find($p, $j); + requireEvalJobsetPrivileges($c, $jobset->project); next unless defined $jobset && ($force || ($jobset->project->enabled && $jobset->enabled)); triggerJobset($self, $c, $jobset, $force); } my @repos = split /,/, ($c->request->query_params->{repos} // ""); foreach my $r (@repos) { - triggerJobset($self, $c, $_, $force) foreach $c->model('DB::Jobsets')->search( + my @jobsets = $c->model('DB::Jobsets')->search( { 'project.enabled' => 1, 'me.enabled' => 1 }, { join => 'project', where => \ [ 'exists (select 1 from JobsetInputAlts where project = me.project and jobset = me.name and value = ?)', [ 'value', $r ] ], order_by => 'me.id DESC' }); + foreach my $jobset (@jobsets) { + requireEvalJobsetPrivileges($c, $jobset->project); + triggerJobset($self, $c, $jobset, $force) + } } $self->status_ok( @@ -285,6 +292,23 @@ sub push_github : Chained('api') PathPart('push-github') Args(0) { 
$c->response->body(""); } +sub push_gitea : Chained('api') PathPart('push-gitea') Args(0) { + my ($self, $c) = @_; + + $c->{stash}->{json}->{jobsetsTriggered} = []; + + my $in = $c->request->{data}; + my $url = $in->{repository}->{clone_url} or die; + $url =~ s/.git$//; + print STDERR "got push from Gitea repository $url\n"; + + triggerJobset($self, $c, $_, 0) foreach $c->model('DB::Jobsets')->search( + { 'project.enabled' => 1, 'me.enabled' => 1 }, + { join => 'project' + , where => \ [ 'me.flake like ? or exists (select 1 from JobsetInputAlts where project = me.project and jobset = me.name and value like ?)', [ 'flake', "%$url%"], [ 'value', "%$url%" ] ] + }); + $c->response->body(""); +} 1; diff --git a/src/lib/Hydra/Controller/Root.pm b/src/lib/Hydra/Controller/Root.pm index 406dc52e..a231d7c0 100644 --- a/src/lib/Hydra/Controller/Root.pm +++ b/src/lib/Hydra/Controller/Root.pm @@ -35,6 +35,7 @@ sub noLoginNeeded { return $whitelisted || $c->request->path eq "api/push-github" || + $c->request->path eq "api/push-gitea" || $c->request->path eq "google-login" || $c->request->path eq "github-redirect" || $c->request->path eq "github-login" || @@ -50,6 +51,7 @@ sub begin :Private { $c->stash->{curUri} = $c->request->uri; $c->stash->{version} = $ENV{"HYDRA_RELEASE"} || ""; $c->stash->{nixVersion} = $ENV{"NIX_RELEASE"} || ""; + $c->stash->{nixEvalJobsVersion} = $ENV{"NIX_EVAL_JOBS_RELEASE"} || ""; $c->stash->{curTime} = time; $c->stash->{logo} = defined $c->config->{hydra_logo} ? "/logo" : ""; $c->stash->{tracker} = defined $c->config->{tracker} ? $c->config->{tracker} : ""; @@ -80,7 +82,7 @@ sub begin :Private { $_->supportedInputTypes($c->stash->{inputTypes}) foreach @{$c->hydra_plugins}; # XSRF protection: require POST requests to have the same origin. - if ($c->req->method eq "POST" && $c->req->path ne "api/push-github") { + if ($c->req->method eq "POST" && $c->req->path ne "api/push-github" && $c->req->path ne "api/push-gitea") { my $referer = $c->req->header('Referer'); $referer //= $c->req->header('Origin'); my $base = $c->req->base; @@ -329,7 +331,7 @@ sub nar :Local :Args(1) { else { $path = $Nix::Config::storeDir . "/$path"; - gone($c, "Path " . $path . " is no longer available.") unless isValidPath($path); + gone($c, "Path " . $path . 
" is no longer available.") unless $MACHINE_LOCAL_STORE->isValidPath($path); $c->stash->{current_view} = 'NixNAR'; $c->stash->{storePath} = $path; diff --git a/src/lib/Hydra/Helper/CatalystUtils.pm b/src/lib/Hydra/Helper/CatalystUtils.pm index 2a2ad86f..6ccdbc4d 100644 --- a/src/lib/Hydra/Helper/CatalystUtils.pm +++ b/src/lib/Hydra/Helper/CatalystUtils.pm @@ -15,6 +15,7 @@ our @EXPORT = qw( forceLogin requireUser requireProjectOwner requireRestartPrivileges requireAdmin requirePost isAdmin isProjectOwner requireBumpPrivileges requireCancelBuildPrivileges + requireEvalJobsetPrivileges trim getLatestFinishedEval getFirstEval paramToList @@ -186,6 +187,27 @@ sub isProjectOwner { defined $c->model('DB::ProjectMembers')->find({ project => $project, userName => $c->user->username })); } +sub hasEvalJobsetRole { + my ($c) = @_; + return $c->user_exists && $c->check_user_roles("eval-jobset"); +} + +sub mayEvalJobset { + my ($c, $project) = @_; + return + $c->user_exists && + (isAdmin($c) || + hasEvalJobsetRole($c) || + isProjectOwner($c, $project)); +} + +sub requireEvalJobsetPrivileges { + my ($c, $project) = @_; + requireUser($c); + accessDenied($c, "Only the project members, administrators, and accounts with eval-jobset privileges can perform this operation.") + unless mayEvalJobset($c, $project); +} + sub hasCancelBuildRole { my ($c) = @_; return $c->user_exists && $c->check_user_roles('cancel-build'); @@ -272,7 +294,7 @@ sub requireAdmin { sub requirePost { my ($c) = @_; - error($c, "Request must be POSTed.") if $c->request->method ne "POST"; + error($c, "Request must be POSTed.", 405) if $c->request->method ne "POST"; } diff --git a/src/lib/Hydra/Helper/Nix.pm b/src/lib/Hydra/Helper/Nix.pm index 16637b2b..bff7a5ed 100644 --- a/src/lib/Hydra/Helper/Nix.pm +++ b/src/lib/Hydra/Helper/Nix.pm @@ -174,6 +174,9 @@ sub getDrvLogPath { for ($fn . $bucketed, $fn . $bucketed . ".bz2") { return $_ if -f $_; } + for ($fn . $bucketed, $fn . $bucketed . 
".zst") { + return $_ if -f $_; + } return undef; } diff --git a/src/lib/Hydra/Plugin/CompressLog.pm b/src/lib/Hydra/Plugin/CompressLog.pm index 10e8f6cc..fe4d33b0 100644 --- a/src/lib/Hydra/Plugin/CompressLog.pm +++ b/src/lib/Hydra/Plugin/CompressLog.pm @@ -9,11 +9,24 @@ use Hydra::Helper::CatalystUtils; sub stepFinished { my ($self, $step, $logPath) = @_; - my $doCompress = $self->{config}->{'compress_build_logs'} // "1"; + my $doCompress = $self->{config}->{'compress_build_logs'} // '1'; + my $silent = $self->{config}->{'compress_build_logs_silent'} // '0'; + my $compression = $self->{config}->{'compress_build_logs_compression'} // 'bzip2'; - if ($doCompress eq "1" && -e $logPath) { - print STDERR "compressing ‘$logPath’...\n"; - system("bzip2", "--force", $logPath); + if (not -e $logPath or $doCompress ne "1") { + return; + } + + if ($silent ne '1') { + print STDERR "compressing '$logPath' with $compression...\n"; + } + + if ($compression eq 'bzip2') { + system('bzip2', '--force', $logPath); + } elsif ($compression eq 'zstd') { + system('zstd', '--rm', '--quiet', '-T0', $logPath); + } else { + print STDERR "unknown compression type '$compression'\n"; } } diff --git a/src/lib/Hydra/Plugin/S3Backup.pm b/src/lib/Hydra/Plugin/S3Backup.pm index 98e79747..f1f50754 100644 --- a/src/lib/Hydra/Plugin/S3Backup.pm +++ b/src/lib/Hydra/Plugin/S3Backup.pm @@ -14,6 +14,7 @@ use Nix::Config; use Nix::Store; use Hydra::Model::DB; use Hydra::Helper::CatalystUtils; +use Hydra::Helper::Nix; sub isEnabled { my ($self) = @_; @@ -92,7 +93,7 @@ sub buildFinished { my $hash = substr basename($path), 0, 32; my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($path, 0); my $system; - if (defined $deriver and isValidPath($deriver)) { + if (defined $deriver and $MACHINE_LOCAL_STORE->isValidPath($deriver)) { $system = derivationFromPath($deriver)->{platform}; } foreach my $reference (@{$refs}) { diff --git a/src/lib/Hydra/Plugin/SubversionInput.pm b/src/lib/Hydra/Plugin/SubversionInput.pm index 83c1f39d..d3579c40 100644 --- a/src/lib/Hydra/Plugin/SubversionInput.pm +++ b/src/lib/Hydra/Plugin/SubversionInput.pm @@ -46,7 +46,7 @@ sub fetchInput { $MACHINE_LOCAL_STORE->addTempRoot($cachedInput->storepath) if defined $cachedInput; - if (defined $cachedInput && isValidPath($cachedInput->storepath)) { + if (defined $cachedInput && $MACHINE_LOCAL_STORE->isValidPath($cachedInput->storepath)) { $storePath = $cachedInput->storepath; $sha256 = $cachedInput->sha256hash; } else { diff --git a/src/lib/Hydra/View/NARInfo.pm b/src/lib/Hydra/View/NARInfo.pm index bf8711a4..801fc06a 100644 --- a/src/lib/Hydra/View/NARInfo.pm +++ b/src/lib/Hydra/View/NARInfo.pm @@ -6,6 +6,8 @@ use File::Basename; use Hydra::Helper::CatalystUtils; use MIME::Base64; use Nix::Manifest; +use Nix::Store; +use Nix::Utils; use Hydra::Helper::Nix; use base qw/Catalyst::View/; diff --git a/src/lib/Hydra/View/NixLog.pm b/src/lib/Hydra/View/NixLog.pm index 7f37ae78..fe37d900 100644 --- a/src/lib/Hydra/View/NixLog.pm +++ b/src/lib/Hydra/View/NixLog.pm @@ -16,7 +16,10 @@ sub process { my $tail = int($c->stash->{tail} // "0"); - if ($logPath =~ /\.bz2$/) { + if ($logPath =~ /\.zst$/) { + my $doTail = $tail ? "| tail -n '$tail'" : ""; + open($fh, "-|", "zstd -dc < '$logPath' $doTail") or die; + } elsif ($logPath =~ /\.bz2$/) { my $doTail = $tail ? 
"| tail -n '$tail'" : ""; open($fh, "-|", "bzip2 -dc < '$logPath' $doTail") or die; } else { diff --git a/src/lib/Makefile.am b/src/lib/Makefile.am deleted file mode 100644 index 434868e0..00000000 --- a/src/lib/Makefile.am +++ /dev/null @@ -1,22 +0,0 @@ -PERL_MODULES = \ - $(wildcard *.pm) \ - $(wildcard Hydra/*.pm) \ - $(wildcard Hydra/Helper/*.pm) \ - $(wildcard Hydra/Model/*.pm) \ - $(wildcard Hydra/View/*.pm) \ - $(wildcard Hydra/Schema/*.pm) \ - $(wildcard Hydra/Schema/Result/*.pm) \ - $(wildcard Hydra/Schema/ResultSet/*.pm) \ - $(wildcard Hydra/Controller/*.pm) \ - $(wildcard Hydra/Base/*.pm) \ - $(wildcard Hydra/Base/Controller/*.pm) \ - $(wildcard Hydra/Script/*.pm) \ - $(wildcard Hydra/Component/*.pm) \ - $(wildcard Hydra/Event/*.pm) \ - $(wildcard Hydra/Plugin/*.pm) - -EXTRA_DIST = \ - $(PERL_MODULES) - -hydradir = $(libexecdir)/hydra/lib -nobase_hydra_DATA = $(PERL_MODULES) diff --git a/src/libhydra/meson.build b/src/libhydra/meson.build new file mode 100644 index 00000000..1866233c --- /dev/null +++ b/src/libhydra/meson.build @@ -0,0 +1,5 @@ +libhydra_inc = include_directories('.') + +libhydra_dep = declare_dependency( + include_directories: [libhydra_inc], +) diff --git a/src/meson.build b/src/meson.build new file mode 100644 index 00000000..52b821bc --- /dev/null +++ b/src/meson.build @@ -0,0 +1,85 @@ +# Native code +subdir('libhydra') +subdir('hydra-evaluator') +subdir('hydra-queue-runner') + +hydra_libexecdir = get_option('libexecdir') / 'hydra' + +# Data and interpreted +foreach dir : ['lib', 'root'] + install_subdir(dir, + install_dir: hydra_libexecdir, + ) +endforeach +subdir('sql') +subdir('ttf') + +# Static files for website + +hydra_libexecdir_static = hydra_libexecdir / 'root' / 'static' + +## Bootstrap + +bootstrap_name = 'bootstrap-4.3.1-dist' +bootstrap = custom_target( + 'extract-bootstrap', + input: 'root' / (bootstrap_name + '.zip'), + output: bootstrap_name, + command: ['unzip', '-u', '-d', '@OUTDIR@', '@INPUT@'], +) +custom_target( + 'name-bootstrap', + input: bootstrap, + output: 'bootstrap', + command: ['cp', '-r', '@INPUT@' , '@OUTPUT@'], + install: true, + install_dir: hydra_libexecdir_static, +) + +## Flot + +custom_target( + 'extract-flot', + input: 'root' / 'flot-0.8.3.zip', + output: 'flot', + command: ['unzip', '-u', '-d', '@OUTDIR@', '@INPUT@'], + install: true, + install_dir: hydra_libexecdir_static / 'js', +) + +## Fontawesome + +fontawesome_name = 'fontawesome-free-5.10.2-web' +fontawesome = custom_target( + 'extract-fontawesome', + input: 'root' / (fontawesome_name + '.zip'), + output: fontawesome_name, + command: ['unzip', '-u', '-d', '@OUTDIR@', '@INPUT@'], +) +custom_target( + 'name-fontawesome-css', + input: fontawesome, + output: 'css', + command: ['cp', '-r', '@INPUT@/css', '@OUTPUT@'], + install: true, + install_dir: hydra_libexecdir_static / 'fontawesome', +) +custom_target( + 'name-fontawesome-webfonts', + input: fontawesome, + output: 'webfonts', + command: ['cp', '-r', '@INPUT@/webfonts', '@OUTPUT@'], + install: true, + install_dir: hydra_libexecdir_static / 'fontawesome', +) + +# Scripts + +install_subdir('script', + install_dir: get_option('bindir'), + exclude_files: [ + 'hydra-dev-server', + ], + install_mode: 'rwxr-xr-x', + strip_directory: true, +) diff --git a/src/root/Makefile.am b/src/root/Makefile.am deleted file mode 100644 index 163b96e0..00000000 --- a/src/root/Makefile.am +++ /dev/null @@ -1,39 +0,0 @@ -TEMPLATES = $(wildcard *.tt) -STATIC = \ - $(wildcard static/images/*) \ - $(wildcard static/css/*) \ - 
diff --git a/src/root/Makefile.am b/src/root/Makefile.am
deleted file mode 100644
index 163b96e0..00000000
--- a/src/root/Makefile.am
+++ /dev/null
@@ -1,39 +0,0 @@
-TEMPLATES = $(wildcard *.tt)
-STATIC = \
-    $(wildcard static/images/*) \
-    $(wildcard static/css/*) \
-    static/js/bootbox.min.js \
-    static/js/popper.min.js \
-    static/js/common.js \
-    static/js/jquery/jquery-3.4.1.min.js \
-    static/js/jquery/jquery-ui-1.10.4.min.js
-
-FLOT = flot-0.8.3.zip
-BOOTSTRAP = bootstrap-4.3.1-dist.zip
-FONTAWESOME = fontawesome-free-5.10.2-web.zip
-
-ZIPS = $(FLOT) $(BOOTSTRAP) $(FONTAWESOME)
-
-EXTRA_DIST = $(TEMPLATES) $(STATIC) $(ZIPS)
-
-hydradir = $(libexecdir)/hydra/root
-nobase_hydra_DATA = $(EXTRA_DIST)
-
-all:
-	mkdir -p $(srcdir)/static/js
-	unzip -u -d $(srcdir)/static $(BOOTSTRAP)
-	rm -rf $(srcdir)/static/bootstrap
-	mv $(srcdir)/static/$(basename $(BOOTSTRAP)) $(srcdir)/static/bootstrap
-	unzip -u -d $(srcdir)/static/js $(FLOT)
-	unzip -u -d $(srcdir)/static $(FONTAWESOME)
-	rm -rf $(srcdir)/static/fontawesome
-	mv $(srcdir)/static/$(basename $(FONTAWESOME)) $(srcdir)/static/fontawesome
-
-install-data-local: $(ZIPS)
-	mkdir -p $(hydradir)/static/js
-	cp -prvd $(srcdir)/static/js/* $(hydradir)/static/js
-	mkdir -p $(hydradir)/static/bootstrap
-	cp -prvd $(srcdir)/static/bootstrap/* $(hydradir)/static/bootstrap
-	mkdir -p $(hydradir)/static/fontawesome/{css,webfonts}
-	cp -prvd $(srcdir)/static/fontawesome/css/* $(hydradir)/static/fontawesome/css
-	cp -prvd $(srcdir)/static/fontawesome/webfonts/* $(hydradir)/static/fontawesome/webfonts
diff --git a/src/root/common.tt b/src/root/common.tt
index 4487cbe3..869d8856 100644
--- a/src/root/common.tt
+++ b/src/root/common.tt
@@ -374,7 +374,7 @@ BLOCK renderInputDiff; %]
       [% ELSIF bi1.uri == bi2.uri && bi1.revision != bi2.revision %]
         [% IF bi1.type == "git" %]
-          [% bi1.name %][% INCLUDE renderDiffUri contents=(bi1.revision.substr(0, 8) _ ' to ' _ bi2.revision.substr(0, 8)) %]
+          [% bi1.name %][% INCLUDE renderDiffUri contents=(bi1.revision.substr(0, 12) _ ' to ' _ bi2.revision.substr(0, 12)) %]
         [% ELSE %]
diff --git a/src/root/jobset.tt b/src/root/jobset.tt
index 56abdb50..5d8345f9 100644
--- a/src/root/jobset.tt
+++ b/src/root/jobset.tt
@@ -205,6 +205,7 @@
           if (!c) return;
           requestJSON({
             url: "[% HTML.escape(c.uri_for('/api/push', { jobsets = project.name _ ':' _ jobset.name, force = "1" })) %]",
+            type: 'POST',
             success: function(data) {
               bootbox.alert("The jobset has been scheduled for evaluation.");
             }
diff --git a/src/root/layout.tt b/src/root/layout.tt
index d67ff1b8..399962b4 100644
--- a/src/root/layout.tt
+++ b/src/root/layout.tt
@@ -93,7 +93,7 @@