Merge branch 'master' into nix-next

John Ericson 2025-02-13 17:54:15 -05:00
commit af9b0663f2
85 changed files with 1079 additions and 1206 deletions

.gitignore

@@ -1,48 +1,8 @@
-/.pls_cache
-*.o
 *~
-Makefile
+.test_info.*
-Makefile.in
-.deps
-.hydra-data
-/config.guess
-/config.log
-/config.status
-/config.sub
-/configure
-/depcomp
-/libtool
-/ltmain.sh
-/autom4te.cache
-/aclocal.m4
-/missing
-/install-sh
 /src/sql/hydra-postgresql.sql
 /src/sql/hydra-sqlite.sql
 /src/sql/tmp.sqlite
-/src/hydra-eval-jobs/hydra-eval-jobs
-/src/root/static/bootstrap
-/src/root/static/js/flot
-/tests
-/doc/manual/images
-/doc/manual/manual.html
-/doc/manual/manual.pdf
-/t/.bzr*
-/t/.git*
-/t/.hg*
-/t/nix
-/t/data
-/t/jobs/config.nix
-t/jobs/declarative/project.json
-/inst
-hydra-config.h
-hydra-config.h.in
 result
 result-*
 outputs
-config
-stamp-h1
-src/hydra-evaluator/hydra-evaluator
-src/hydra-queue-runner/hydra-queue-runner
-src/root/static/fontawesome/
-src/root/static/bootstrap*/


@@ -1,2 +0,0 @@
[test]
-I=rel(t/lib)


@@ -1,12 +0,0 @@
SUBDIRS = src doc
if CAN_DO_CHECK
SUBDIRS += t
endif
BOOTCLEAN_SUBDIRS = $(SUBDIRS)
DIST_SUBDIRS = $(SUBDIRS)
EXTRA_DIST = nixos-modules/hydra.nix
install-data-local: nixos-modules/hydra.nix
$(INSTALL) -d $(DESTDIR)$(datadir)/nix
$(INSTALL_DATA) nixos-modules/hydra.nix $(DESTDIR)$(datadir)/nix/hydra-module.nix


@@ -39,16 +39,16 @@ In order to evaluate and build anything you need to create _projects_ that contain
 #### Creating A Project
 Log in as administrator, click "_Admin_" and select "_Create project_". Fill the form as follows:
-- **Identifier**: `hello`
+- **Identifier**: `hello-project`
 - **Display name**: `hello`
 - **Description**: `hello project`
 Click "_Create project_".
 #### Creating A Jobset
-After creating a project you are forwarded to the project page. Click "_Actions_" and choose "_Create jobset_". Fill the form with the following values:
+After creating a project you are forwarded to the project page. Click "_Actions_" and choose "_Create jobset_". Change **Type** to Legacy for the example below. Fill the form with the following values:
-- **Identifier**: `hello`
+- **Identifier**: `hello-project`
 - **Nix expression**: `examples/hello.nix` in `hydra`
 - **Check interval**: 60
 - **Scheduling shares**: 1
@@ -57,7 +57,7 @@ We have to add two inputs for this jobset. One for _nixpkgs_ and one for _hydra_
 - **Input name**: `nixpkgs`
 - **Type**: `Git checkout`
-- **Value**: `https://github.com/nixos/nixpkgs-channels nixos-20.03`
+- **Value**: `https://github.com/NixOS/nixpkgs nixos-24.05`
 - **Input name**: `hydra`
 - **Type**: `Git checkout`


@@ -1,91 +0,0 @@
AC_INIT([Hydra], [m4_esyscmd([echo -n $(cat ./version.txt)$VERSION_SUFFIX])])
AC_CONFIG_AUX_DIR(config)
AM_INIT_AUTOMAKE([foreign serial-tests])
AC_LANG([C++])
AC_PROG_CC
AC_PROG_INSTALL
AC_PROG_LN_S
AC_PROG_LIBTOOL
AC_PROG_CXX
AC_PATH_PROG([XSLTPROC], [xsltproc])
AC_ARG_WITH([docbook-xsl],
[AS_HELP_STRING([--with-docbook-xsl=PATH],
[path of the DocBook XSL stylesheets])],
[docbookxsl="$withval"],
[docbookxsl="/docbook-xsl-missing"])
AC_SUBST([docbookxsl])
AC_DEFUN([NEED_PROG],
[
AC_PATH_PROG($1, $2)
if test -z "$$1"; then
AC_MSG_ERROR([$2 is required])
fi
])
NEED_PROG(perl, perl)
NEED_PROG([NIX_STORE_PROGRAM], [nix-store])
AC_MSG_CHECKING([whether $NIX_STORE_PROGRAM is recent enough])
if test -n "$NIX_STORE" -a -n "$TMPDIR"
then
# This may be executed from within a build chroot, so pacify
# `nix-store' instead of letting it choke while trying to mkdir
# /nix/var.
NIX_STATE_DIR="$TMPDIR"
export NIX_STATE_DIR
fi
if NIX_REMOTE=daemon PAGER=cat "$NIX_STORE_PROGRAM" --timeout 123 -q; then
AC_MSG_RESULT([yes])
else
AC_MSG_RESULT([no])
AC_MSG_ERROR([`$NIX_STORE_PROGRAM' doesn't support `--timeout'; please use a newer version.])
fi
PKG_CHECK_MODULES([NIX], [nix-main nix-expr nix-store])
testPath="$(dirname $(type -p expr))"
AC_SUBST(testPath)
CXXFLAGS+=" -include nix/config.h"
AC_CONFIG_FILES([
Makefile
doc/Makefile
doc/manual/Makefile
src/Makefile
src/hydra-evaluator/Makefile
src/hydra-eval-jobs/Makefile
src/hydra-queue-runner/Makefile
src/sql/Makefile
src/ttf/Makefile
src/lib/Makefile
src/root/Makefile
src/script/Makefile
])
# Tests might be filtered out
AM_CONDITIONAL([CAN_DO_CHECK], [test -f "$srcdir/t/api-test.t"])
AM_COND_IF(
[CAN_DO_CHECK],
[
jobsPath="$(realpath ./t/jobs)"
AC_SUBST(jobsPath)
AC_CONFIG_FILES([
t/Makefile
t/jobs/config.nix
t/jobs/declarative/project.json
])
])
AC_CONFIG_COMMANDS([executable-scripts], [])
AC_CONFIG_HEADER([hydra-config.h])
AC_OUTPUT


@@ -1,6 +1,6 @@
 # The `default.nix` in flake-compat reads `flake.nix` and `flake.lock` from `src` and
 # returns an attribute set of the shape `{ defaultNix, shellNix }`
-(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
+(import (fetchTarball "https://github.com/edolstra/flake-compat/archive/master.tar.gz") {
   src = ./.;
 }).defaultNix
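
For non-flake users this keeps the classic entry point working; for example (a sketch, assuming an x86_64-linux machine and the `packages` output defined in `flake.nix`):

```console
$ nix-build -A packages.x86_64-linux.hydra
```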


@@ -1,4 +0,0 @@
SUBDIRS = manual
BOOTCLEAN_SUBDIRS = $(SUBDIRS)
DIST_SUBDIRS = $(SUBDIRS)


@@ -1,6 +0,0 @@
MD_FILES = src/*.md
EXTRA_DIST = $(MD_FILES)
install: $(MD_FILES)
mdbook build . -d $(docdir)

doc/manual/meson.build (new file)

@@ -0,0 +1,36 @@
srcs = files(
'src/SUMMARY.md',
'src/about.md',
'src/api.md',
'src/configuration.md',
'src/hacking.md',
'src/installation.md',
'src/introduction.md',
'src/jobs.md',
'src/monitoring/README.md',
'src/notifications.md',
'src/plugins/README.md',
'src/plugins/RunCommand.md',
'src/plugins/declarative-projects.md',
'src/projects.md',
'src/webhooks.md',
)
manual = custom_target(
'manual',
command: [
mdbook,
'build',
'@SOURCE_ROOT@/doc/manual',
'-d', meson.current_build_dir() / 'html'
],
depend_files: srcs,
output: ['html'],
build_by_default: true,
)
install_subdir(
manual.full_path(),
install_dir: get_option('datadir') / 'doc/hydra',
strip_directory: true,
)


@@ -208,7 +208,8 @@ Example configuration:
 <role_mapping>
   # Make all users in the hydra_admin group Hydra admins
   hydra_admin = admin
-  # Allow all users in the dev group to restart jobs and cancel builds
+  # Allow all users in the dev group to eval jobsets, restart jobs and cancel builds
+  dev = eval-jobset
   dev = restart-jobs
   dev = cancel-build
 </role_mapping>


@@ -15,12 +15,18 @@ and dependencies can be found:
 $ nix-shell
 ```

+or when flakes are enabled:
+
+```console
+$ nix develop
+```
+
 To build Hydra, you should then do:

 ```console
 [nix-shell]$ autoreconfPhase
 [nix-shell]$ configurePhase
-[nix-shell]$ make
+[nix-shell]$ make -j$(nproc)
 ```

 You start a local database, the webserver, and other components with
@@ -30,6 +36,8 @@ foreman:
 $ foreman start
 ```

+The Hydra interface will be available on port 63333, with an admin user named "alice" with password "foobar".
+
 You can run just the Hydra web server in your source tree as follows:

 ```console


@@ -42,7 +42,7 @@ Sets CircleCI status.

 ## Compress build logs

-Compresses build logs after a build with bzip2.
+Compresses build logs after a build with bzip2 or zstd.

 ### Configuration options
@@ -50,6 +50,14 @@ Compresses build logs after a build with bzip2.

   Enable log compression

+- `compress_build_logs_compression`
+
+  Which compression format to use. Valid values are bzip2 (default) and zstd.
+
+- `compress_build_logs_silent`
+
+  Whether to compress logs silently.
+
 ### Example

 ```xml
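
For reference, a minimal `hydra.conf` fragment exercising the new options might look like the following (a sketch assembled from the options documented above, not the manual's verbatim example):

```xml
compress_build_logs = 1
compress_build_logs_compression = zstd
compress_build_logs_silent = 0
```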


@@ -1,9 +1,12 @@
 # Webhooks

-Hydra can be notified by github's webhook to trigger a new evaluation when a
+Hydra can be notified by github or gitea with webhooks to trigger a new evaluation when a
 jobset has a github repo in its input.

-To set up a github webhook go to `https://github.com/<yourhandle>/<yourrepo>/settings` and in the `Webhooks` tab
-click on `Add webhook`.
+## GitHub
+
+To set up a webhook for a GitHub repository go to `https://github.com/<yourhandle>/<yourrepo>/settings`
+and in the `Webhooks` tab click on `Add webhook`.

 - In `Payload URL` fill in `https://<your-hydra-domain>/api/push-github`.
 - In `Content type` switch to `application/json`.
@@ -11,3 +14,14 @@ click on `Add webhook`.
 - For `Which events would you like to trigger this webhook?` keep the default option for events on `Just the push event.`.

 Then add the hook with `Add webhook`.
+
+## Gitea
+
+To set up a webhook for a Gitea repository go to the settings of the repository in your Gitea instance
+and in the `Webhooks` tab click on `Add Webhook` and choose `Gitea` in the drop down.
+
+- In `Target URL` fill in `https://<your-hydra-domain>/api/push-gitea`.
+- Keep HTTP method `POST`, POST Content Type `application/json` and Trigger On `Push Events`.
+- Change the branch filter to match the git branch hydra builds.
+
+Then add the hook with `Add webhook`.
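
A quick way to smoke-test the new endpoint by hand is to POST a minimal payload; the handler added in this commit only inspects `repository.clone_url` (the domain and repository URL below are placeholders):

```console
$ curl -X POST https://<your-hydra-domain>/api/push-gitea \
    -H 'Content-Type: application/json' \
    -d '{"repository": {"clone_url": "https://gitea.example.com/me/repo.git"}}'
```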


@@ -1,5 +1,5 @@
 #
-# jobset example file. This file canbe referenced as Nix expression
+# jobset example file. This file can be referenced as Nix expression
 # in a jobset configuration along with inputs for nixpkgs and the
 # repository containing this file.
 #

flake.lock (generated)

@@ -1,34 +1,18 @@
 {
   "nodes": {
-    "flake-compat": {
-      "flake": false,
-      "locked": {
-        "lastModified": 1673956053,
-        "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
-        "owner": "edolstra",
-        "repo": "flake-compat",
-        "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
-        "type": "github"
-      },
-      "original": {
-        "owner": "edolstra",
-        "repo": "flake-compat",
-        "type": "github"
-      }
-    },
     "flake-parts": {
       "inputs": {
         "nixpkgs-lib": [
-          "nix",
+          "nix-eval-jobs",
           "nixpkgs"
         ]
       },
       "locked": {
-        "lastModified": 1712014858,
-        "narHash": "sha256-sB4SWl2lX95bExY2gMFG5HIzvva5AVMJd4Igm+GpZNw=",
+        "lastModified": 1722555600,
+        "narHash": "sha256-XOQkdLafnb/p9ij77byFQjDf5m5QYl9b2REiVClC+x4=",
         "owner": "hercules-ci",
         "repo": "flake-parts",
-        "rev": "9126214d0a59633752a136528f5f3b9aa8565b7d",
+        "rev": "8471fe90ad337a8074e957b69ca4d0089218391d",
         "type": "github"
       },
       "original": {
@@ -37,131 +21,120 @@
         "type": "github"
       }
     },
-    "flake-utils": {
-      "locked": {
-        "lastModified": 1667395993,
-        "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "type": "github"
-      }
-    },
     "libgit2": {
       "flake": false,
       "locked": {
-        "lastModified": 1697646580,
-        "narHash": "sha256-oX4Z3S9WtJlwvj0uH9HlYcWv+x1hqp8mhXl7HsLu2f0=",
+        "lastModified": 1715853528,
+        "narHash": "sha256-J2rCxTecyLbbDdsyBWn9w7r3pbKRMkI9E7RvRgAqBdY=",
         "owner": "libgit2",
         "repo": "libgit2",
-        "rev": "45fd9ed7ae1a9b74b957ef4f337bc3c8b3df01b5",
+        "rev": "36f7e21ad757a3dacc58cf7944329da6bc1d6e96",
         "type": "github"
       },
       "original": {
         "owner": "libgit2",
+        "ref": "v1.8.1",
         "repo": "libgit2",
         "type": "github"
       }
     },
     "nix": {
       "inputs": {
-        "flake-compat": "flake-compat",
-        "flake-parts": "flake-parts",
-        "libgit2": "libgit2",
+        "flake-compat": [],
+        "flake-parts": [],
+        "git-hooks-nix": [],
+        "libgit2": [
+          "libgit2"
+        ],
         "nixpkgs": [
           "nixpkgs"
         ],
-        "nixpkgs-regression": "nixpkgs-regression",
-        "pre-commit-hooks": "pre-commit-hooks"
+        "nixpkgs-23-11": [],
+        "nixpkgs-regression": []
       },
       "locked": {
-        "lastModified": 1717016009,
-        "narHash": "sha256-RJ0k7fvKjuUGt439F9uFtUw44SM87A+XKBf6sH6qPy8=",
+        "lastModified": 1726787955,
+        "narHash": "sha256-XFznzb8L4SdUm9u+w3DPpMWJhffuv+/6+aiVl00slns=",
         "owner": "NixOS",
         "repo": "nix",
-        "rev": "ef5c846e257e1e284ad47ed6be4308d190fe6531",
+        "rev": "a7fdef6858dd45b9d7bda7c92324c63faee7f509",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
+        "ref": "2.24-maintenance",
         "repo": "nix",
         "type": "github"
       }
     },
+    "nix-eval-jobs": {
+      "inputs": {
+        "flake-parts": "flake-parts",
+        "nix-github-actions": [],
+        "nixpkgs": [
+          "nixpkgs"
+        ],
+        "treefmt-nix": "treefmt-nix"
+      },
+      "locked": {
+        "lastModified": 1733814344,
+        "narHash": "sha256-3wwtKpS5tUBdjaGeSia7CotonbiRB6K5Kp0dsUt3nzU=",
+        "owner": "nix-community",
+        "repo": "nix-eval-jobs",
+        "rev": "889ea1406736b53cf165b6c28398aae3969418d1",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-community",
+        "ref": "release-2.24",
+        "repo": "nix-eval-jobs",
+        "type": "github"
+      }
+    },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1712848736,
-        "narHash": "sha256-CzZwhqyLlebljv1zFS2KWVH/3byHND0LfaO1jKsGuVo=",
+        "lastModified": 1726688310,
+        "narHash": "sha256-Xc9lEtentPCEtxc/F1e6jIZsd4MPDYv4Kugl9WtXlz0=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "1d6a23f11e44d0fb64b3237569b87658a9eb5643",
+        "rev": "dbebdd67a6006bb145d98c8debf9140ac7e651d0",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-23.11-small",
+        "ref": "nixos-24.05-small",
         "repo": "nixpkgs",
         "type": "github"
       }
     },
-    "nixpkgs-regression": {
-      "locked": {
-        "lastModified": 1643052045,
-        "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
-        "owner": "NixOS",
-        "repo": "nixpkgs",
-        "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
-        "type": "github"
-      },
-      "original": {
-        "owner": "NixOS",
-        "repo": "nixpkgs",
-        "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
-        "type": "github"
-      }
-    },
-    "pre-commit-hooks": {
-      "inputs": {
-        "flake-compat": [
-          "nix"
-        ],
-        "flake-utils": "flake-utils",
-        "gitignore": [
-          "nix"
-        ],
-        "nixpkgs": [
-          "nix",
-          "nixpkgs"
-        ],
-        "nixpkgs-stable": [
-          "nix",
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1712897695,
-        "narHash": "sha256-nMirxrGteNAl9sWiOhoN5tIHyjBbVi5e2tgZUgZlK3Y=",
-        "owner": "cachix",
-        "repo": "pre-commit-hooks.nix",
-        "rev": "40e6053ecb65fcbf12863338a6dcefb3f55f1bf8",
-        "type": "github"
-      },
-      "original": {
-        "owner": "cachix",
-        "repo": "pre-commit-hooks.nix",
-        "type": "github"
-      }
-    },
     "root": {
       "inputs": {
+        "libgit2": "libgit2",
         "nix": "nix",
+        "nix-eval-jobs": "nix-eval-jobs",
         "nixpkgs": "nixpkgs"
       }
-    }
+    },
+    "treefmt-nix": {
+      "inputs": {
+        "nixpkgs": [
+          "nix-eval-jobs",
+          "nixpkgs"
+        ]
+      },
+      "locked": {
+        "lastModified": 1723303070,
+        "narHash": "sha256-krGNVA30yptyRonohQ+i9cnK+CfCpedg6z3qzqVJcTs=",
+        "owner": "numtide",
+        "repo": "treefmt-nix",
+        "rev": "14c092e0326de759e16b37535161b3cb9770cea3",
+        "type": "github"
+      },
+      "original": {
+        "owner": "numtide",
+        "repo": "treefmt-nix",
+        "type": "github"
+      }
+    }
   },
   "root": "root",


@@ -1,22 +1,30 @@
 {
   description = "A Nix-based continuous build system";

-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11-small";
-  inputs.nix.url = "github:NixOS/nix";
-  inputs.nix.inputs.nixpkgs.follows = "nixpkgs";
-
-  outputs = { self, nixpkgs, nix }:
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.05-small";
+
+  inputs.libgit2 = { url = "github:libgit2/libgit2/v1.8.1"; flake = false; };
+
+  inputs.nix.url = "github:NixOS/nix/2.24-maintenance";
+  inputs.nix.inputs.nixpkgs.follows = "nixpkgs";
+  inputs.nix.inputs.libgit2.follows = "libgit2";
+
+  inputs.nix-eval-jobs.url = "github:nix-community/nix-eval-jobs/release-2.24";
+  inputs.nix-eval-jobs.inputs.nixpkgs.follows = "nixpkgs";
+
+  # hide nix dev tooling from our lock file
+  inputs.nix.inputs.flake-parts.follows = "";
+  inputs.nix.inputs.git-hooks-nix.follows = "";
+  inputs.nix.inputs.nixpkgs-regression.follows = "";
+  inputs.nix.inputs.nixpkgs-23-11.follows = "";
+  inputs.nix.inputs.flake-compat.follows = "";
+
+  # hide nix-eval-jobs dev tooling from our lock file
+  inputs.nix-eval-jobs.inputs.nix-github-actions.follows = "";
+
+  outputs = { self, nixpkgs, nix, nix-eval-jobs, ... }:
     let
       systems = [ "x86_64-linux" "aarch64-linux" ];
       forEachSystem = nixpkgs.lib.genAttrs systems;
-
-      overlayList = [ self.overlays.default nix.overlays.default ];
-
-      pkgsBySystem = forEachSystem (system: import nixpkgs {
-        inherit system;
-        overlays = overlayList;
-      });
     in
     rec {
@@ -24,12 +32,13 @@
       overlays.default = final: prev: {
         hydra = final.callPackage ./package.nix {
           inherit (nixpkgs.lib) fileset;
+          nix-eval-jobs = nix-eval-jobs.packages.${final.system}.default;
           rawSrc = self;
+          nix-perl-bindings = final.nixComponents.nix-perl-bindings;
         };
       };

       hydraJobs = {
         build = forEachSystem (system: packages.${system}.hydra);

         buildNoTests = forEachSystem (system:
@@ -38,19 +47,21 @@
           })
         );

-        manual = forEachSystem (system:
-          let pkgs = pkgsBySystem.${system}; in
-          pkgs.runCommand "hydra-manual-${pkgs.hydra.version}" { }
+        manual = forEachSystem (system: let
+          pkgs = nixpkgs.legacyPackages.${system};
+          hydra = self.packages.${pkgs.hostPlatform.system}.hydra;
+        in
+          pkgs.runCommand "hydra-manual-${hydra.version}" { }
           ''
             mkdir -p $out/share
-            cp -prvd ${pkgs.hydra}/share/doc $out/share/
+            cp -prvd ${hydra.doc}/share/doc $out/share/
             mkdir $out/nix-support
             echo "doc manual $out/share/doc/hydra" >> $out/nix-support/hydra-build-products
           '');

         tests = import ./nixos-tests.nix {
-          inherit forEachSystem nixpkgs pkgsBySystem nixosModules;
+          inherit forEachSystem nixpkgs nixosModules;
         };

         container = nixosConfigurations.container.config.system.build.toplevel;
@@ -63,12 +74,18 @@
       });

       packages = forEachSystem (system: {
-        hydra = pkgsBySystem.${system}.hydra;
-        default = pkgsBySystem.${system}.hydra;
+        hydra = nixpkgs.legacyPackages.${system}.callPackage ./package.nix {
+          inherit (nixpkgs.lib) fileset;
+          nix-eval-jobs = nix-eval-jobs.packages.${system}.default;
+          rawSrc = self;
+          nix = nix.packages.${system}.nix;
+          nix-perl-bindings = nix.hydraJobs.perlBindings.${system};
+        };
+        default = self.packages.${system}.hydra;
       });

       nixosModules = import ./nixos-modules {
-        overlays = overlayList;
+        inherit self;
       };

       nixosConfigurations.container = nixpkgs.lib.nixosSystem {
@@ -76,7 +93,6 @@
         modules =
           [
             self.nixosModules.hydra
-            self.nixosModules.overlayNixpkgsForThisHydra
             self.nixosModules.hydraTest
             self.nixosModules.hydraProxy
             {
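
With the `packages` output above, the package builds straight from a checkout (a sketch; assumes the flakes feature is enabled):

```console
$ nix build .#hydra
```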


@@ -70,7 +70,7 @@ paths:
           $ref: '#/components/examples/projects-success'

   /api/push:
-    put:
+    post:
       summary: trigger jobsets
       parameters:
       - in: query
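
Because `/api/push` now expects POST instead of PUT, existing callers must be updated; a rough sketch of a manual trigger (placeholder domain and jobset name; after this commit the request must also be authenticated as a user with eval-jobset privileges):

```console
$ curl -X POST "https://<your-hydra-domain>/api/push?jobsets=<project>:<jobset>"
```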

meson.build (new file)

@@ -0,0 +1,40 @@
project('hydra', 'cpp',
version: files('version.txt'),
license: 'GPL-3.0',
default_options: [
'debug=true',
'optimization=2',
'cpp_std=c++20',
],
)
nix_store_dep = dependency('nix-store', required: true)
nix_main_dep = dependency('nix-main', required: true)
nix_expr_dep = dependency('nix-expr', required: true)
nix_flake_dep = dependency('nix-flake', required: true)
nix_cmd_dep = dependency('nix-cmd', required: true)
# Nix needs extra flags not provided in its pkg-config files.
nix_dep = declare_dependency(
dependencies: [
nix_store_dep,
nix_main_dep,
nix_expr_dep,
nix_flake_dep,
nix_cmd_dep,
],
compile_args: ['-include', 'nix/config.h'],
)
pqxx_dep = dependency('libpqxx', required: true)
prom_cpp_core_dep = dependency('prometheus-cpp-core', required: true)
prom_cpp_pull_dep = dependency('prometheus-cpp-pull', required: true)
mdbook = find_program('mdbook', native: true)
perl = find_program('perl', native: true)
subdir('doc/manual')
subdir('nixos-modules')
subdir('src')
subdir('t')
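
With this top-level `meson.build`, the standard Meson workflow should apply (a sketch, assuming the dependencies declared above are visible to pkg-config, e.g. inside the development shell):

```console
$ meson setup build
$ ninja -C build
$ meson test -C build
```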


@@ -1,11 +1,10 @@
-{ overlays }:
+{ self }:
 {
-  hydra = import ./hydra.nix;
-
-  overlayNixpkgsForThisHydra = { pkgs, ... }: {
-    nixpkgs = { inherit overlays; };
-    services.hydra.package = pkgs.hydra;
+  hydra = { pkgs, lib, ... }: {
+    _file = ./default.nix;
+    imports = [ ./hydra.nix ];
+    services.hydra-dev.package = lib.mkDefault self.packages.${pkgs.hostPlatform.system}.hydra;
   };

   hydraTest = { pkgs, ... }: {


@@ -68,8 +68,6 @@ in
       package = mkOption {
         type = types.path;
-        default = pkgs.hydra_unstable;
-        defaultText = literalExpression "pkgs.hydra";
         description = "The Hydra package.";
       };
@@ -340,6 +338,7 @@ in
     systemd.services.hydra-queue-runner =
       { wantedBy = [ "multi-user.target" ];
         requires = [ "hydra-init.service" ];
+        wants = [ "network-online.target" ];
         after = [ "hydra-init.service" "network.target" "network-online.target" ];
         path = [ cfg.package pkgs.nettools pkgs.openssh pkgs.bzip2 config.nix.package ];
         restartTriggers = [ hydraConf ];
@@ -408,6 +407,7 @@ in
         requires = [ "hydra-init.service" ];
         after = [ "hydra-init.service" ];
         restartTriggers = [ hydraConf ];
+        path = [ pkgs.zstd ];
         environment = env // {
           PGPASSFILE = "${baseDir}/pgpass-queue-runner"; # grrr
           HYDRA_DBI = "${env.HYDRA_DBI};application_name=hydra-notify";
@@ -458,10 +458,17 @@ in
     #   logs automatically after a step finishes, but this doesn't work
     #   if the queue runner is stopped prematurely.
     systemd.services.hydra-compress-logs =
-      { path = [ pkgs.bzip2 ];
+      { path = [ pkgs.bzip2 pkgs.zstd ];
        script =
           ''
-            find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r bzip2 -v -f
+            set -eou pipefail
+            compression=$(sed -nr 's/compress_build_logs_compression = ()/\1/p' ${baseDir}/hydra.conf)
+            if [[ $compression == "" ]]; then
+              compression="bzip2"
+            elif [[ $compression == zstd ]]; then
+              compression="zstd --rm"
+            fi
+            find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r "$compression" --force --quiet
           '';
         startAt = "Sun 01:45";
       };
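
On the NixOS side, the new zstd path can be enabled through the module's `extraConfig`, which ends up in the generated `hydra.conf` read by the script above (a sketch; option names as documented for the CompressLog plugin):

```nix
services.hydra-dev = {
  enable = true;
  extraConfig = ''
    compress_build_logs = 1
    compress_build_logs_compression = zstd
  '';
};
```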


@@ -0,0 +1,4 @@
install_data('hydra.nix',
install_dir: get_option('datadir') / 'nix',
rename: ['hydra-module.nix'],
)


@@ -1,13 +1,12 @@
-{ forEachSystem, nixpkgs, pkgsBySystem, nixosModules }:
+{ forEachSystem, nixpkgs, nixosModules }:

 let

   # NixOS configuration used for VM tests.
   hydraServer =
-    { config, pkgs, ... }:
+    { pkgs, ... }:
     {
       imports = [
         nixosModules.hydra
-        nixosModules.overlayNixpkgsForThisHydra
         nixosModules.hydraTest
       ];
@@ -44,11 +43,10 @@ in
   });

   notifications = forEachSystem (system:
-    let pkgs = pkgsBySystem.${system}; in
     with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; };
     simpleTest {
       name = "hydra-notifications";
-      nodes.machine = { pkgs, ... }: {
+      nodes.machine = {
         imports = [ hydraServer ];
         services.hydra-dev.extraConfig = ''
           <influxdb>
@@ -89,7 +87,7 @@ in
       # Setup the project and jobset
       machine.succeed(
-          "su - hydra -c 'perl -I ${pkgs.hydra.perlDeps}/lib/perl5/site_perl ${./t/setup-notifications-jobset.pl}' >&2"
+          "su - hydra -c 'perl -I ${config.services.hydra-dev.package.perlDeps}/lib/perl5/site_perl ${./t/setup-notifications-jobset.pl}' >&2"
       )

       # Wait until hydra has build the job and
@@ -103,7 +101,7 @@ in
   });

   gitea = forEachSystem (system:
-    let pkgs = pkgsBySystem.${system}; in
+    let pkgs = nixpkgs.legacyPackages.${system}; in
     with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; };
     makeTest {
       name = "hydra-gitea";
@@ -298,7 +296,7 @@ in
   });

   validate-openapi = forEachSystem (system:
-    let pkgs = pkgsBySystem.${system}; in
+    let pkgs = nixpkgs.legacyPackages.${system}; in
     pkgs.runCommand "validate-openapi"
       { buildInputs = [ pkgs.openapi-generator-cli ]; }
       ''


@@ -9,10 +9,12 @@
 , perlPackages

 , nix
+, nix-perl-bindings
 , git

 , makeWrapper
-, autoreconfHook
+, meson
+, ninja
 , nukeReferences
 , pkg-config
 , mdbook
@@ -48,6 +50,7 @@
 , xz
 , gnutar
 , gnused
+, nix-eval-jobs
 , rpm
 , dpkg
@@ -59,7 +62,7 @@ let
     name = "hydra-perl-deps";
     paths = lib.closePropagation
       ([
-        nix.perl-bindings
+        nix-perl-bindings
         git
       ] ++ (with perlPackages; [
         AuthenSASL
@@ -90,6 +93,7 @@ let
         DigestSHA1
         EmailMIME
         EmailSender
+        FileCopyRecursive
         FileLibMagic
         FileSlurper
         FileWhich
@@ -137,28 +141,24 @@ stdenv.mkDerivation (finalAttrs: {
   src = fileset.toSource {
     root = ./.;
     fileset = fileset.unions ([
-      ./version.txt
-      ./configure.ac
-      ./Makefile.am
-      ./src
       ./doc
-      ./nixos-modules/hydra.nix
-      # These are always needed to appease Automake
-      ./t/Makefile.am
-      ./t/jobs/config.nix.in
-      ./t/jobs/declarative/project.json.in
-    ] ++ lib.optionals finalAttrs.doCheck [
+      ./meson.build
+      ./nixos-modules
+      ./src
       ./t
+      ./version.txt
       ./.perlcriticrc
+      ./.yath.rc
     ]);
   };

+  outputs = [ "out" "doc" ];
+
   strictDeps = true;

   nativeBuildInputs = [
     makeWrapper
-    autoreconfHook
+    meson
+    ninja
     nukeReferences
     pkg-config
     mdbook
@@ -191,6 +191,7 @@ stdenv.mkDerivation (finalAttrs: {
     openldap
     postgresql_13
     pixz
+    nix-eval-jobs
   ];

   checkInputs = [
@@ -219,15 +220,22 @@ stdenv.mkDerivation (finalAttrs: {
       darcs
       gnused
       breezy
+      nix-eval-jobs
     ] ++ lib.optionals stdenv.isLinux [ rpm dpkg cdrkit ]
   );

   OPENLDAP_ROOT = openldap;

+  mesonBuildType = "release";
+
+  postPatch = ''
+    patchShebangs .
+  '';
+
   shellHook = ''
     pushd $(git rev-parse --show-toplevel) >/dev/null
-    PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-eval-jobs:$(pwd)/src/hydra-queue-runner:$PATH
+    PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-queue-runner:$PATH
     PERL5LIB=$(pwd)/src/lib:$PERL5LIB
     export HYDRA_HOME="$(pwd)/src/"
     mkdir -p .hydra-data
@@ -237,14 +245,11 @@ stdenv.mkDerivation (finalAttrs: {
     popd >/dev/null
   '';

-  NIX_LDFLAGS = [ "-lpthread" ];
-
-  enableParallelBuilding = true;
-
   doCheck = true;

+  mesonCheckFlags = [ "--verbose" ];
+
   preCheck = ''
-    patchShebangs .
     export LOGNAME=''${LOGNAME:-foo}
     # set $HOME for bzr so it can create its trace file
     export HOME=$(mktemp -d)
@@ -261,7 +266,8 @@ stdenv.mkDerivation (finalAttrs: {
       --prefix PATH ':' $out/bin:$hydraPath \
       --set HYDRA_RELEASE ${version} \
       --set HYDRA_HOME $out/libexec/hydra \
-      --set NIX_RELEASE ${nix.name or "unknown"}
+      --set NIX_RELEASE ${nix.name or "unknown"} \
+      --set NIX_EVAL_JOBS_RELEASE ${nix-eval-jobs.name or "unknown"}
     done
   '';


@@ -1,3 +0,0 @@
SUBDIRS = hydra-evaluator hydra-eval-jobs hydra-queue-runner sql script lib root ttf
BOOTCLEAN_SUBDIRS = $(SUBDIRS)
DIST_SUBDIRS = $(SUBDIRS)


@@ -1,5 +0,0 @@
bin_PROGRAMS = hydra-eval-jobs
hydra_eval_jobs_SOURCES = hydra-eval-jobs.cc
hydra_eval_jobs_LDADD = $(NIX_LIBS) -lnixcmd
hydra_eval_jobs_CXXFLAGS = $(NIX_CFLAGS) -I ../libhydra


@@ -1,579 +0,0 @@
#include <iostream>
#include <thread>
#include <optional>
#include <unordered_map>
#include "shared.hh"
#include "store-api.hh"
#include "eval.hh"
#include "eval-inline.hh"
#include "eval-settings.hh"
#include "signals.hh"
#include "terminal.hh"
#include "util.hh"
#include "get-drvs.hh"
#include "globals.hh"
#include "common-eval-args.hh"
#include "flake/flakeref.hh"
#include "flake/flake.hh"
#include "attr-path.hh"
#include "derivations.hh"
#include "local-fs-store.hh"
#include "hydra-config.hh"
#include <sys/types.h>
#include <sys/wait.h>
#include <sys/resource.h>
#include <nlohmann/json.hpp>
void check_pid_status_nonblocking(pid_t check_pid)
{
// Only check 'initialized' and known PID's
if (check_pid <= 0) { return; }
int wstatus = 0;
pid_t pid = waitpid(check_pid, &wstatus, WNOHANG);
// -1 = failure, WNOHANG: 0 = no change
if (pid <= 0) { return; }
std::cerr << "child process (" << pid << ") ";
if (WIFEXITED(wstatus)) {
std::cerr << "exited with status=" << WEXITSTATUS(wstatus) << std::endl;
} else if (WIFSIGNALED(wstatus)) {
std::cerr << "killed by signal=" << WTERMSIG(wstatus) << std::endl;
} else if (WIFSTOPPED(wstatus)) {
std::cerr << "stopped by signal=" << WSTOPSIG(wstatus) << std::endl;
} else if (WIFCONTINUED(wstatus)) {
std::cerr << "continued" << std::endl;
}
}
using namespace nix;
static Path gcRootsDir;
static size_t maxMemorySize;
struct MyArgs : MixEvalArgs, MixCommonArgs, RootArgs
{
Path releaseExpr;
bool flake = false;
bool dryRun = false;
MyArgs() : MixCommonArgs("hydra-eval-jobs")
{
addFlag({
.longName = "gc-roots-dir",
.description = "garbage collector roots directory",
.labels = {"path"},
.handler = {&gcRootsDir}
});
addFlag({
.longName = "dry-run",
.description = "don't create store derivations",
.handler = {&dryRun, true}
});
addFlag({
.longName = "flake",
.description = "build a flake",
.handler = {&flake, true}
});
expectArg("expr", &releaseExpr);
}
};
static MyArgs myArgs;
static std::string queryMetaStrings(EvalState & state, PackageInfo & drv, const std::string & name, const std::string & subAttribute)
{
Strings res;
std::function<void(Value & v)> rec;
rec = [&](Value & v) {
state.forceValue(v, noPos);
if (v.type() == nString)
res.emplace_back(v.string_view());
else if (v.isList())
for (unsigned int n = 0; n < v.listSize(); ++n)
rec(*v.listElems()[n]);
else if (v.type() == nAttrs) {
auto a = v.attrs()->find(state.symbols.create(subAttribute));
if (a != v.attrs()->end())
res.push_back(std::string(state.forceString(*a->value, a->pos, "while evaluating meta attributes")));
}
};
Value * v = drv.queryMeta(name);
if (v) rec(*v);
return concatStringsSep(", ", res);
}
static void worker(
EvalState & state,
Bindings & autoArgs,
AutoCloseFD & to,
AutoCloseFD & from)
{
Value vTop;
if (myArgs.flake) {
using namespace flake;
auto flakeRef = parseFlakeRef(myArgs.releaseExpr);
auto vFlake = state.allocValue();
auto lockedFlake = lockFlake(state, flakeRef,
LockFlags {
.updateLockFile = false,
.useRegistries = false,
.allowUnlocked = false,
});
callFlake(state, lockedFlake, *vFlake);
auto vOutputs = vFlake->attrs()->get(state.symbols.create("outputs"))->value;
state.forceValue(*vOutputs, noPos);
auto aHydraJobs = vOutputs->attrs()->get(state.symbols.create("hydraJobs"));
if (!aHydraJobs)
aHydraJobs = vOutputs->attrs()->get(state.symbols.create("checks"));
if (!aHydraJobs)
throw Error("flake '%s' does not provide any Hydra jobs or checks", flakeRef);
vTop = *aHydraJobs->value;
} else {
state.evalFile(lookupFileArg(state, myArgs.releaseExpr), vTop);
}
auto vRoot = state.allocValue();
state.autoCallFunction(autoArgs, vTop, *vRoot);
while (true) {
/* Wait for the master to send us a job name. */
writeLine(to.get(), "next");
auto s = readLine(from.get());
if (s == "exit") break;
if (!hasPrefix(s, "do ")) abort();
std::string attrPath(s, 3);
debug("worker process %d at '%s'", getpid(), attrPath);
/* Evaluate it and send info back to the master. */
nlohmann::json reply;
try {
auto vTmp = findAlongAttrPath(state, attrPath, autoArgs, *vRoot).first;
auto v = state.allocValue();
state.autoCallFunction(autoArgs, *vTmp, *v);
if (auto drv = getDerivation(state, *v, false)) {
// CA derivations do not have static output paths, so we
// have to defensively not query output paths in case we
// encounter one.
PackageInfo::Outputs outputs = drv->queryOutputs(
!experimentalFeatureSettings.isEnabled(Xp::CaDerivations));
if (drv->querySystem() == "unknown")
state.error<EvalError>("derivation must have a 'system' attribute").debugThrow();
auto drvPath = state.store->printStorePath(drv->requireDrvPath());
nlohmann::json job;
job["nixName"] = drv->queryName();
job["system"] =drv->querySystem();
job["drvPath"] = drvPath;
job["description"] = drv->queryMetaString("description");
job["license"] = queryMetaStrings(state, *drv, "license", "shortName");
job["homepage"] = drv->queryMetaString("homepage");
job["maintainers"] = queryMetaStrings(state, *drv, "maintainers", "email");
job["schedulingPriority"] = drv->queryMetaInt("schedulingPriority", 100);
job["timeout"] = drv->queryMetaInt("timeout", 36000);
job["maxSilent"] = drv->queryMetaInt("maxSilent", 7200);
job["isChannel"] = drv->queryMetaBool("isHydraChannel", false);
/* If this is an aggregate, then get its constituents. */
auto a = v->attrs()->get(state.symbols.create("_hydraAggregate"));
if (a && state.forceBool(*a->value, a->pos, "while evaluating the `_hydraAggregate` attribute")) {
auto a = v->attrs()->get(state.symbols.create("constituents"));
if (!a)
state.error<EvalError>("derivation must have a constituents attribute").debugThrow();
NixStringContext context;
state.coerceToString(a->pos, *a->value, context, "while evaluating the `constituents` attribute", true, false);
for (auto & c : context)
std::visit(overloaded {
[&](const NixStringContextElem::Built & b) {
job["constituents"].push_back(b.drvPath->to_string(*state.store));
},
[&](const NixStringContextElem::Opaque & o) {
},
[&](const NixStringContextElem::DrvDeep & d) {
},
}, c.raw);
state.forceList(*a->value, a->pos, "while evaluating the `constituents` attribute");
for (unsigned int n = 0; n < a->value->listSize(); ++n) {
auto v = a->value->listElems()[n];
state.forceValue(*v, noPos);
if (v->type() == nString)
job["namedConstituents"].push_back(v->string_view());
}
}
/* Register the derivation as a GC root. !!! This
registers roots for jobs that we may have already
done. */
auto localStore = state.store.dynamic_pointer_cast<LocalFSStore>();
if (gcRootsDir != "" && localStore) {
Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath));
if (!pathExists(root))
localStore->addPermRoot(localStore->parseStorePath(drvPath), root);
}
nlohmann::json out;
for (auto & [outputName, optOutputPath] : outputs) {
if (optOutputPath) {
out[outputName] = state.store->printStorePath(*optOutputPath);
} else {
// See the `queryOutputs` call above; we should
// not encounter missing output paths otherwise.
assert(experimentalFeatureSettings.isEnabled(Xp::CaDerivations));
out[outputName] = nullptr;
}
}
job["outputs"] = std::move(out);
reply["job"] = std::move(job);
}
else if (v->type() == nAttrs) {
auto attrs = nlohmann::json::array();
StringSet ss;
for (auto & i : v->attrs()->lexicographicOrder(state.symbols)) {
std::string name(state.symbols[i->name]);
if (name.find(' ') != std::string::npos) {
printError("skipping job with illegal name '%s'", name);
continue;
}
attrs.push_back(name);
}
reply["attrs"] = std::move(attrs);
}
else if (v->type() == nNull)
;
else state.error<TypeError>("attribute '%s' is %s, which is not supported", attrPath, showType(*v)).debugThrow();
} catch (EvalError & e) {
auto msg = e.msg();
// Transmits the error we got from the previous evaluation
// in the JSON output.
reply["error"] = filterANSIEscapes(msg, true);
// Don't forget to print it into the STDERR log, this is
// what's shown in the Hydra UI.
printError(msg);
}
writeLine(to.get(), reply.dump());
/* If our RSS exceeds the maximum, exit. The master will
start a new process. */
struct rusage r;
getrusage(RUSAGE_SELF, &r);
if ((size_t) r.ru_maxrss > maxMemorySize * 1024) break;
}
writeLine(to.get(), "restart");
}
int main(int argc, char * * argv)
{
/* Prevent undeclared dependencies in the evaluation via
$NIX_PATH. */
unsetenv("NIX_PATH");
return handleExceptions(argv[0], [&]() {
auto config = std::make_unique<HydraConfig>();
auto nrWorkers = config->getIntOption("evaluator_workers", 1);
maxMemorySize = config->getIntOption("evaluator_max_memory_size", 4096);
initNix();
initGC();
myArgs.parseCmdline(argvToStrings(argc, argv));
auto pureEval = config->getBoolOption("evaluator_pure_eval", myArgs.flake);
/* FIXME: The build hook in conjunction with import-from-derivation is causing "unexpected EOF" during eval */
settings.builders = "";
/* Prevent access to paths outside of the Nix search path and
to the environment. */
evalSettings.restrictEval = true;
/* When building a flake, use pure evaluation (no access to
'getEnv', 'currentSystem' etc. */
evalSettings.pureEval = pureEval;
if (myArgs.dryRun) settings.readOnlyMode = true;
if (myArgs.releaseExpr == "") throw UsageError("no expression specified");
if (gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified");
struct State
{
std::set<std::string> todo{""};
std::set<std::string> active;
nlohmann::json jobs;
std::exception_ptr exc;
};
std::condition_variable wakeup;
Sync<State> state_;
/* Start a handler thread per worker process. */
auto handler = [&]()
{
pid_t pid = -1;
try {
AutoCloseFD from, to;
while (true) {
/* Start a new worker process if necessary. */
if (pid == -1) {
Pipe toPipe, fromPipe;
toPipe.create();
fromPipe.create();
pid = startProcess(
[&,
to{std::make_shared<AutoCloseFD>(std::move(fromPipe.writeSide))},
from{std::make_shared<AutoCloseFD>(std::move(toPipe.readSide))}
]()
{
try {
EvalState state(myArgs.lookupPath, openStore());
Bindings & autoArgs = *myArgs.getAutoArgs(state);
worker(state, autoArgs, *to, *from);
} catch (Error & e) {
nlohmann::json err;
auto msg = e.msg();
err["error"] = filterANSIEscapes(msg, true);
printError(msg);
writeLine(to->get(), err.dump());
// Don't forget to print it into the STDERR log, this is
// what's shown in the Hydra UI.
writeLine(to->get(), "restart");
}
},
ProcessOptions { .allowVfork = false });
from = std::move(fromPipe.readSide);
to = std::move(toPipe.writeSide);
debug("created worker process %d", pid);
}
/* Check whether the existing worker process is still there. */
auto s = readLine(from.get());
if (s == "restart") {
pid = -1;
continue;
} else if (s != "next") {
auto json = nlohmann::json::parse(s);
throw Error("worker error: %s", (std::string) json["error"]);
}
/* Wait for a job name to become available. */
std::string attrPath;
while (true) {
checkInterrupt();
auto state(state_.lock());
if ((state->todo.empty() && state->active.empty()) || state->exc) {
writeLine(to.get(), "exit");
return;
}
if (!state->todo.empty()) {
attrPath = *state->todo.begin();
state->todo.erase(state->todo.begin());
state->active.insert(attrPath);
break;
} else
state.wait(wakeup);
}
/* Tell the worker to evaluate it. */
writeLine(to.get(), "do " + attrPath);
/* Wait for the response. */
auto response = nlohmann::json::parse(readLine(from.get()));
/* Handle the response. */
StringSet newAttrs;
if (response.find("job") != response.end()) {
auto state(state_.lock());
state->jobs[attrPath] = response["job"];
}
if (response.find("attrs") != response.end()) {
for (auto & i : response["attrs"]) {
std::string path = i;
if (path.find(".") != std::string::npos){
path = "\"" + path + "\"";
}
auto s = (attrPath.empty() ? "" : attrPath + ".") + (std::string) path;
newAttrs.insert(s);
}
}
if (response.find("error") != response.end()) {
auto state(state_.lock());
state->jobs[attrPath]["error"] = response["error"];
}
/* Add newly discovered job names to the queue. */
{
auto state(state_.lock());
state->active.erase(attrPath);
for (auto & s : newAttrs)
state->todo.insert(s);
wakeup.notify_all();
}
}
} catch (...) {
check_pid_status_nonblocking(pid);
auto state(state_.lock());
state->exc = std::current_exception();
wakeup.notify_all();
}
};
std::vector<std::thread> threads;
for (size_t i = 0; i < nrWorkers; i++)
threads.emplace_back(std::thread(handler));
for (auto & thread : threads)
thread.join();
auto state(state_.lock());
if (state->exc)
std::rethrow_exception(state->exc);
/* For aggregate jobs that have named constituents
(i.e. constituents that are a job name rather than a
derivation), look up the referenced job and add it to the
dependencies of the aggregate derivation. */
auto store = openStore();
for (auto i = state->jobs.begin(); i != state->jobs.end(); ++i) {
auto jobName = i.key();
auto & job = i.value();
auto named = job.find("namedConstituents");
if (named == job.end()) continue;
std::unordered_map<std::string, std::string> brokenJobs;
auto getNonBrokenJobOrRecordError = [&brokenJobs, &jobName, &state](
const std::string & childJobName) -> std::optional<nlohmann::json> {
auto childJob = state->jobs.find(childJobName);
if (childJob == state->jobs.end()) {
printError("aggregate job '%s' references non-existent job '%s'", jobName, childJobName);
brokenJobs[childJobName] = "does not exist";
return std::nullopt;
}
if (childJob->find("error") != childJob->end()) {
std::string error = (*childJob)["error"];
printError("aggregate job '%s' references broken job '%s': %s", jobName, childJobName, error);
brokenJobs[childJobName] = error;
return std::nullopt;
}
return *childJob;
};
if (myArgs.dryRun) {
for (std::string jobName2 : *named) {
auto job2 = getNonBrokenJobOrRecordError(jobName2);
if (!job2) {
continue;
}
std::string drvPath2 = (*job2)["drvPath"];
job["constituents"].push_back(drvPath2);
}
} else {
auto drvPath = store->parseStorePath((std::string) job["drvPath"]);
auto drv = store->readDerivation(drvPath);
for (std::string jobName2 : *named) {
auto job2 = getNonBrokenJobOrRecordError(jobName2);
if (!job2) {
continue;
}
auto drvPath2 = store->parseStorePath((std::string) (*job2)["drvPath"]);
auto drv2 = store->readDerivation(drvPath2);
job["constituents"].push_back(store->printStorePath(drvPath2));
drv.inputDrvs.map[drvPath2].value = {drv2.outputs.begin()->first};
}
if (brokenJobs.empty()) {
std::string drvName(drvPath.name());
assert(hasSuffix(drvName, drvExtension));
drvName.resize(drvName.size() - drvExtension.size());
auto hashModulo = hashDerivationModulo(*store, drv, true);
if (hashModulo.kind != DrvHash::Kind::Regular) continue;
auto h = hashModulo.hashes.find("out");
if (h == hashModulo.hashes.end()) continue;
auto outPath = store->makeOutputPath("out", h->second, drvName);
drv.env["out"] = store->printStorePath(outPath);
drv.outputs.insert_or_assign("out", DerivationOutput::InputAddressed { .path = outPath });
auto newDrvPath = store->printStorePath(writeDerivation(*store, drv));
debug("rewrote aggregate derivation %s -> %s", store->printStorePath(drvPath), newDrvPath);
job["drvPath"] = newDrvPath;
job["outputs"]["out"] = store->printStorePath(outPath);
}
}
job.erase("namedConstituents");
/* Register the derivation as a GC root. !!! This
registers roots for jobs that we may have already
done. */
auto localStore = store.dynamic_pointer_cast<LocalFSStore>();
if (gcRootsDir != "" && localStore) {
auto drvPath = job["drvPath"].get<std::string>();
Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath));
if (!pathExists(root))
localStore->addPermRoot(localStore->parseStorePath(drvPath), root);
}
if (!brokenJobs.empty()) {
std::stringstream ss;
for (const auto& [jobName, error] : brokenJobs) {
ss << jobName << ": " << error << "\n";
}
job["error"] = ss.str();
}
}
std::cout << state->jobs.dump(2) << "\n";
});
}


@@ -1,5 +0,0 @@
bin_PROGRAMS = hydra-evaluator
hydra_evaluator_SOURCES = hydra-evaluator.cc
hydra_evaluator_LDADD = $(NIX_LIBS) -lpqxx
hydra_evaluator_CXXFLAGS = $(NIX_CFLAGS) -Wall -I ../libhydra -Wno-deprecated-declarations


@@ -0,0 +1,9 @@
hydra_evaluator = executable('hydra-evaluator',
'hydra-evaluator.cc',
dependencies: [
libhydra_dep,
nix_dep,
pqxx_dep,
],
install: true,
)


@@ -1,8 +0,0 @@
bin_PROGRAMS = hydra-queue-runner
hydra_queue_runner_SOURCES = hydra-queue-runner.cc queue-monitor.cc dispatcher.cc \
builder.cc build-result.cc build-remote.cc \
hydra-build-result.hh counter.hh state.hh db.hh \
nar-extractor.cc nar-extractor.hh
hydra_queue_runner_LDADD = $(NIX_LIBS) -lpqxx -lprometheus-cpp-pull -lprometheus-cpp-core
hydra_queue_runner_CXXFLAGS = $(NIX_CFLAGS) -Wall -I ../libhydra -Wno-deprecated-declarations


@@ -2,6 +2,7 @@
 #include <cmath>
 #include <thread>
 #include <unordered_map>
+#include <unordered_set>

 #include "state.hh"


@@ -0,0 +1,22 @@
srcs = files(
'builder.cc',
'build-remote.cc',
'build-result.cc',
'dispatcher.cc',
'hydra-queue-runner.cc',
'nar-extractor.cc',
'queue-monitor.cc',
)
hydra_queue_runner = executable('hydra-queue-runner',
'hydra-queue-runner.cc',
srcs,
dependencies: [
libhydra_dep,
nix_dep,
pqxx_dep,
prom_cpp_core_dep,
prom_cpp_pull_dep,
],
install: true,
)


@@ -54,32 +54,40 @@ struct Extractor : FileSystemObjectSink
     };

     NarMemberDatas & members;
-    Path prefix;
+    std::filesystem::path prefix;
+
+    Path toKey(const CanonPath & path)
+    {
+        std::filesystem::path p = prefix;
+        // Conditional to avoid trailing slash
+        if (!path.isRoot()) p /= path.rel();
+        return p;
+    }

     Extractor(NarMemberDatas & members, const Path & prefix)
         : members(members), prefix(prefix)
     { }

-    void createDirectory(const Path & path) override
+    void createDirectory(const CanonPath & path) override
     {
-        members.insert_or_assign(prefix + path, NarMemberData { .type = SourceAccessor::Type::tDirectory });
+        members.insert_or_assign(toKey(path), NarMemberData { .type = SourceAccessor::Type::tDirectory });
     }

-    void createRegularFile(const Path & path, std::function<void(CreateRegularFileSink &)> func) override
+    void createRegularFile(const CanonPath & path, std::function<void(CreateRegularFileSink &)> func) override
     {
         NarMemberConstructor nmc {
-            members.insert_or_assign(prefix + path, NarMemberData {
+            members.insert_or_assign(toKey(path), NarMemberData {
                 .type = SourceAccessor::Type::tRegular,
                 .fileSize = 0,
-                .contents = filesToKeep.count(path) ? std::optional("") : std::nullopt,
+                .contents = filesToKeep.count(path.abs()) ? std::optional("") : std::nullopt,
             }).first->second,
         };
         func(nmc);
     }

-    void createSymlink(const Path & path, const std::string & target) override
+    void createSymlink(const CanonPath & path, const std::string & target) override
     {
-        members.insert_or_assign(prefix + path, NarMemberData { .type = SourceAccessor::Type::tSymlink });
+        members.insert_or_assign(toKey(path), NarMemberData { .type = SourceAccessor::Type::tSymlink });
     }
 };


@@ -22,6 +22,7 @@
 #include "nar-extractor.hh"
 #include "serve-protocol.hh"
 #include "serve-protocol-impl.hh"
+#include "serve-protocol-connection.hh"
 #include "machines.hh"


@@ -95,6 +95,7 @@ sub get_legacy_ldap_config {
             "hydra_bump-to-front" => [ "bump-to-front" ],
             "hydra_cancel-build" => [ "cancel-build" ],
             "hydra_create-projects" => [ "create-projects" ],
+            "hydra_eval-jobset" => [ "eval-jobset" ],
             "hydra_restart-jobs" => [ "restart-jobs" ],
         },
     };
@@ -159,6 +160,7 @@ sub valid_roles {
         "bump-to-front",
         "cancel-build",
         "create-projects",
+        "eval-jobset",
         "restart-jobs",
     ];
 }


@@ -239,6 +239,8 @@ sub triggerJobset {
 sub push : Chained('api') PathPart('push') Args(0) {
     my ($self, $c) = @_;

+    requirePost($c);
+
     $c->{stash}->{json}->{jobsetsTriggered} = [];

     my $force = exists $c->request->query_params->{force};
@@ -246,19 +248,24 @@ sub push : Chained('api') PathPart('push') Args(0) {
     foreach my $s (@jobsets) {
         my ($p, $j) = parseJobsetName($s);
         my $jobset = $c->model('DB::Jobsets')->find($p, $j);
+        requireEvalJobsetPrivileges($c, $jobset->project);
         next unless defined $jobset && ($force || ($jobset->project->enabled && $jobset->enabled));
         triggerJobset($self, $c, $jobset, $force);
     }

     my @repos = split /,/, ($c->request->query_params->{repos} // "");
     foreach my $r (@repos) {
-        triggerJobset($self, $c, $_, $force) foreach $c->model('DB::Jobsets')->search(
+        my @jobsets = $c->model('DB::Jobsets')->search(
             { 'project.enabled' => 1, 'me.enabled' => 1 },
             {
                 join => 'project',
                 where => \ [ 'exists (select 1 from JobsetInputAlts where project = me.project and jobset = me.name and value = ?)', [ 'value', $r ] ],
                 order_by => 'me.id DESC'
             });
+        foreach my $jobset (@jobsets) {
+            requireEvalJobsetPrivileges($c, $jobset->project);
+            triggerJobset($self, $c, $jobset, $force)
+        }
     }

     $self->status_ok(
@@ -285,6 +292,23 @@ sub push_github : Chained('api') PathPart('push-github') Args(0) {
     $c->response->body("");
 }

+sub push_gitea : Chained('api') PathPart('push-gitea') Args(0) {
+    my ($self, $c) = @_;
+
+    $c->{stash}->{json}->{jobsetsTriggered} = [];
+
+    my $in = $c->request->{data};
+    my $url = $in->{repository}->{clone_url} or die;
+    $url =~ s/.git$//;
+    print STDERR "got push from Gitea repository $url\n";
+
+    triggerJobset($self, $c, $_, 0) foreach $c->model('DB::Jobsets')->search(
+        { 'project.enabled' => 1, 'me.enabled' => 1 },
+        { join => 'project'
+        , where => \ [ 'me.flake like ? or exists (select 1 from JobsetInputAlts where project = me.project and jobset = me.name and value like ?)', [ 'flake', "%$url%"], [ 'value', "%$url%" ] ]
+        });
+
+    $c->response->body("");
+}
+
 1;


@@ -35,6 +35,7 @@ sub noLoginNeeded {
     return $whitelisted ||
         $c->request->path eq "api/push-github" ||
+        $c->request->path eq "api/push-gitea" ||
         $c->request->path eq "google-login" ||
         $c->request->path eq "github-redirect" ||
         $c->request->path eq "github-login" ||
@@ -50,6 +51,7 @@ sub begin :Private {
     $c->stash->{curUri} = $c->request->uri;
     $c->stash->{version} = $ENV{"HYDRA_RELEASE"} || "<devel>";
     $c->stash->{nixVersion} = $ENV{"NIX_RELEASE"} || "<devel>";
+    $c->stash->{nixEvalJobsVersion} = $ENV{"NIX_EVAL_JOBS_RELEASE"} || "<devel>";
     $c->stash->{curTime} = time;
     $c->stash->{logo} = defined $c->config->{hydra_logo} ? "/logo" : "";
     $c->stash->{tracker} = defined $c->config->{tracker} ? $c->config->{tracker} : "";
@@ -80,7 +82,7 @@ sub begin :Private {
     $_->supportedInputTypes($c->stash->{inputTypes}) foreach @{$c->hydra_plugins};

     # XSRF protection: require POST requests to have the same origin.
-    if ($c->req->method eq "POST" && $c->req->path ne "api/push-github") {
+    if ($c->req->method eq "POST" && $c->req->path ne "api/push-github" && $c->req->path ne "api/push-gitea") {
         my $referer = $c->req->header('Referer');
         $referer //= $c->req->header('Origin');
         my $base = $c->req->base;
@@ -329,7 +331,7 @@ sub nar :Local :Args(1) {
     else {
         $path = $Nix::Config::storeDir . "/$path";

-        gone($c, "Path " . $path . " is no longer available.") unless isValidPath($path);
+        gone($c, "Path " . $path . " is no longer available.") unless $MACHINE_LOCAL_STORE->isValidPath($path);

         $c->stash->{current_view} = 'NixNAR';
         $c->stash->{storePath} = $path;


@@ -15,6 +15,7 @@ our @EXPORT = qw(
     forceLogin requireUser requireProjectOwner requireRestartPrivileges requireAdmin requirePost isAdmin isProjectOwner
     requireBumpPrivileges
     requireCancelBuildPrivileges
+    requireEvalJobsetPrivileges
     trim
     getLatestFinishedEval getFirstEval
     paramToList
@@ -186,6 +187,27 @@ sub isProjectOwner {
         defined $c->model('DB::ProjectMembers')->find({ project => $project, userName => $c->user->username }));
 }

+sub hasEvalJobsetRole {
+    my ($c) = @_;
+    return $c->user_exists && $c->check_user_roles("eval-jobset");
+}
+
+sub mayEvalJobset {
+    my ($c, $project) = @_;
+    return
+        $c->user_exists &&
+        (isAdmin($c) ||
+         hasEvalJobsetRole($c) ||
+         isProjectOwner($c, $project));
+}
+
+sub requireEvalJobsetPrivileges {
+    my ($c, $project) = @_;
+    requireUser($c);
+    accessDenied($c, "Only the project members, administrators, and accounts with eval-jobset privileges can perform this operation.")
+        unless mayEvalJobset($c, $project);
+}
+
 sub hasCancelBuildRole {
     my ($c) = @_;
     return $c->user_exists && $c->check_user_roles('cancel-build');
@@ -272,7 +294,7 @@ sub requireAdmin {
 sub requirePost {
     my ($c) = @_;
-    error($c, "Request must be POSTed.") if $c->request->method ne "POST";
+    error($c, "Request must be POSTed.", 405) if $c->request->method ne "POST";
 }


@@ -174,6 +174,9 @@ sub getDrvLogPath {
     for ($fn . $bucketed, $fn . $bucketed . ".bz2") {
         return $_ if -f $_;
     }
+    for ($fn . $bucketed, $fn . $bucketed . ".zst") {
+        return $_ if -f $_;
+    }
     return undef;
 }


@@ -9,11 +9,24 @@ use Hydra::Helper::CatalystUtils;
 sub stepFinished {
     my ($self, $step, $logPath) = @_;
 
-    my $doCompress = $self->{config}->{'compress_build_logs'} // "1";
+    my $doCompress = $self->{config}->{'compress_build_logs'} // '1';
+    my $silent = $self->{config}->{'compress_build_logs_silent'} // '0';
+    my $compression = $self->{config}->{'compress_build_logs_compression'} // 'bzip2';
 
-    if ($doCompress eq "1" && -e $logPath) {
-        print STDERR "compressing $logPath...\n";
-        system("bzip2", "--force", $logPath);
+    if (not -e $logPath or $doCompress ne "1") {
+        return;
+    }
+
+    if ($silent ne '1') {
+        print STDERR "compressing '$logPath' with $compression...\n";
+    }
+
+    if ($compression eq 'bzip2') {
+        system('bzip2', '--force', $logPath);
+    } elsif ($compression eq 'zstd') {
+        system('zstd', '--rm', '--quiet', '-T0', $logPath);
+    } else {
+        print STDERR "unknown compression type '$compression'\n";
     }
 }
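
Note: a minimal hydra.conf sketch exercising the options this hunk reads (the option names come straight from the keys above; the values are illustrative, not defaults being recommended):

    # compress build logs after each step, using zstd instead of bzip2
    compress_build_logs = 1
    compress_build_logs_silent = 0
    compress_build_logs_compression = zstd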


@@ -14,6 +14,7 @@ use Nix::Config;
 use Nix::Store;
 use Hydra::Model::DB;
 use Hydra::Helper::CatalystUtils;
+use Hydra::Helper::Nix;
 
 sub isEnabled {
     my ($self) = @_;
@@ -92,7 +93,7 @@ sub buildFinished {
         my $hash = substr basename($path), 0, 32;
         my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($path, 0);
         my $system;
-        if (defined $deriver and isValidPath($deriver)) {
+        if (defined $deriver and $MACHINE_LOCAL_STORE->isValidPath($deriver)) {
             $system = derivationFromPath($deriver)->{platform};
         }
         foreach my $reference (@{$refs}) {


@@ -46,7 +46,7 @@ sub fetchInput {
     $MACHINE_LOCAL_STORE->addTempRoot($cachedInput->storepath) if defined $cachedInput;
 
-    if (defined $cachedInput && isValidPath($cachedInput->storepath)) {
+    if (defined $cachedInput && $MACHINE_LOCAL_STORE->isValidPath($cachedInput->storepath)) {
         $storePath = $cachedInput->storepath;
         $sha256 = $cachedInput->sha256hash;
     } else {


@@ -6,6 +6,8 @@ use File::Basename;
 use Hydra::Helper::CatalystUtils;
 use MIME::Base64;
 use Nix::Manifest;
+use Nix::Store;
+use Nix::Utils;
 use Hydra::Helper::Nix;
 use base qw/Catalyst::View/;


@@ -16,7 +16,10 @@ sub process {
 
     my $tail = int($c->stash->{tail} // "0");
 
-    if ($logPath =~ /\.bz2$/) {
+    if ($logPath =~ /\.zst$/) {
+        my $doTail = $tail ? "| tail -n '$tail'" : "";
+        open($fh, "-|", "zstd -dc < '$logPath' $doTail") or die;
+    } elsif ($logPath =~ /\.bz2$/) {
         my $doTail = $tail ? "| tail -n '$tail'" : "";
         open($fh, "-|", "bzip2 -dc < '$logPath' $doTail") or die;
     } else {
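
The new branch spawns the same pipeline you would run by hand against a compressed log, roughly (file name illustrative):

    zstd -dc < build-123.log.zst | tail -n 50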


@@ -1,22 +0,0 @@
-PERL_MODULES = \
-    $(wildcard *.pm) \
-    $(wildcard Hydra/*.pm) \
-    $(wildcard Hydra/Helper/*.pm) \
-    $(wildcard Hydra/Model/*.pm) \
-    $(wildcard Hydra/View/*.pm) \
-    $(wildcard Hydra/Schema/*.pm) \
-    $(wildcard Hydra/Schema/Result/*.pm) \
-    $(wildcard Hydra/Schema/ResultSet/*.pm) \
-    $(wildcard Hydra/Controller/*.pm) \
-    $(wildcard Hydra/Base/*.pm) \
-    $(wildcard Hydra/Base/Controller/*.pm) \
-    $(wildcard Hydra/Script/*.pm) \
-    $(wildcard Hydra/Component/*.pm) \
-    $(wildcard Hydra/Event/*.pm) \
-    $(wildcard Hydra/Plugin/*.pm)
-
-EXTRA_DIST = \
-    $(PERL_MODULES)
-
-hydradir = $(libexecdir)/hydra/lib
-nobase_hydra_DATA = $(PERL_MODULES)

src/libhydra/meson.build (new file)

@@ -0,0 +1,5 @@
+libhydra_inc = include_directories('.')
+
+libhydra_dep = declare_dependency(
+  include_directories: [libhydra_inc],
+)

src/meson.build (new file)

@ -0,0 +1,85 @@
# Native code
subdir('libhydra')
subdir('hydra-evaluator')
subdir('hydra-queue-runner')
hydra_libexecdir = get_option('libexecdir') / 'hydra'
# Data and interpreted
foreach dir : ['lib', 'root']
install_subdir(dir,
install_dir: hydra_libexecdir,
)
endforeach
subdir('sql')
subdir('ttf')
# Static files for website
hydra_libexecdir_static = hydra_libexecdir / 'root' / 'static'
## Bootstrap
bootstrap_name = 'bootstrap-4.3.1-dist'
bootstrap = custom_target(
'extract-bootstrap',
input: 'root' / (bootstrap_name + '.zip'),
output: bootstrap_name,
command: ['unzip', '-u', '-d', '@OUTDIR@', '@INPUT@'],
)
custom_target(
'name-bootstrap',
input: bootstrap,
output: 'bootstrap',
command: ['cp', '-r', '@INPUT@' , '@OUTPUT@'],
install: true,
install_dir: hydra_libexecdir_static,
)
## Flot
custom_target(
'extract-flot',
input: 'root' / 'flot-0.8.3.zip',
output: 'flot',
command: ['unzip', '-u', '-d', '@OUTDIR@', '@INPUT@'],
install: true,
install_dir: hydra_libexecdir_static / 'js',
)
## Fontawesome
fontawesome_name = 'fontawesome-free-5.10.2-web'
fontawesome = custom_target(
'extract-fontawesome',
input: 'root' / (fontawesome_name + '.zip'),
output: fontawesome_name,
command: ['unzip', '-u', '-d', '@OUTDIR@', '@INPUT@'],
)
custom_target(
'name-fontawesome-css',
input: fontawesome,
output: 'css',
command: ['cp', '-r', '@INPUT@/css', '@OUTPUT@'],
install: true,
install_dir: hydra_libexecdir_static / 'fontawesome',
)
custom_target(
'name-fontawesome-webfonts',
input: fontawesome,
output: 'webfonts',
command: ['cp', '-r', '@INPUT@/webfonts', '@OUTPUT@'],
install: true,
install_dir: hydra_libexecdir_static / 'fontawesome',
)
# Scripts
install_subdir('script',
install_dir: get_option('bindir'),
exclude_files: [
'hydra-dev-server',
],
install_mode: 'rwxr-xr-x',
strip_directory: true,
)

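With the Autotools files removed, the usual build workflow becomes the standard Meson one, roughly (build directory and prefix illustrative):

    meson setup build --prefix=/opt/hydra
    ninja -C build
    meson install -C build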

@@ -1,39 +0,0 @@
-TEMPLATES = $(wildcard *.tt)
-STATIC = \
-  $(wildcard static/images/*) \
-  $(wildcard static/css/*) \
-  static/js/bootbox.min.js \
-  static/js/popper.min.js \
-  static/js/common.js \
-  static/js/jquery/jquery-3.4.1.min.js \
-  static/js/jquery/jquery-ui-1.10.4.min.js
-
-FLOT = flot-0.8.3.zip
-BOOTSTRAP = bootstrap-4.3.1-dist.zip
-FONTAWESOME = fontawesome-free-5.10.2-web.zip
-
-ZIPS = $(FLOT) $(BOOTSTRAP) $(FONTAWESOME)
-
-EXTRA_DIST = $(TEMPLATES) $(STATIC) $(ZIPS)
-
-hydradir = $(libexecdir)/hydra/root
-nobase_hydra_DATA = $(EXTRA_DIST)
-
-all:
-	mkdir -p $(srcdir)/static/js
-	unzip -u -d $(srcdir)/static $(BOOTSTRAP)
-	rm -rf $(srcdir)/static/bootstrap
-	mv $(srcdir)/static/$(basename $(BOOTSTRAP)) $(srcdir)/static/bootstrap
-	unzip -u -d $(srcdir)/static/js $(FLOT)
-	unzip -u -d $(srcdir)/static $(FONTAWESOME)
-	rm -rf $(srcdir)/static/fontawesome
-	mv $(srcdir)/static/$(basename $(FONTAWESOME)) $(srcdir)/static/fontawesome
-
-install-data-local: $(ZIPS)
-	mkdir -p $(hydradir)/static/js
-	cp -prvd $(srcdir)/static/js/* $(hydradir)/static/js
-	mkdir -p $(hydradir)/static/bootstrap
-	cp -prvd $(srcdir)/static/bootstrap/* $(hydradir)/static/bootstrap
-	mkdir -p $(hydradir)/static/fontawesome/{css,webfonts}
-	cp -prvd $(srcdir)/static/fontawesome/css/* $(hydradir)/static/fontawesome/css
-	cp -prvd $(srcdir)/static/fontawesome/webfonts/* $(hydradir)/static/fontawesome/webfonts


@@ -374,7 +374,7 @@ BLOCK renderInputDiff; %]
       [% ELSIF bi1.uri == bi2.uri && bi1.revision != bi2.revision %]
         [% IF bi1.type == "git" %]
           <tr><td>
-            <b>[% bi1.name %]</b></td><td><tt>[% INCLUDE renderDiffUri contents=(bi1.revision.substr(0, 8) _ ' to ' _ bi2.revision.substr(0, 8)) %]</tt>
+            <b>[% bi1.name %]</b></td><td><tt>[% INCLUDE renderDiffUri contents=(bi1.revision.substr(0, 12) _ ' to ' _ bi2.revision.substr(0, 12)) %]</tt>
           </td></tr>
         [% ELSE %]
           <tr><td>


@@ -205,6 +205,7 @@
       if (!c) return;
       requestJSON({
         url: "[% HTML.escape(c.uri_for('/api/push', { jobsets = project.name _ ':' _ jobset.name, force = "1" })) %]",
+        type: 'POST',
         success: function(data) {
           bootbox.alert("The jobset has been scheduled for evaluation.");
         }


@@ -93,7 +93,7 @@
     <footer class="navbar">
       <hr />
       <small>
-        <em><a href="http://nixos.org/hydra" target="_blank" class="squiggle">Hydra</a> [% HTML.escape(version) %] (using [% HTML.escape(nixVersion) %]).</em>
+        <em><a href="http://nixos.org/hydra" target="_blank" class="squiggle">Hydra</a> [% HTML.escape(version) %] (using [% HTML.escape(nixVersion) %] and [% HTML.escape(nixEvalJobsVersion) %]).</em>
         [% IF c.user_exists %]
         You are signed in as <tt>[% HTML.escape(c.user.username) %]</tt>
         [%- IF c.user.type == 'google' %] via Google[% END %].


@@ -7,7 +7,7 @@ main() {
   set -e
 
-  tmpDir=${TMPDIR:-/tmp}/build-[% build.id +%]
+  tmpDir=$(realpath "${TMPDIR:-/tmp}")/build-[% build.id +%]
 
   declare -a args extraArgs


@@ -91,6 +91,7 @@
             [% INCLUDE roleoption mutable=mutable role="restart-jobs" %]
             [% INCLUDE roleoption mutable=mutable role="bump-to-front" %]
             [% INCLUDE roleoption mutable=mutable role="cancel-build" %]
+            [% INCLUDE roleoption mutable=mutable role="eval-jobset" %]
           </p>
         </div>
       </div>


@@ -1,19 +0,0 @@
-EXTRA_DIST = \
-  $(distributable_scripts)
-
-distributable_scripts = \
-  hydra-backfill-ids \
-  hydra-init \
-  hydra-eval-jobset \
-  hydra-server \
-  hydra-update-gc-roots \
-  hydra-s3-backup-collect-garbage \
-  hydra-create-user \
-  hydra-notify \
-  hydra-send-stats \
-  nix-prefetch-git \
-  nix-prefetch-bzr \
-  nix-prefetch-hg
-
-bin_SCRIPTS = \
-  $(distributable_scripts)


@@ -17,6 +17,7 @@ use Hydra::Helper::Nix;
 use Hydra::Model::DB;
 use Hydra::Plugin;
 use Hydra::Schema;
+use IPC::Run;
 use JSON::MaybeXS;
 use Net::Statsd;
 use Nix::Store;
@@ -357,22 +358,32 @@ sub evalJobs {
     my @cmd;
 
     if (defined $flakeRef) {
-        @cmd = ("hydra-eval-jobs",
-                "--flake", $flakeRef,
-                "--gc-roots-dir", getGCRootsDir,
-                "--max-jobs", 1);
+        my $nix_expr =
+            "let " .
+            "flake = builtins.getFlake (toString \"$flakeRef\"); " .
+            "in " .
+            "flake.hydraJobs " .
+            "or flake.checks " .
+            "or (throw \"flake '$flakeRef' does not provide any Hydra jobs or checks\")";
+
+        @cmd = ("nix-eval-jobs", "--expr", $nix_expr);
     } else {
         my $nixExprInput = $inputInfo->{$nixExprInputName}->[0]
             or die "cannot find the input containing the job expression\n";
 
-        @cmd = ("hydra-eval-jobs",
+        @cmd = ("nix-eval-jobs",
                 "<" . $nixExprInputName . "/" . $nixExprPath . ">",
-                "--gc-roots-dir", getGCRootsDir,
-                "--max-jobs", 1,
                 inputsToArgs($inputInfo));
     }
 
-    push @cmd, "--no-allow-import-from-derivation" if $config->{allow_import_from_derivation} // "true" ne "true";
+    push @cmd, ("--gc-roots-dir", getGCRootsDir);
+    push @cmd, ("--max-jobs", 1);
+    push @cmd, "--meta";
+    push @cmd, "--constituents";
+    push @cmd, "--force-recurse";
+    push @cmd, ("--option", "allow-import-from-derivation", "false") if $config->{allow_import_from_derivation} // "true" ne "true";
+    push @cmd, ("--workers", $config->{evaluator_workers} // 1);
+    push @cmd, ("--max-memory-size", $config->{evaluator_max_memory_size} // 4096);
 
     if (defined $ENV{'HYDRA_DEBUG'}) {
         sub escape {
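For a non-flake jobset, the assembled command ends up looking roughly like this (the expression and GC-roots path are illustrative; the flags are exactly those pushed above, with their defaults):

    nix-eval-jobs '<jobs/default.nix>' \
        --gc-roots-dir /var/lib/hydra/gcroots \
        --max-jobs 1 --meta --constituents --force-recurse \
        --workers 1 --max-memory-size 4096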
@@ -384,14 +395,40 @@ sub evalJobs {
         print STDERR "evaluator: @escaped\n";
     }
 
-    (my $res, my $jobsJSON, my $stderr) = captureStdoutStderr(21600, @cmd);
-    die "hydra-eval-jobs returned " . ($res & 127 ? "signal $res" : "exit code " . ($res >> 8))
-        . ":\n" . ($stderr ? decode("utf-8", $stderr) : "(no output)\n")
-        if $res;
+    my $evalProc = IPC::Run::start \@cmd,
+        '>', IPC::Run::new_chunker, \my $out,
+        '2>', \my $err;
 
-    print STDERR "$stderr";
+    return sub {
+        while (1) {
+            $evalProc->pump;
+            if (!defined $out && !defined $err) {
+                $evalProc->finish;
+                if ($?) {
+                    die "nix-eval-jobs returned " . ($? & 127 ? "signal $?" : "exit code " . ($? >> 8)) . "\n";
+                }
+                return;
+            }
 
-    return decode_json($jobsJSON);
+            if (defined $err) {
+                print STDERR "$err";
+                undef $err;
+            }
+
+            if (defined $out && $out ne '') {
+                my $job;
+                try {
+                    $job = decode_json($out);
+                } catch {
+                    warn "nix-eval-jobs sent invalid JSON.\n parse error: $_\n invalid json: $out\n";
+                };
+                undef $out;
+                if (defined $job) {
+                    return $job;
+                }
+            }
+        }
+    };
 }
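evalJobs thus no longer returns one big decoded hash but a closure that streams jobs one at a time; callers drain it as later hunks in this file do, along the lines of:

    # Sketch of the consumer side: one decoded job per call, undef at end of stream.
    my $jobsIter = evalJobs($name, $inputInfo, $nixExprInput, $nixExprPath, $flakeRef);
    while (defined(my $job = $jobsIter->())) {
        # inspect $job->{attr}, $job->{drvPath}, $job->{error}, ...
    }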
@@ -420,7 +457,7 @@ sub checkBuild {
     my $firstOutputName = $outputNames[0];
     my $firstOutputPath = $buildInfo->{outputs}->{$firstOutputName};
 
-    my $jobName = $buildInfo->{jobName} or die;
+    my $jobName = $buildInfo->{attr} or die;
     my $drvPath = $buildInfo->{drvPath} or die;
 
     my $build;
@@ -474,9 +511,30 @@ sub checkBuild {
         my $time = time();
 
-        sub null {
-            my ($s) = @_;
-            return $s eq "" ? undef : $s;
+        sub getMeta {
+            my ($s, $def) = @_;
+            return ($s || "") eq "" ? $def : $s;
+        }
+
+        sub getMetaStrings {
+            my ($v, $k, $acc) = @_;
+            my $t = ref $v;
+            if ($t eq 'HASH') {
+                push @$acc, $v->{$k} if exists $v->{$k};
+            } elsif ($t eq 'ARRAY') {
+                getMetaStrings($_, $k, $acc) foreach @$v;
+            } elsif (defined $v) {
+                push @$acc, $v;
+            }
+        }
+
+        sub getMetaConcatStrings {
+            my ($v, $k) = @_;
+
+            my @strings;
+            getMetaStrings($v, $k, \@strings);
+            return join(", ", @strings) || undef;
         }
 
         # Add the build to the database.
@@ -484,19 +542,19 @@ sub checkBuild {
             { timestamp => $time
             , jobset_id => $jobset->id
             , job => $jobName
-            , description => null($buildInfo->{description})
-            , license => null($buildInfo->{license})
-            , homepage => null($buildInfo->{homepage})
-            , maintainers => null($buildInfo->{maintainers})
-            , maxsilent => $buildInfo->{maxSilent}
-            , timeout => $buildInfo->{timeout}
-            , nixname => $buildInfo->{nixName}
+            , description => getMeta($buildInfo->{meta}->{description}, undef)
+            , license => getMetaConcatStrings($buildInfo->{meta}->{license}, "shortName")
+            , homepage => getMeta($buildInfo->{meta}->{homepage}, undef)
+            , maintainers => getMetaConcatStrings($buildInfo->{meta}->{maintainers}, "email")
+            , maxsilent => getMeta($buildInfo->{meta}->{maxSilent}, 7200)
+            , timeout => getMeta($buildInfo->{meta}->{timeout}, 36000)
+            , nixname => $buildInfo->{name}
            , drvpath => $drvPath
            , system => $buildInfo->{system}
-            , priority => $buildInfo->{schedulingPriority}
+            , priority => getMeta($buildInfo->{meta}->{schedulingPriority}, 100)
            , finished => 0
            , iscurrent => 1
-            , ischannel => $buildInfo->{isChannel}
+            , ischannel => getMeta($buildInfo->{meta}->{isChannel}, 0)
            });
 
        $build->buildoutputs->create({ name => $_, path => $buildInfo->{outputs}->{$_} })
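The string concatenation these helpers perform is exercised by t/jobs/meta.nix further down in this diff; for example:

    meta = {
      # getMetaConcatStrings(..., "shortName") stores license = "MIT, BSD"
      license = [ { shortName = "MIT"; } "BSD" ];
      # getMetaConcatStrings(..., "email") stores maintainers = "alice@example.com, bob@not.found"
      maintainers = [ "alice@example.com" { email = "bob@not.found"; } ];
    };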
@@ -665,7 +723,7 @@ sub checkJobsetWrapped {
         return;
     }
 
-    # Hash the arguments to hydra-eval-jobs and check the
+    # Hash the arguments to nix-eval-jobs and check the
     # JobsetInputHashes to see if the previous evaluation had the same
     # inputs. If so, bail out.
     my @args = ($jobset->nixexprinput // "", $jobset->nixexprpath // "", inputsToArgs($inputInfo));
@@ -687,19 +745,12 @@ sub checkJobsetWrapped {
     # Evaluate the job expression.
     my $evalStart = clock_gettime(CLOCK_MONOTONIC);
-    my $jobs = evalJobs($project->name . ":" . $jobset->name, $inputInfo, $jobset->nixexprinput, $jobset->nixexprpath, $flakeRef);
-    my $evalStop = clock_gettime(CLOCK_MONOTONIC);
-
-    if ($jobsetsJobset) {
-        my @keys = keys %$jobs;
-        die "The .jobsets jobset must only have a single job named 'jobsets'"
-            unless (scalar @keys) == 1 && $keys[0] eq "jobsets";
-    }
-
-    Net::Statsd::timing("hydra.evaluator.eval_time", int(($evalStop - $evalStart) * 1000));
+    my $evalStop;
+    my $jobsIter = evalJobs($project->name . ":" . $jobset->name, $inputInfo, $jobset->nixexprinput, $jobset->nixexprpath, $flakeRef);
 
     if ($dryRun) {
-        foreach my $name (keys %{$jobs}) {
-            my $job = $jobs->{$name};
+        while (defined(my $job = $jobsIter->())) {
+            my $name = $job->{attr};
             if (defined $job->{drvPath}) {
                 print STDERR "good job $name: $job->{drvPath}\n";
             } else {
@@ -709,36 +760,20 @@ sub checkJobsetWrapped {
         return;
     }
 
-    die "Jobset contains a job with an empty name. Make sure the jobset evaluates to an attrset of jobs.\n"
-        if defined $jobs->{""};
-
-    $jobs->{$_}->{jobName} = $_ for keys %{$jobs};
-
-    my $jobOutPathMap = {};
-    my $jobsetChanged = 0;
-    my $dbStart = clock_gettime(CLOCK_MONOTONIC);
-
     # Store the error messages for jobs that failed to evaluate.
     my $evaluationErrorTime = time;
     my $evaluationErrorMsg = "";
-    foreach my $job (values %{$jobs}) {
-        next unless defined $job->{error};
-        $evaluationErrorMsg .=
-            ($job->{jobName} ne "" ? "in job $job->{jobName}" : "at top-level") .
-            ":\n" . $job->{error} . "\n\n";
-    }
-    setJobsetError($jobset, $evaluationErrorMsg, $evaluationErrorTime);
-
     my $evaluationErrorRecord = $db->resultset('EvaluationErrors')->create(
         { errormsg => $evaluationErrorMsg
         , errortime => $evaluationErrorTime
         }
     );
 
+    my $jobOutPathMap = {};
+    my $jobsetChanged = 0;
     my %buildMap;
 
     $db->txn_do(sub {
         my $prevEval = getPrevJobsetEval($db, $jobset, 1);
 
         # Clear the "current" flag on all builds. Since we're in a
@@ -751,7 +786,7 @@ sub checkJobsetWrapped {
             , evaluationerror => $evaluationErrorRecord
             , timestamp => time
             , checkouttime => abs(int($checkoutStop - $checkoutStart))
-            , evaltime => abs(int($evalStop - $evalStart))
+            , evaltime => 0
             , hasnewbuilds => 0
             , nrbuilds => 0
             , flake => $flakeRef
@@ -759,11 +794,24 @@ sub checkJobsetWrapped {
             , nixexprpath => $jobset->nixexprpath
             });
 
-        # Schedule each successfully evaluated job.
-        foreach my $job (permute(values %{$jobs})) {
-            next if defined $job->{error};
-            #print STDERR "considering job " . $project->name, ":", $jobset->name, ":", $job->{jobName} . "\n";
-            checkBuild($db, $jobset, $ev, $inputInfo, $job, \%buildMap, $prevEval, $jobOutPathMap, $plugins);
+        my @jobsWithConstituents;
+
+        while (defined(my $job = $jobsIter->())) {
+            if ($jobsetsJobset) {
+                die "The .jobsets jobset must only have a single job named 'jobsets'"
+                    unless $job->{attr} eq "jobsets";
+            }
+
+            $evaluationErrorMsg .=
+                ($job->{attr} ne "" ? "in job $job->{attr}" : "at top-level") .
+                ":\n" . $job->{error} . "\n\n" if defined $job->{error};
+
+            checkBuild($db, $jobset, $ev, $inputInfo, $job, \%buildMap, $prevEval, $jobOutPathMap, $plugins)
+                unless defined $job->{error};
+
+            if (defined $job->{constituents}) {
+                push @jobsWithConstituents, $job;
+            }
         }
 
         # Have any builds been added or removed since last time?
@@ -801,21 +849,20 @@ sub checkJobsetWrapped {
                 $drvPathToId{$x->{drvPath}} = $x;
             }
 
-            foreach my $job (values %{$jobs}) {
-                next unless $job->{constituents};
+            foreach my $job (values @jobsWithConstituents) {
+                next unless defined $job->{constituents};
 
                 if (defined $job->{error}) {
-                    die "aggregate job $job->{jobName} failed with the error: $job->{error}\n";
+                    die "aggregate job $job->{attr} failed with the error: $job->{error}\n";
                 }
 
                 my $x = $drvPathToId{$job->{drvPath}} or
-                    die "aggregate job $job->{jobName} has no corresponding build record.\n";
+                    die "aggregate job $job->{attr} has no corresponding build record.\n";
                 foreach my $drvPath (@{$job->{constituents}}) {
                     my $constituent = $drvPathToId{$drvPath};
                     if (defined $constituent) {
                         $db->resultset('AggregateConstituents')->update_or_create({aggregate => $x->{id}, constituent => $constituent->{id}});
                     } else {
-                        warn "aggregate job $job->{jobName} has a constituent $drvPath that doesn't correspond to a Hydra build\n";
+                        warn "aggregate job $job->{attr} has a constituent $drvPath that doesn't correspond to a Hydra build\n";
                     }
                 }
             }
@@ -857,11 +904,15 @@ sub checkJobsetWrapped {
         $jobset->update({ enabled => 0 }) if $jobset->enabled == 2;
 
         $jobset->update({ lastcheckedtime => time, forceeval => undef });
+
+        $evaluationErrorRecord->update({ errormsg => $evaluationErrorMsg });
+        setJobsetError($jobset, $evaluationErrorMsg, $evaluationErrorTime);
+
+        $evalStop = clock_gettime(CLOCK_MONOTONIC);
+        $ev->update({ evaltime => abs(int($evalStop - $evalStart)) });
     });
 
-    my $dbStop = clock_gettime(CLOCK_MONOTONIC);
-
-    Net::Statsd::timing("hydra.evaluator.db_time", int(($dbStop - $dbStart) * 1000));
+    Net::Statsd::timing("hydra.evaluator.eval_time", int(($evalStop - $evalStart) * 1000));
     Net::Statsd::increment("hydra.evaluator.evals");
     Net::Statsd::increment("hydra.evaluator.cached_evals") unless $jobsetChanged;
 }


@@ -78,7 +78,7 @@ fi
 
 init_remote(){
     local url=$1;
-    git init;
+    git init --initial-branch=trunk;
     git remote add origin $url;
 }


@@ -1,9 +0,0 @@
-sqldir = $(libexecdir)/hydra/sql
-nobase_dist_sql_DATA = \
-  hydra.sql \
-  test.sql \
-  upgrade-*.sql \
-  update-dbix.pl
-
-update-dbix: hydra.sql
-	./update-dbix-harness.sh

src/sql/meson.build (new file)

@@ -0,0 +1,90 @@
+sql_files = files(
+  'hydra.sql',
+  'test.sql',
+  'update-dbix.pl',
+  'upgrade-2.sql',
+  'upgrade-3.sql',
+  'upgrade-4.sql',
+  'upgrade-5.sql',
+  'upgrade-6.sql',
+  'upgrade-7.sql',
+  'upgrade-8.sql',
+  'upgrade-9.sql',
+  'upgrade-10.sql',
+  'upgrade-11.sql',
+  'upgrade-12.sql',
+  'upgrade-13.sql',
+  'upgrade-14.sql',
+  'upgrade-15.sql',
+  'upgrade-16.sql',
+  'upgrade-17.sql',
+  'upgrade-18.sql',
+  'upgrade-19.sql',
+  'upgrade-20.sql',
+  'upgrade-21.sql',
+  'upgrade-22.sql',
+  'upgrade-23.sql',
+  'upgrade-24.sql',
+  'upgrade-25.sql',
+  'upgrade-26.sql',
+  'upgrade-27.sql',
+  'upgrade-28.sql',
+  'upgrade-29.sql',
+  'upgrade-30.sql',
+  'upgrade-31.sql',
+  'upgrade-32.sql',
+  'upgrade-33.sql',
+  'upgrade-34.sql',
+  'upgrade-35.sql',
+  'upgrade-36.sql',
+  'upgrade-37.sql',
+  'upgrade-38.sql',
+  'upgrade-39.sql',
+  'upgrade-40.sql',
+  'upgrade-41.sql',
+  'upgrade-42.sql',
+  'upgrade-43.sql',
+  'upgrade-44.sql',
+  'upgrade-45.sql',
+  'upgrade-46.sql',
+  'upgrade-47.sql',
+  'upgrade-48.sql',
+  'upgrade-49.sql',
+  'upgrade-50.sql',
+  'upgrade-51.sql',
+  'upgrade-52.sql',
+  'upgrade-53.sql',
+  'upgrade-54.sql',
+  'upgrade-55.sql',
+  'upgrade-56.sql',
+  'upgrade-57.sql',
+  'upgrade-58.sql',
+  'upgrade-59.sql',
+  'upgrade-60.sql',
+  'upgrade-61.sql',
+  'upgrade-62.sql',
+  'upgrade-63.sql',
+  'upgrade-64.sql',
+  'upgrade-65.sql',
+  'upgrade-66.sql',
+  'upgrade-67.sql',
+  'upgrade-68.sql',
+  'upgrade-69.sql',
+  'upgrade-70.sql',
+  'upgrade-71.sql',
+  'upgrade-72.sql',
+  'upgrade-73.sql',
+  'upgrade-74.sql',
+  'upgrade-75.sql',
+  'upgrade-76.sql',
+  'upgrade-77.sql',
+  'upgrade-78.sql',
+  'upgrade-79.sql',
+  'upgrade-80.sql',
+  'upgrade-81.sql',
+  'upgrade-82.sql',
+  'upgrade-83.sql',
+  'upgrade-84.sql',
+)
+
+install_data(sql_files, install_dir: hydra_libexecdir / 'sql')


@@ -1,4 +0,0 @@
-EXTRA_DIST = COPYING.LIB StayPuft.ttf
-
-ttfdir = $(libexecdir)/hydra/ttf
-nobase_ttf_DATA = $(EXTRA_DIST)

src/ttf/meson.build (new file)

@@ -0,0 +1,5 @@
+data_files = files(
+  'StayPuft.ttf',
+  'COPYING.LIB',
+)
+install_data(data_files, install_dir: hydra_libexecdir / 'ttf')


@@ -57,6 +57,7 @@ subtest "getLDAPConfig" => sub {
                 "hydra_cancel-build" => [ "cancel-build" ],
                 "hydra_create-projects" => [ "create-projects" ],
                 "hydra_restart-jobs" => [ "restart-jobs" ],
+                "hydra_eval-jobset" => [ "eval-jobset" ],
             }
         },
         "The empty file and set env var make legacy mode active."
@@ -177,6 +178,7 @@ subtest "get_legacy_ldap_config" => sub {
                 "hydra_cancel-build" => [ "cancel-build" ],
                 "hydra_create-projects" => [ "create-projects" ],
                 "hydra_restart-jobs" => [ "restart-jobs" ],
+                "hydra_eval-jobset" => [ "eval-jobset" ],
             }
         },
         "Legacy, default role maps are applied."


@@ -22,9 +22,24 @@ sub is_json {
 }
 
 my $ctx = test_context();
 
 Catalyst::Test->import('Hydra');
 
+# Create a user to log in to
+my $user = $ctx->db->resultset('Users')->create({ username => 'alice', emailaddress => 'alice@example.com', password => '!' });
+$user->setPassword('foobar');
+$user->userroles->update_or_create({ role => 'admin' });
+
+# Login and save cookie for future requests
+my $req = request(POST '/login',
+    Referer => 'http://localhost/',
+    Content => {
+        username => 'alice',
+        password => 'foobar'
+    }
+);
+is($req->code, 302, "The login redirects");
+my $cookie = $req->header("set-cookie");
+
 my $finishedBuilds = $ctx->makeAndEvaluateJobset(
     expression => "one-job.nix",
     build => 1
@@ -109,7 +124,10 @@ subtest "/api/push" => sub {
         my $jobsetName = $jobset->name;
         is($jobset->forceeval, undef, "The existing jobset is not set to be forced to eval");
 
-        my $response = request(GET "/api/push?jobsets=$projectName:$jobsetName&force=1");
+        my $response = request(POST "/api/push?jobsets=$projectName:$jobsetName&force=1",
+            Cookie => $cookie,
+            Referer => 'http://localhost/',
+        );
         ok($response->is_success, "The API enpdoint for triggering jobsets returns 200.");
 
         my $data = is_json($response);
@@ -128,7 +146,10 @@ subtest "/api/push" => sub {
         print STDERR $repo;
 
-        my $response = request(GET "/api/push?repos=$repo&force=1");
+        my $response = request(POST "/api/push?repos=$repo&force=1",
+            Cookie => $cookie,
+            Referer => 'http://localhost/',
+        );
         ok($response->is_success, "The API enpdoint for triggering jobsets returns 200.");
 
         my $data = is_json($response);


@@ -54,13 +54,14 @@ subtest "/job/PROJECT/JOBSET/JOB/shield" => sub {
 
 subtest "/job/PROJECT/JOBSET/JOB/prometheus" => sub {
     my $response = request(GET '/job/' . $project->name . '/' . $jobset->name . '/' . $build->job . '/prometheus');
-    ok($response->is_success, "The page showing the job's prometheus data returns 200.");
-    my $metrics = $response->content;
-    ok($metrics =~ m/hydra_job_failed\{.*\} 0/);
-    ok($metrics =~ m/hydra_job_completion_time\{.*\} [\d]+/);
-    ok($metrics =~ m/hydra_build_closure_size\{.*\} 96/);
-    ok($metrics =~ m/hydra_build_output_size\{.*\} 96/);
+
+    ok($response->is_success, "The page showing the job's prometheus data returns 200.");
+
+    my $metrics = $response->content;
+    like($metrics, qr/hydra_job_failed\{.*\} 0/);
+    like($metrics, qr/hydra_job_completion_time\{.*\} [\d]+/);
+    like($metrics, qr/hydra_build_closure_size\{.*\} 96/);
+    like($metrics, qr/hydra_build_output_size\{.*\} 96/);
 };
 
 done_testing;


@@ -186,7 +186,7 @@ subtest 'Update jobset "job" to have an invalid input type' => sub {
         })
     );
     ok(!$jobsetupdate->is_success);
-    ok($jobsetupdate->content =~ m/Invalid input type.*valid types:/);
+    like($jobsetupdate->content, qr/Invalid input type.*valid types:/);
 };


@@ -24,7 +24,7 @@ my $cookie = $login->header("set-cookie");
 my $my_jobs = request(GET '/dashboard/alice/my-jobs-tab', Accept => 'application/json', Cookie => $cookie);
 ok($my_jobs->is_success);
 my $content = $my_jobs->content();
-ok($content =~ /empty_dir/);
+like($content, qr/empty_dir/);
 ok(!($content =~ /fails/));
 ok(!($content =~ /succeed_with_failed/));
 done_testing;


@@ -24,6 +24,7 @@ $ldap->add_group("hydra_create-projects", $users->{"many_roles"}->{"username"});
 $ldap->add_group("hydra_restart-jobs", $users->{"many_roles"}->{"username"});
 $ldap->add_group("hydra_bump-to-front", $users->{"many_roles"}->{"username"});
 $ldap->add_group("hydra_cancel-build", $users->{"many_roles"}->{"username"});
+$ldap->add_group("hydra_eval-jobset", $users->{"many_roles"}->{"username"});
 
 my $hydra_ldap_config = "${\$ldap->tmpdir()}/hydra_ldap_config.yaml";
 LDAPContext::write_file($hydra_ldap_config, <<YAML);
@@ -68,7 +69,7 @@ subtest "Valid login attempts" => sub {
         unrelated => [],
         admin => ["admin"],
         not_admin => [],
-        many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build" ],
+        many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build", "eval-jobset" ],
     );
     for my $username (keys %users_to_roles) {
         my $user = $users->{$username};


@@ -24,6 +24,7 @@ $ldap->add_group("hydra_create-projects", $users->{"many_roles"}->{"username"});
 $ldap->add_group("hydra_restart-jobs", $users->{"many_roles"}->{"username"});
 $ldap->add_group("hydra_bump-to-front", $users->{"many_roles"}->{"username"});
 $ldap->add_group("hydra_cancel-build", $users->{"many_roles"}->{"username"});
+$ldap->add_group("hydra_eval-jobset", $users->{"many_roles"}->{"username"});
 
 my $ctx = test_context(
@@ -76,10 +77,12 @@ my $ctx = test_context(
             hydra_cancel-build = cancel-build
             hydra_bump-to-front = bump-to-front
             hydra_restart-jobs = restart-jobs
+            hydra_eval-jobset = eval-jobset
 
             hydra_one_group_many_roles = create-projects
             hydra_one_group_many_roles = cancel-build
             hydra_one_group_many_roles = bump-to-front
+            hydra_one_group_many-roles = eval-jobset
         </role_mapping>
     </ldap>
 CFG
@@ -92,7 +95,7 @@ subtest "Valid login attempts" => sub {
         unrelated => [],
         admin => ["admin"],
         not_admin => [],
-        many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build" ],
+        many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build", "eval-jobset" ],
         many_roles_one_group => [ "create-projects", "bump-to-front", "cancel-build" ],
     );
     for my $username (keys %users_to_roles) {


@@ -1,39 +0,0 @@
-TESTS_ENVIRONMENT = \
-  BZR_HOME="$(abs_builddir)/data" \
-  HYDRA_DBI="dbi:Pg:dbname=hydra-test-suite;port=6433" \
-  HYDRA_DATA="$(abs_builddir)/data" \
-  HYDRA_HOME="$(top_srcdir)/src" \
-  HYDRA_CONFIG= \
-  NIX_REMOTE= \
-  NIX_REMOTE_SYSTEMS= \
-  NIX_CONF_DIR="$(abs_builddir)/nix/etc/nix" \
-  NIX_STATE_DIR="$(abs_builddir)/nix/var/nix" \
-  NIX_STORE_DIR="$(abs_builddir)/nix/store" \
-  NIX_LOG_DIR="$(abs_builddir)/nix/var/log/nix" \
-  PGHOST=/tmp \
-  PERL5LIB="$(srcdir):$(abs_top_srcdir)/src/lib:$$PERL5LIB" \
-  PYTHONPATH= \
-  PATH=$(abs_top_srcdir)/src/hydra-evaluator:$(abs_top_srcdir)/src/script:$(abs_top_srcdir)/src/hydra-eval-jobs:$(abs_top_srcdir)/src/hydra-queue-runner:$$PATH \
-  perl -w
-
-EXTRA_DIST = \
-  $(wildcard *.pm) \
-  $(wildcard jobs/*.nix) \
-  $(wildcard jobs/*.sh) \
-  $(TESTS)
-
-TESTS = \
-  perlcritic.pl \
-  test.pl
-
-check_SCRIPTS = repos
-
-repos: dirs
-
-dirs:
-	mkdir -p data
-	touch data/hydra.conf
-	mkdir -p nix
-	mkdir -p nix/etc/nix
-	mkdir -p nix/store
-	mkdir -p nix/var


@@ -115,7 +115,7 @@ subtest "evaluation" => sub {
         my $build = decode_json(request_json({ uri => "/build/" . $evals->[0]->{builds}->[0] })->content());
         is($build->{job}, "job", "The build's job name is job");
         is($build->{finished}, 0, "The build isn't finished yet");
-        ok($build->{buildoutputs}->{out}->{path} =~ /\/nix\/store\/[a-zA-Z0-9]{32}-job$/, "The build's outpath is in the Nix store and named 'job'");
+        like($build->{buildoutputs}->{out}->{path}, qr/\/nix\/store\/[a-zA-Z0-9]{32}-job$/, "The build's outpath is in the Nix store and named 'job'");
 
         subtest "search" => sub {
             my $search_project = decode_json(request_json({ uri => "/search/?query=sample" })->content());


@@ -18,14 +18,14 @@ isnt($res, 0, "hydra-eval-jobset exits non-zero");
 ok(utf8::decode($stderr), "Stderr output is UTF8-clean");
 like(
     $stderr,
-    qr/aggregate job mixed_aggregate failed with the error: constituentA: does not exist/,
+    qr/aggregate job mixed_aggregate failed with the error: "constituentA": does not exist/,
     "The stderr record includes a relevant error message"
 );
 
-$jobset->discard_changes; # refresh from DB
+$jobset->discard_changes({ '+columns' => {'errormsg' => 'errormsg'} }); # refresh from DB
 like(
     $jobset->errormsg,
-    qr/aggregate job mixed_aggregate failed with the error: constituentA: does not exist/,
+    qr/aggregate job mixed_aggregate failed with the error: "constituentA": does not exist/,
     "The jobset records a relevant error message"
 );


@@ -5,13 +5,58 @@ use Test2::V0;
 
 my $ctx = test_context();
 
-my $builds = $ctx->makeAndEvaluateJobset(
-    expression => 'constituents.nix',
+my $expression = 'constituents.nix';
+my $jobsetCtx = $ctx->makeJobset(
+    expression => $expression,
+);
+my $builds = $ctx->evaluateJobset(
+    jobset => $jobsetCtx->{"jobset"},
+    expression => $expression,
+    build => 0,
 );
 
 my $constituentA = $builds->{"constituentA"};
 my $directAggregate = $builds->{"direct_aggregate"};
 my $indirectAggregate = $builds->{"indirect_aggregate"};
+my $mixedAggregate = $builds->{"mixed_aggregate"};
+
+# Ensure that we get exactly the aggregates we expect
+my %expected_constituents = (
+    'direct_aggregate' => {
+        'constituentA' => 1,
+    },
+    'indirect_aggregate' => {
+        'constituentA' => 1,
+    },
+    'mixed_aggregate' => {
+        # Note that `constituentA_alias` becomes `constituentA`, because
+        # the shorter name is preferred
+        'constituentA' => 1,
+        'constituentB' => 1,
+    },
+);
+
+my $rs = $ctx->db->resultset('AggregateConstituents')->search(
+    {},
+    {
+        join => [ 'aggregate', 'constituent' ],  # Use correct relationship names
+        columns => [],
+        '+select' => [ 'aggregate.job', 'constituent.job' ],
+        '+as' => [ 'aggregate_job', 'constituent_job' ],
+    }
+);
+
+my %actual_constituents;
+while (my $row = $rs->next) {
+    my $aggregate_job = $row->get_column('aggregate_job');
+    my $constituent_job = $row->get_column('constituent_job');
+    $actual_constituents{$aggregate_job} //= {};
+    $actual_constituents{$aggregate_job}{$constituent_job} = 1;
+}
+
+is(\%actual_constituents, \%expected_constituents, "Exact aggregate constituents as expected");
+
+# Check that deletion also doesn't work accordingly
 is(system('nix-store', '--delete', $constituentA->drvpath), 256, "Deleting a constituent derivation fails");
 is(system('nix-store', '--delete', $directAggregate->drvpath), 256, "Deleting the direct aggregate derivation fails");


@@ -0,0 +1,67 @@
+use feature 'unicode_strings';
+use strict;
+use warnings;
+use Setup;
+use Test2::V0;
+use File::Copy qw(cp);
+
+my $ctx = test_context(
+    nix_config => qq|
+    experimental-features = nix-command flakes
+    |,
+    hydra_config => q|
+    <runcommand>
+      evaluator_pure_eval = false
+    </runcommand>
+    |
+);
+
+sub checkFlake {
+    my ($flake) = @_;
+
+    cp($ctx->jobsdir . "/basic.nix", $ctx->jobsdir . "/" . $flake);
+    cp($ctx->jobsdir . "/config.nix", $ctx->jobsdir . "/" . $flake);
+    cp($ctx->jobsdir . "/empty-dir-builder.sh", $ctx->jobsdir . "/" . $flake);
+    cp($ctx->jobsdir . "/fail.sh", $ctx->jobsdir . "/" . $flake);
+    cp($ctx->jobsdir . "/succeed-with-failed.sh", $ctx->jobsdir . "/" . $flake);
+
+    chmod 0755, $ctx->jobsdir . "/" . $flake . "/empty-dir-builder.sh";
+    chmod 0755, $ctx->jobsdir . "/" . $flake . "/fail.sh";
+    chmod 0755, $ctx->jobsdir . "/" . $flake . "/succeed-with-failed.sh";
+
+    my $builds = $ctx->makeAndEvaluateJobset(
+        flake => 'path:' . $ctx->jobsdir . "/" . $flake,
+        build => 1
+    );
+
+    subtest "Build: succeed_with_failed" => sub {
+        my $build = $builds->{"succeed_with_failed"};
+        is($build->finished, 1, "Build should be finished.");
+        is($build->buildstatus, 6, "succeeeded-but-failed should have buildstatus 6.");
+    };
+
+    subtest "Build: empty_dir" => sub {
+        my $build = $builds->{"empty_dir"};
+        is($build->finished, 1, "Build should be finished.");
+        is($build->buildstatus, 0, "Should have succeeded.");
+    };
+
+    subtest "Build: fails" => sub {
+        my $build = $builds->{"fails"};
+        is($build->finished, 1, "Build should be finished.");
+        is($build->buildstatus, 1, "Should have failed.");
+    };
+}
+
+subtest "Flake using `checks`" => sub {
+    checkFlake 'flake-checks'
+};
+
+subtest "Flake using `hydraJobs`" => sub {
+    checkFlake 'flake-hydraJobs'
+};
+
+done_testing;


@@ -0,0 +1,22 @@
+use feature 'unicode_strings';
+use strict;
+use warnings;
+use Setup;
+use Test2::V0;
+
+my $ctx = test_context();
+
+my $builds = $ctx->makeAndEvaluateJobset(
+    expression => "meta.nix",
+    build => 1
+);
+
+my $build = $builds->{"full-of-meta"};
+
+is($build->finished, 1, "Build should be finished.");
+is($build->description, "This is the description of the job.", "Wrong description extracted from the build.");
+is($build->license, "MIT, BSD", "Wrong licenses extracted from the build.");
+is($build->homepage, "https://example.com/", "Wrong homepage extracted from the build.");
+is($build->maintainers, 'alice@example.com, bob@not.found', "Wrong maintainers extracted from the build.");
+
+done_testing;


@@ -31,6 +31,10 @@ if ($sd_res != 0) {
     skip_all("`systemd-run` returned non-zero when executing `true` (expected 0)");
 }
 
+# XXX(Mindavi): We should think about how to fix this.
+# Note that it was always skipped on ofborg/h.n.o (nixos hydra) since systemd-run is not present in the ambient environment there.
+skip_all("Always fails, an error about 'oom' being a string is logged and the process never OOMs. Needs a way to use more memory.");
+
 my $ctx = test_context();
 
 # Contain the memory usage to 25 MegaBytes using `systemd-run`


@@ -5,6 +5,8 @@ rec {
     builder = ./empty-dir-builder.sh;
   };
 
+  constituentA_alias = constituentA;
+
   constituentB = mkDerivation {
     name = "empty-dir-B";
     builder = ./empty-dir-builder.sh;
@@ -32,7 +34,7 @@ rec {
     name = "mixed_aggregate";
     _hydraAggregate = true;
     constituents = [
-      "constituentA"
+      "constituentA_alias"
       constituentB
     ];
     builder = ./empty-dir-builder.sh;


@@ -0,0 +1,6 @@
+{
+  outputs = { ... }: {
+    checks =
+      import ./basic.nix;
+  };
+}


@@ -0,0 +1,6 @@
+{
+  outputs = { ... }: {
+    hydraJobs =
+      import ./basic.nix;
+  };
+}

t/jobs/meta.nix (new file)

@@ -0,0 +1,17 @@
+with import ./config.nix;
+{
+  full-of-meta =
+    mkDerivation {
+      name = "full-of-meta";
+      builder = ./empty-dir-builder.sh;
+
+      meta = {
+        description = "This is the description of the job.";
+        license = [ { shortName = "MIT"; } "BSD" ];
+        homepage = "https://example.com/";
+        maintainers = [ "alice@example.com" { email = "bob@not.found"; } ];
+
+        outPath = "${placeholder "out"}";
+      };
+    };
+}


@@ -4,6 +4,8 @@ use warnings;
 package HydraTestContext;
 use File::Path qw(make_path);
 use File::Basename;
+use File::Copy::Recursive qw(rcopy);
+use File::Which qw(which);
 use Cwd qw(abs_path getcwd);
 use CliRunners;
 use Hydra::Helper::Exec;
@@ -77,6 +79,13 @@ sub new {
     );
 
     $ENV{'HYDRA_DBI'} = $pgsql->dsn;
+
+    my $jobsdir = "$dir/jobs";
+    rcopy(abs_path(dirname(__FILE__) . "/../jobs"), $jobsdir);
+    my $coreutils_path = dirname(which 'install');
+    replace_variable_in_file($jobsdir . "/config.nix", '@testPath@', $coreutils_path);
+    replace_variable_in_file($jobsdir . "/declarative/project.json", '@jobsPath@', $jobsdir);
+
     my $self = bless {
         _db => undef,
         db_handle => $pgsql,
@@ -84,7 +93,7 @@ sub new {
         nix_state_dir => $nix_state_dir,
         nix_log_dir => $nix_log_dir,
         testdir => abs_path(dirname(__FILE__) . "/.."),
-        jobsdir => abs_path(dirname(__FILE__) . "/../jobs"),
+        jobsdir => $jobsdir,
         deststoredir => $deststoredir,
     }, $class;
 
@@ -92,7 +101,7 @@ sub new {
         $opts{'before_init'}->($self);
     }
 
-    expectOkay(5, ("hydra-init"));
+    expectOkay(30, ("hydra-init"));
 
     return $self;
 }
@@ -156,20 +165,46 @@ sub nix_state_dir {
 sub makeAndEvaluateJobset {
     my ($self, %opts) = @_;
 
-    my $expression = $opts{'expression'} || die "Mandatory 'expression' option not passed to makeAndEvaluateJobset.\n";
-    my $jobsdir = $opts{'jobsdir'} // $self->jobsdir;
-    my $should_build = $opts{'build'} // 0;
+    my $expression = $opts{'expression'};
+    my $flake = $opts{'flake'};
+    if (not $expression and not $flake) {
+        die "One of 'expression' or 'flake' must be passed to makeEvaluateJobset.\n";
+    }
 
-    my $jobsetCtx = $self->makeJobset(
-        expression => $expression,
+    my $jobsdir = $opts{'jobsdir'} // $self->jobsdir;
+
+    my %args = (
         jobsdir => $jobsdir,
     );
-    my $jobset = $jobsetCtx->{"jobset"};
+    if ($expression) {
+        $args{expression} = $expression;
+    }
+    if ($flake) {
+        $args{flake} = $flake;
+    }
+    my $jobsetCtx = $self->makeJobset(%args);
+
+    return $self->evaluateJobset(
+        jobset => $jobsetCtx->{"jobset"},
+        expression => $expression,
+        flake => $flake,
+        build => $opts{"build"} // 0,
+    )
+}
+
+sub evaluateJobset {
+    my ($self, %opts) = @_;
+
+    my $jobset = $opts{'jobset'};
+    my $expression = $opts{'expression'} // $opts{'flake'};
 
     evalSucceeds($jobset) or die "Evaluating jobs/$expression should exit with return code 0.\n";
 
     my $builds = {};
+    my $should_build = $opts{'build'};
 
     for my $build ($jobset->builds) {
         if ($should_build) {
             runBuild($build) or die "Build '".$build->job."' from jobs/$expression should exit with return code 0.\n";
@@ -186,7 +221,7 @@ sub makeAndEvaluateJobset {
 #
 # In return, you get a hash of the user, project, and jobset records.
 #
-# This always uses an `expression` from the `jobsdir` directory.
+# This always uses an `expression` or `flake` from the `jobsdir` directory.
 #
 # Hash Parameters:
 #
@@ -195,7 +230,12 @@ sub makeAndEvaluateJobset {
 sub makeJobset {
     my ($self, %opts) = @_;
 
-    my $expression = $opts{'expression'} || die "Mandatory 'expression' option not passed to makeJobset.\n";
+    my $expression = $opts{'expression'};
+    my $flake = $opts{'flake'};
+    if (not $expression and not $flake) {
+        die "One of 'expression' or 'flake' must be passed to makeJobset.\n";
+    }
+
     my $jobsdir = $opts{'jobsdir'} // $self->jobsdir;
 
     # Create a new user for this test
@@ -213,12 +253,20 @@ sub makeJobset {
     });
 
     # Create a new jobset for this test and set up the inputs
-    my $jobset = $project->jobsets->create({
+    my %args = (
         name => rand_chars(),
-        nixexprinput => "jobs",
-        nixexprpath => $expression,
         emailoverride => ""
-    });
+    );
+    if ($expression) {
+        $args{type} = 0;
+        $args{nixexprinput} = "jobs";
+        $args{nixexprpath} = $expression;
+    }
+    if ($flake) {
+        $args{type} = 1;
+        $args{flake} = $flake;
+    }
+    my $jobset = $project->jobsets->create(\%args);
 
     my $jobsetinput = $jobset->jobsetinputs->create({name => "jobs", type => "path"});
     $jobsetinput->jobsetinputalts->create({altnr => 0, value => $jobsdir});
@@ -243,6 +291,18 @@ sub write_file {
     close $fh;
 }
 
+sub replace_variable_in_file {
+    my ($fn, $var, $val) = @_;
+
+    open (my $input, '<', "$fn.in") or die $!;
+    open (my $output, '>', $fn) or die $!;
+
+    while (my $line = <$input>) {
+        $line =~ s/$var/$val/g;
+        print $output $line;
+    }
+}
+
 sub rand_chars {
     return sprintf("t%08X", rand(0xFFFFFFFF));
 }


@@ -70,7 +70,7 @@ sub add_user {
     my $email = $opts{'email'} // "$name\@example";
     my $password = $opts{'password'} // rand_chars();
 
-    my ($res, $stdout, $stderr) = captureStdoutStderr(1, ("slappasswd", "-s", $password));
+    my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("slappasswd", "-s", $password));
     if ($res) {
         die "Failed to execute slappasswd ($res): $stderr, $stdout";
     }
@@ -178,7 +178,7 @@ sub start {
 sub validateConfig {
     my ($self) = @_;
 
-    expectOkay(1, ("slaptest", "-u", "-F", $self->{"_slapd_dir"}));
+    expectOkay(5, ("slaptest", "-u", "-F", $self->{"_slapd_dir"}));
 }
 
 sub _spawn {
@@ -218,7 +218,7 @@ sub load_ldif {
     my $path = "${\$self->{'_tmpdir'}}/load.ldif";
     write_file($path, $content);
 
-    expectOkay(1, ("slapadd", "-F", $self->{"_slapd_dir"}, "-b", $suffix, "-l", $path));
+    expectOkay(5, ("slapadd", "-F", $self->{"_slapd_dir"}, "-b", $suffix, "-l", $path));
     $self->validateConfig();
 }

t/meson.build (new file)

@@ -0,0 +1,42 @@
+fs = import('fs')
+
+test('perlcritic',
+  perl,
+  args: ['-w', files('perlcritic.pl')],
+  workdir: meson.project_source_root(),
+  timeout: -1,
+)
+
+testenv = environment(
+  {
+    'BZR_HOME': meson.current_build_dir() / 'data',
+    'HYDRA_DBI': 'dbi:Pg:dbname=hydra-test-suite;port=6433',
+    'HYDRA_DATA': meson.current_build_dir() / 'data',
+    'HYDRA_HOME': meson.project_source_root() / 'src',
+    'PGHOST': '/tmp',
+    'PYTHONPATH': '',
+
+    # libpqxx seems to randomly crash with certain values of MALLOC_PERTURB_,
+    # set by default by Meson's test(). Very promising, high quality software.
+    'MALLOC_PERTURB_': '0',
+  },
+)
+testenv.prepend('PERL5LIB',
+  meson.current_source_dir(),
+  meson.project_source_root() / 'src/lib',
+  separator: ':'
+)
+testenv.prepend('PATH',
+  fs.parent(hydra_evaluator.full_path()),
+  fs.parent(hydra_queue_runner.full_path()),
+  meson.project_source_root() / 'src/script',
+  separator: ':'
+)
+
+test('testsuite',
+  perl,
+  args: ['-I', meson.current_source_dir() / 'lib', '-w', files('test.pl')],
+  env: testenv,
+  workdir: meson.current_source_dir(),
+  timeout: -1,
+)
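Once configured, both targets run through Meson's test runner, for example (build directory name illustrative):

    meson test -C build perlcritic
    meson test -C build testsuite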


@@ -39,7 +39,7 @@ subtest "Building, caching, and then garbage collecting the underlying job" => s
     ok(unlink(Hydra::Helper::Nix::gcRootFor($path)), "Unlinking the GC root for underlying Dependency succeeds");
 
-    (my $ret, my $stdout, my $stderr) = captureStdoutStderr(5, "nix-store", "--delete", $path);
+    (my $ret, my $stdout, my $stderr) = captureStdoutStderr(15, "nix-store", "--delete", $path);
     is($ret, 0, "Deleting the underlying dependency should succeed");
 };


@@ -22,11 +22,11 @@ is(nrQueuedBuildsForJobset($jobset), 0, "Evaluating jobs/broken-constituent.nix
 
 like(
     $jobset->errormsg,
-    qr/^does-not-exist: does not exist$/m,
+    qr/^"does-not-exist": does not exist$/m,
     "Evaluating jobs/broken-constituent.nix should log an error for does-not-exist");
 like(
     $jobset->errormsg,
-    qr/^does-not-evaluate: error: assertion 'false' failed$/m,
+    qr/^"does-not-evaluate": "error: assertion 'false' failed/m,
     "Evaluating jobs/broken-constituent.nix should log an error for does-not-evaluate");
 
 done_testing;


@ -9,7 +9,7 @@ my $db = $ctx->db();
subtest "Handling password and password hash creation" => sub { subtest "Handling password and password hash creation" => sub {
subtest "Creating a user with a plain text password (insecure) stores the password securely" => sub { subtest "Creating a user with a plain text password (insecure) stores the password securely" => sub {
my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-create-user", "plain-text-user", "--password", "foobar")); my ($res, $stdout, $stderr) = captureStdoutStderr(15, ("hydra-create-user", "plain-text-user", "--password", "foobar"));
is($res, 0, "hydra-create-user should exit zero"); is($res, 0, "hydra-create-user should exit zero");
like($stderr, qr/Submitting plaintext passwords as arguments is deprecated and will be removed/, "Submitting a plain text password is deprecated."); like($stderr, qr/Submitting plaintext passwords as arguments is deprecated and will be removed/, "Submitting a plain text password is deprecated.");
@ -23,7 +23,7 @@ subtest "Handling password and password hash creation" => sub {
}; };
subtest "Creating a user with a sha1 password (still insecure) stores the password as a hashed sha1" => sub { subtest "Creating a user with a sha1 password (still insecure) stores the password as a hashed sha1" => sub {
my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-create-user", "old-password-hash-user", "--password-hash", "8843d7f92416211de9ebb963ff4ce28125932878")); my ($res, $stdout, $stderr) = captureStdoutStderr(15, ("hydra-create-user", "old-password-hash-user", "--password-hash", "8843d7f92416211de9ebb963ff4ce28125932878"));
is($res, 0, "hydra-create-user should exit zero"); is($res, 0, "hydra-create-user should exit zero");
my $user = $db->resultset('Users')->find({ username => "old-password-hash-user" }); my $user = $db->resultset('Users')->find({ username => "old-password-hash-user" });
@ -36,7 +36,7 @@ subtest "Handling password and password hash creation" => sub {
}; };
subtest "Creating a user with an argon2 password stores the password as given" => sub { subtest "Creating a user with an argon2 password stores the password as given" => sub {
my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-create-user", "argon2-hash-user", "--password-hash", '$argon2id$v=19$m=262144,t=3,p=1$tMnV5paYjmIrUIb6hylaNA$M8/e0i3NGrjhOliVLa5LqQ')); my ($res, $stdout, $stderr) = captureStdoutStderr(15, ("hydra-create-user", "argon2-hash-user", "--password-hash", '$argon2id$v=19$m=262144,t=3,p=1$tMnV5paYjmIrUIb6hylaNA$M8/e0i3NGrjhOliVLa5LqQ'));
is($res, 0, "hydra-create-user should exit zero"); is($res, 0, "hydra-create-user should exit zero");
my $user = $db->resultset('Users')->find({ username => "argon2-hash-user" }); my $user = $db->resultset('Users')->find({ username => "argon2-hash-user" });
@ -50,7 +50,7 @@ subtest "Handling password and password hash creation" => sub {
subtest "Creating a user by prompting for the password" => sub { subtest "Creating a user by prompting for the password" => sub {
subtest "with the same password twice" => sub { subtest "with the same password twice" => sub {
my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(5, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nmy-password\n"); my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(15, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nmy-password\n");
is($res, 0, "hydra-create-user should exit zero"); is($res, 0, "hydra-create-user should exit zero");
my $user = $db->resultset('Users')->find({ username => "prompted-pass-user" }); my $user = $db->resultset('Users')->find({ username => "prompted-pass-user" });
@ -62,7 +62,7 @@ subtest "Handling password and password hash creation" => sub {
}; };
subtest "With mismatched password confirmation" => sub { subtest "With mismatched password confirmation" => sub {
my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(5, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nnot-my-password\n"); my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(15, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nnot-my-password\n");
isnt($res, 0, "hydra-create-user should exit non-zero"); isnt($res, 0, "hydra-create-user should exit non-zero");
}; };
}; };
@ -76,7 +76,7 @@ subtest "Handling password and password hash creation" => sub {
); );
for my $case (@cases) { for my $case (@cases) {
my ($res, $stdout, $stderr) = captureStdoutStderr(5, ( my ($res, $stdout, $stderr) = captureStdoutStderr(15, (
"hydra-create-user", "bogus-password-options", @{$case})); "hydra-create-user", "bogus-password-options", @{$case}));
like($stderr, qr/please specify only one of --password-prompt or --password-hash/, "We get an error about specifying the password"); like($stderr, qr/please specify only one of --password-prompt or --password-hash/, "We get an error about specifying the password");
isnt($res, 0, "hydra-create-user should exit non-zero with conflicting " . join(" ", @{$case})); isnt($res, 0, "hydra-create-user should exit non-zero with conflicting " . join(" ", @{$case}));
@ -84,7 +84,7 @@ subtest "Handling password and password hash creation" => sub {
}; };
subtest "A password is not required for creating a Google-based account" => sub { subtest "A password is not required for creating a Google-based account" => sub {
my ($res, $stdout, $stderr) = captureStdoutStderr(5, ( my ($res, $stdout, $stderr) = captureStdoutStderr(15, (
"hydra-create-user", "google-account", "--type", "google")); "hydra-create-user", "google-account", "--type", "google"));
is($res, 0, "hydra-create-user should exit zero"); is($res, 0, "hydra-create-user should exit zero");
}; };
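These hunks only raise the timeout passed to `captureStdoutStderr` (and its stdin-feeding variant) from 5 to 15 seconds, presumably to accommodate the deliberately expensive Argon2 hashing that `hydra-create-user` now performs. The helper itself lives in Hydra's test library and is not shown in this diff; the following is a minimal sketch, assuming IPC::Run semantics, of what such a timeout-bounded capture could look like. The name and return convention are illustrative, not the project's actual code.

```perl
use IPC::Run qw(run timeout);

# Hedged sketch of a captureStdoutStderr($seconds, @cmd)-style helper.
# The real helper lives in t/lib and may differ; this only illustrates
# how the first argument bounds the child's total run time.
sub capture_stdout_stderr_sketch {
    my ($seconds, @cmd) = @_;
    my ($in, $out, $err) = ('', '', '');
    my $ok = eval {
        # timeout() makes run() die if the command outlives $seconds.
        run \@cmd, \$in, \$out, \$err, timeout($seconds);
    };
    # run() returns true on exit code 0; on timeout, eval traps the
    # exception and $@ is set.
    my $res = $ok ? 0 : ($@ ? -1 : ($? >> 8));
    return ($res, $out, $err);
}
```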

View File

@ -28,7 +28,7 @@ subtest "hydra-init upgrades user's password hashes from sha1 to sha1 inside Arg
$janet->setPassword("foobar"); $janet->setPassword("foobar");
is($alice->password, "8843d7f92416211de9ebb963ff4ce28125932878", "Alice's sha1 is stored in the database"); is($alice->password, "8843d7f92416211de9ebb963ff4ce28125932878", "Alice's sha1 is stored in the database");
my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-init")); my ($res, $stdout, $stderr) = captureStdoutStderr(30, ("hydra-init"));
if ($res != 0) { if ($res != 0) {
is($stdout, ""); is($stdout, "");
is($stderr, ""); is($stderr, "");
@ -55,7 +55,7 @@ subtest "hydra-init upgrades user's password hashes from sha1 to sha1 inside Arg
}; };
subtest "Running hydra-init don't break Alice or Janet's passwords" => sub { subtest "Running hydra-init don't break Alice or Janet's passwords" => sub {
my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-init")); my ($res, $stdout, $stderr) = captureStdoutStderr(30, ("hydra-init"));
is($res, 0, "hydra-init should exit zero"); is($res, 0, "hydra-init should exit zero");
my $updatedAlice = $db->resultset('Users')->find({ username => "alice" }); my $updatedAlice = $db->resultset('Users')->find({ username => "alice" });
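`hydra-init` gets a larger budget here (30 seconds instead of 5) because, per the subtest name, it re-hashes every legacy sha1 password hash inside Argon2. A hedged sketch of that upgrade scheme follows — not Hydra's actual implementation, but an illustration of how a sha1 digest can be wrapped in Argon2 without knowing the plaintext. `argon2id_pass` and `argon2id_verify` are Crypt::Argon2's real exported functions; the parameters mirror the `$argon2id$v=19$m=262144,t=3,p=1$...` hash used in the test above.

```perl
use Crypt::Argon2 qw(argon2id_pass argon2id_verify);
use Digest::SHA qw(sha1_hex);

# Hedged sketch (not Hydra's code): upgrade a stored sha1 hex digest by
# Argon2-hashing the digest itself, so the migration works without the
# user's plaintext password.
my $legacy = "8843d7f92416211de9ebb963ff4ce28125932878";   # sha1("foobar")
my $salt   = pack "C16", map { int rand 256 } 1 .. 16;
# t=3, m=256M, p=1 matches m=262144 KiB, t=3, p=1 from the test's hash.
my $stored = argon2id_pass($legacy, $salt, 3, '256M', 1, 16);

# Verification sha1-hashes the candidate before the Argon2 check.
sub check_password {
    my ($candidate) = @_;
    return argon2id_verify($stored, sha1_hex($candidate));
}
print check_password("foobar") ? "ok\n" : "fail\n";
```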

View File

@ -21,7 +21,7 @@ if (defined($ENV{"NIX_BUILD_CORES"})
print STDERR "test.pl: Defaulting \$YATH_JOB_COUNT to \$NIX_BUILD_CORES (${\$ENV{'NIX_BUILD_CORES'}})\n"; print STDERR "test.pl: Defaulting \$YATH_JOB_COUNT to \$NIX_BUILD_CORES (${\$ENV{'NIX_BUILD_CORES'}})\n";
} }
system($^X, find_yath(), '-D', 'test', '--default-search' => './', @ARGV); system($^X, find_yath(), '-D', 'test', '--qvf', '--event-timeout', 240, '--default-search' => './', @ARGV);
my $exit = $?; my $exit = $?;
# This makes sure it works with prove. # This makes sure it works with prove.
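The `test.pl` change adds two yath options: `--qvf` (quiet output, but verbose for failing tests) and `--event-timeout 240` (abort a test that produces no events for 240 seconds). The wrapper then propagates the child's status through `$?`, which packs the exit code into the high byte — that is what the `prove` compatibility comment refers to. A minimal self-contained illustration (not part of the diff) of decoding it:

```perl
# Sketch of decoding $? after system(): the exit code lives in the high
# byte; the low 7 bits hold the terminating signal, if any.
system($^X, '-e', 'exit 3');
my $status = $?;
my $code   = $status >> 8;     # 3
my $signal = $status & 127;    # 0 unless the child died from a signal
print "exit=$code signal=$signal\n";
```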