From 750978a19232583e17620a1bd80435e957e7213a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sandro=20J=C3=A4ckel?= Date: Sat, 18 Jun 2022 13:22:42 +0200 Subject: [PATCH 01/46] Add gitea push hook --- doc/manual/src/webhooks.md | 20 +++++++++++++++++--- src/lib/Hydra/Controller/API.pm | 16 ++++++++++++++++ src/lib/Hydra/Controller/Root.pm | 3 ++- 3 files changed, 35 insertions(+), 4 deletions(-) diff --git a/doc/manual/src/webhooks.md b/doc/manual/src/webhooks.md index 2b26cd61..674e1064 100644 --- a/doc/manual/src/webhooks.md +++ b/doc/manual/src/webhooks.md @@ -1,9 +1,12 @@ # Webhooks -Hydra can be notified by github's webhook to trigger a new evaluation when a +Hydra can be notified by github or gitea with webhooks to trigger a new evaluation when a jobset has a github repo in its input. -To set up a github webhook go to `https://github.com///settings` and in the `Webhooks` tab -click on `Add webhook`. + +## GitHub + +To set up a webhook for a GitHub repository go to `https://github.com///settings` +and in the `Webhooks` tab click on `Add webhook`. - In `Payload URL` fill in `https:///api/push-github`. - In `Content type` switch to `application/json`. @@ -11,3 +14,14 @@ click on `Add webhook`. - For `Which events would you like to trigger this webhook?` keep the default option for events on `Just the push event.`. Then add the hook with `Add webhook`. + +## Gitea + +To set up a webhook for a Gitea repository go to the settings of the repository in your Gitea instance +and in the `Webhooks` tab click on `Add Webhook` and choose `Gitea` in the drop down. + +- In `Target URL` fill in `https:///api/push-gitea`. +- Keep HTTP method `POST`, POST Content Type `application/json` and Trigger On `Push Events`. +- Change the branch filter to match the git branch hydra builds. + +Then add the hook with `Add webhook`. diff --git a/src/lib/Hydra/Controller/API.pm b/src/lib/Hydra/Controller/API.pm index 6f10ef57..12073595 100644 --- a/src/lib/Hydra/Controller/API.pm +++ b/src/lib/Hydra/Controller/API.pm @@ -285,6 +285,22 @@ sub push_github : Chained('api') PathPart('push-github') Args(0) { $c->response->body(""); } +sub push_gitea : Chained('api') PathPart('push-gitea') Args(0) { + my ($self, $c) = @_; + + $c->{stash}->{json}->{jobsetsTriggered} = []; + + my $in = $c->request->{data}; + my $url = $in->{repository}->{clone_url} or die; + print STDERR "got push from Gitea repository $url\n"; + + triggerJobset($self, $c, $_, 0) foreach $c->model('DB::Jobsets')->search( + { 'project.enabled' => 1, 'me.enabled' => 1 }, + { join => 'project' + , where => \ [ 'me.flake like ? or exists (select 1 from JobsetInputAlts where project = me.project and jobset = me.name and value like ?)', [ 'flake', "%$url%"], [ 'value', "%$url%" ] ] + }); + $c->response->body(""); +} 1; diff --git a/src/lib/Hydra/Controller/Root.pm b/src/lib/Hydra/Controller/Root.pm index c6843d29..1b33db2a 100644 --- a/src/lib/Hydra/Controller/Root.pm +++ b/src/lib/Hydra/Controller/Root.pm @@ -32,6 +32,7 @@ sub noLoginNeeded { return $whitelisted || $c->request->path eq "api/push-github" || + $c->request->path eq "api/push-gitea" || $c->request->path eq "google-login" || $c->request->path eq "github-redirect" || $c->request->path eq "github-login" || @@ -77,7 +78,7 @@ sub begin :Private { $_->supportedInputTypes($c->stash->{inputTypes}) foreach @{$c->hydra_plugins}; # XSRF protection: require POST requests to have the same origin. 
- if ($c->req->method eq "POST" && $c->req->path ne "api/push-github") { + if ($c->req->method eq "POST" && $c->req->path ne "api/push-github" && $c->req->path ne "api/push-gitea") { my $referer = $c->req->header('Referer'); $referer //= $c->req->header('Origin'); my $base = $c->req->base; From a81c6a3a80d1055aa80934ab229e2dc49594edd2 Mon Sep 17 00:00:00 2001 From: Sandro Date: Fri, 1 Jul 2022 22:21:32 +0200 Subject: [PATCH 02/46] Match URIs that don't end in .git Co-authored-by: Charlotte --- src/lib/Hydra/Controller/API.pm | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lib/Hydra/Controller/API.pm b/src/lib/Hydra/Controller/API.pm index 12073595..5eeb0c04 100644 --- a/src/lib/Hydra/Controller/API.pm +++ b/src/lib/Hydra/Controller/API.pm @@ -292,6 +292,7 @@ sub push_gitea : Chained('api') PathPart('push-gitea') Args(0) { my $in = $c->request->{data}; my $url = $in->{repository}->{clone_url} or die; + $url =~ s/.git$//; print STDERR "got push from Gitea repository $url\n"; triggerJobset($self, $c, $_, 0) foreach $c->model('DB::Jobsets')->search( From 21044bc4d9d0373366ca189f27066124f526b3b8 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 21 May 2024 18:01:30 -0400 Subject: [PATCH 03/46] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nix': 'github:NixOS/nix/8f42912c80c0a03f62f6a3d28a3af05a9762565d' (2024-01-30) → 'github:NixOS/nix/ab48ea416a203e9ccefb70aa634e27477e4c1ac4' (2024-05-15) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index d8b2e4a9..3c4c0f75 100644 --- a/flake.lock +++ b/flake.lock @@ -42,11 +42,11 @@ "nixpkgs-regression": "nixpkgs-regression" }, "locked": { - "lastModified": 1706637536, - "narHash": "sha256-fjx+nCOzuSxGWfhwWWc8hCsLFZAjZLDDUcbBtldRqbk=", + "lastModified": 1715805674, + "narHash": "sha256-0CIBMECsA3ISJZrJcOTzi6wa3QENTKGLtOpYIOoxwxo=", "owner": "NixOS", "repo": "nix", - "rev": "8f42912c80c0a03f62f6a3d28a3af05a9762565d", + "rev": "ab48ea416a203e9ccefb70aa634e27477e4c1ac4", "type": "github" }, "original": { From 879ceb5cdc88096004882402d333f27d2afac577 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 21 May 2024 18:23:35 -0400 Subject: [PATCH 04/46] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nix': 'github:NixOS/nix/60824fa97c588a0faf68ea61260a47e388b0a4e5' (2024-04-11) → 'github:NixOS/nix/1ebc34e9c54b740ea4f4466443047d709dccf5b2' (2024-05-16) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 966431f9..a4be7433 100644 --- a/flake.lock +++ b/flake.lock @@ -42,11 +42,11 @@ "nixpkgs-regression": "nixpkgs-regression" }, "locked": { - "lastModified": 1712849398, - "narHash": "sha256-10z/SoidVl9/lh56cMLj7ntJZHtVrumFvmn1YEqXmaM=", + "lastModified": 1715845907, + "narHash": "sha256-1OigUcZGDInTVZJBTioo9vwRt70yvcfAkSRUeAD/mfg=", "owner": "NixOS", "repo": "nix", - "rev": "60824fa97c588a0faf68ea61260a47e388b0a4e5", + "rev": "1ebc34e9c54b740ea4f4466443047d709dccf5b2", "type": "github" }, "original": { From bc19e7cd65e55e39e9c304d108010399ef0987a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Niklas=20Hamb=C3=BCchen?= Date: Sat, 20 Jul 2024 23:45:12 +0200 Subject: [PATCH 05/46] renderInputDiff: Increase git hash length 8 -> 12 See investigation on lengths required to be conflict-free in practice: 
https://github.com/NixOS/hydra/pull/1258#issuecomment-1321891677
---
 src/root/common.tt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/root/common.tt b/src/root/common.tt
index 4487cbe3..869d8856 100644
--- a/src/root/common.tt
+++ b/src/root/common.tt
@@ -374,7 +374,7 @@ BLOCK renderInputDiff; %]
 [% ELSIF bi1.uri == bi2.uri && bi1.revision != bi2.revision %]
 [% IF bi1.type == "git" %]
-[% bi1.name %][% INCLUDE renderDiffUri contents=(bi1.revision.substr(0, 8) _ ' to ' _ bi2.revision.substr(0, 8)) %]
+[% bi1.name %][% INCLUDE renderDiffUri contents=(bi1.revision.substr(0, 12) _ ' to ' _ bi2.revision.substr(0, 12)) %]
 [% ELSE %]

From ada51d70fc3e7fec52eeeef26b937e4d3ae0529b Mon Sep 17 00:00:00 2001
From: marius david
Date: Tue, 23 Jul 2024 22:37:32 +0200
Subject: [PATCH 06/46] Document the default user and port in hacking.md

---
 doc/manual/src/hacking.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/doc/manual/src/hacking.md b/doc/manual/src/hacking.md
index 49c17395..de826720 100644
--- a/doc/manual/src/hacking.md
+++ b/doc/manual/src/hacking.md
@@ -30,6 +30,8 @@ foreman:
 $ foreman start
 ```
 
+The Hydra interface will be available on port 63333, with an admin user named "alice" and password "foobar".
+
 You can run just the Hydra web server in your source tree as follows:
 
 ```console

From 578a3d22920c2dc319c3c55ff0b63e899aa15588 Mon Sep 17 00:00:00 2001
From: Rick van Schijndel
Date: Wed, 31 Jul 2024 01:19:40 +0200
Subject: [PATCH 07/46] t: increase timeouts for slow commands with high load

We've seen many fails on ofborg; a lot of them ultimately appear to come
down to a timeout being hit, resulting in something like this:

    Failure executing slapadd -F //slap.d -b dc=example -l //load.ldif.

Hopefully this resolves it for most cases. I've done some endurance
testing and this helps a lot. Some other commands also regularly time
out with high load:

- hydra-init
- hydra-create-user
- nix-store --delete

This should address most issues with tests randomly failing.

Used the following script for endurance testing:

```
import os
import subprocess

run_counter = 0
fail_counter = 0

while True:
    try:
        run_counter += 1
        print(f"Starting run {run_counter}")
        env = os.environ
        env["YATH_JOB_COUNT"] = "20"
        result = subprocess.run(["perl", "t/test.pl"], env=env)
        if (result.returncode != 0):
            fail_counter += 1
        print(f"Finish run {run_counter}, total fail count: {fail_counter}")
    except KeyboardInterrupt:
        print(f"Finished {run_counter} runs with {fail_counter} fails")
        break
```

In case someone else wants to do it on their system :). Note that
YATH_JOB_COUNT may need to be changed loosely based on your core count.
I only have 4 cores (8 threads), so for others higher numbers might
yield better results in shaking out unstable tests.
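For reference, a rough Perl equivalent of the endurance loop above (a sketch
only; it mirrors the Python script's behavior, and `YATH_JOB_COUNT` should be
tuned to your core count as noted):

```perl
use strict;
use warnings;

# Run the test suite in a loop and count failing runs; stop with Ctrl-C.
$ENV{YATH_JOB_COUNT} = 20;

my ($runs, $fails) = (0, 0);
while (1) {
    $runs++;
    print "Starting run $runs\n";
    # system() returns the child's wait status; non-zero means the run failed.
    my $status = system($^X, "t/test.pl");
    $fails++ if $status != 0;
    print "Finished run $runs, total fail count: $fails\n";
}
```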
--- t/lib/HydraTestContext.pm | 2 +- t/lib/LDAPContext.pm | 6 +++--- .../build-locally-with-substitutable-path.t | 2 +- t/scripts/hydra-create-user.t | 14 +++++++------- t/scripts/hydra-init.t | 4 ++-- 5 files changed, 14 insertions(+), 14 deletions(-) diff --git a/t/lib/HydraTestContext.pm b/t/lib/HydraTestContext.pm index e1a5b226..1d6fa909 100644 --- a/t/lib/HydraTestContext.pm +++ b/t/lib/HydraTestContext.pm @@ -92,7 +92,7 @@ sub new { $opts{'before_init'}->($self); } - expectOkay(5, ("hydra-init")); + expectOkay(30, ("hydra-init")); return $self; } diff --git a/t/lib/LDAPContext.pm b/t/lib/LDAPContext.pm index 2cd1a19d..df1334f0 100644 --- a/t/lib/LDAPContext.pm +++ b/t/lib/LDAPContext.pm @@ -70,7 +70,7 @@ sub add_user { my $email = $opts{'email'} // "$name\@example"; my $password = $opts{'password'} // rand_chars(); - my ($res, $stdout, $stderr) = captureStdoutStderr(1, ("slappasswd", "-s", $password)); + my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("slappasswd", "-s", $password)); if ($res) { die "Failed to execute slappasswd ($res): $stderr, $stdout"; } @@ -178,7 +178,7 @@ sub start { sub validateConfig { my ($self) = @_; - expectOkay(1, ("slaptest", "-u", "-F", $self->{"_slapd_dir"})); + expectOkay(5, ("slaptest", "-u", "-F", $self->{"_slapd_dir"})); } sub _spawn { @@ -218,7 +218,7 @@ sub load_ldif { my $path = "${\$self->{'_tmpdir'}}/load.ldif"; write_file($path, $content); - expectOkay(1, ("slapadd", "-F", $self->{"_slapd_dir"}, "-b", $suffix, "-l", $path)); + expectOkay(5, ("slapadd", "-F", $self->{"_slapd_dir"}, "-b", $suffix, "-l", $path)); $self->validateConfig(); } diff --git a/t/queue-runner/build-locally-with-substitutable-path.t b/t/queue-runner/build-locally-with-substitutable-path.t index e3b31761..6477635a 100644 --- a/t/queue-runner/build-locally-with-substitutable-path.t +++ b/t/queue-runner/build-locally-with-substitutable-path.t @@ -39,7 +39,7 @@ subtest "Building, caching, and then garbage collecting the underlying job" => s ok(unlink(Hydra::Helper::Nix::gcRootFor($path)), "Unlinking the GC root for underlying Dependency succeeds"); - (my $ret, my $stdout, my $stderr) = captureStdoutStderr(5, "nix-store", "--delete", $path); + (my $ret, my $stdout, my $stderr) = captureStdoutStderr(15, "nix-store", "--delete", $path); is($ret, 0, "Deleting the underlying dependency should succeed"); }; diff --git a/t/scripts/hydra-create-user.t b/t/scripts/hydra-create-user.t index 71a5eda3..7f943f9d 100644 --- a/t/scripts/hydra-create-user.t +++ b/t/scripts/hydra-create-user.t @@ -9,7 +9,7 @@ my $db = $ctx->db(); subtest "Handling password and password hash creation" => sub { subtest "Creating a user with a plain text password (insecure) stores the password securely" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-create-user", "plain-text-user", "--password", "foobar")); + my ($res, $stdout, $stderr) = captureStdoutStderr(15, ("hydra-create-user", "plain-text-user", "--password", "foobar")); is($res, 0, "hydra-create-user should exit zero"); like($stderr, qr/Submitting plaintext passwords as arguments is deprecated and will be removed/, "Submitting a plain text password is deprecated."); @@ -23,7 +23,7 @@ subtest "Handling password and password hash creation" => sub { }; subtest "Creating a user with a sha1 password (still insecure) stores the password as a hashed sha1" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-create-user", "old-password-hash-user", "--password-hash", "8843d7f92416211de9ebb963ff4ce28125932878")); + 
my ($res, $stdout, $stderr) = captureStdoutStderr(15, ("hydra-create-user", "old-password-hash-user", "--password-hash", "8843d7f92416211de9ebb963ff4ce28125932878")); is($res, 0, "hydra-create-user should exit zero"); my $user = $db->resultset('Users')->find({ username => "old-password-hash-user" }); @@ -36,7 +36,7 @@ subtest "Handling password and password hash creation" => sub { }; subtest "Creating a user with an argon2 password stores the password as given" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-create-user", "argon2-hash-user", "--password-hash", '$argon2id$v=19$m=262144,t=3,p=1$tMnV5paYjmIrUIb6hylaNA$M8/e0i3NGrjhOliVLa5LqQ')); + my ($res, $stdout, $stderr) = captureStdoutStderr(15, ("hydra-create-user", "argon2-hash-user", "--password-hash", '$argon2id$v=19$m=262144,t=3,p=1$tMnV5paYjmIrUIb6hylaNA$M8/e0i3NGrjhOliVLa5LqQ')); is($res, 0, "hydra-create-user should exit zero"); my $user = $db->resultset('Users')->find({ username => "argon2-hash-user" }); @@ -50,7 +50,7 @@ subtest "Handling password and password hash creation" => sub { subtest "Creating a user by prompting for the password" => sub { subtest "with the same password twice" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(5, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nmy-password\n"); + my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(15, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nmy-password\n"); is($res, 0, "hydra-create-user should exit zero"); my $user = $db->resultset('Users')->find({ username => "prompted-pass-user" }); @@ -62,7 +62,7 @@ subtest "Handling password and password hash creation" => sub { }; subtest "With mismatched password confirmation" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(5, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nnot-my-password\n"); + my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(15, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nnot-my-password\n"); isnt($res, 0, "hydra-create-user should exit non-zero"); }; }; @@ -76,7 +76,7 @@ subtest "Handling password and password hash creation" => sub { ); for my $case (@cases) { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ( + my ($res, $stdout, $stderr) = captureStdoutStderr(15, ( "hydra-create-user", "bogus-password-options", @{$case})); like($stderr, qr/please specify only one of --password-prompt or --password-hash/, "We get an error about specifying the password"); isnt($res, 0, "hydra-create-user should exit non-zero with conflicting " . 
join(" ", @{$case})); @@ -84,7 +84,7 @@ subtest "Handling password and password hash creation" => sub { }; subtest "A password is not required for creating a Google-based account" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ( + my ($res, $stdout, $stderr) = captureStdoutStderr(15, ( "hydra-create-user", "google-account", "--type", "google")); is($res, 0, "hydra-create-user should exit zero"); }; diff --git a/t/scripts/hydra-init.t b/t/scripts/hydra-init.t index bd5bd4bf..603aa4a4 100644 --- a/t/scripts/hydra-init.t +++ b/t/scripts/hydra-init.t @@ -28,7 +28,7 @@ subtest "hydra-init upgrades user's password hashes from sha1 to sha1 inside Arg $janet->setPassword("foobar"); is($alice->password, "8843d7f92416211de9ebb963ff4ce28125932878", "Alices's sha1 is stored in the database"); - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-init")); + my ($res, $stdout, $stderr) = captureStdoutStderr(30, ("hydra-init")); if ($res != 0) { is($stdout, ""); is($stderr, ""); @@ -55,7 +55,7 @@ subtest "hydra-init upgrades user's password hashes from sha1 to sha1 inside Arg }; subtest "Running hydra-init don't break Alice or Janet's passwords" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-init")); + my ($res, $stdout, $stderr) = captureStdoutStderr(30, ("hydra-init")); is($res, 0, "hydra-init should exit zero"); my $updatedAlice = $db->resultset('Users')->find({ username => "alice" }); From a6b14369ee05c376deb04dd71062a5b95f186096 Mon Sep 17 00:00:00 2001 From: Rick van Schijndel Date: Wed, 31 Jul 2024 17:10:44 +0200 Subject: [PATCH 08/46] t/test.pl: increase event-timeout, set qvf Only log issues/failures when something's actually up. It has irked me for a long time that so much output came out of running the tests, this seems to silence it. It does hide some warnings, but I think it makes the output so much more readable that it's worth the tradeoff. Helps for highly parallel running of jobs, sometimes they'd not give output for a while. Setting this timeout higher appears to help. Not completely sure if this is the right place to do it, but it works fine for me. --- t/test.pl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/t/test.pl b/t/test.pl index ba7f3781..12284637 100644 --- a/t/test.pl +++ b/t/test.pl @@ -21,7 +21,7 @@ if (defined($ENV{"NIX_BUILD_CORES"}) print STDERR "test.pl: Defaulting \$YATH_JOB_COUNT to \$NIX_BUILD_CORES (${\$ENV{'NIX_BUILD_CORES'}})\n"; } -system($^X, find_yath(), '-D', 'test', '--default-search' => './', @ARGV); +system($^X, find_yath(), '-D', 'test', '--qvf', '--event-timeout', 240, '--default-search' => './', @ARGV); my $exit = $?; # This makes sure it works with prove. From 54002f0fcf4a7cb65baf3e25e665e5325292f609 Mon Sep 17 00:00:00 2001 From: Rick van Schijndel Date: Wed, 31 Jul 2024 17:12:47 +0200 Subject: [PATCH 09/46] t/evaluator/evaluate-oom-job.t: always skip, the test always fails We should look into how to resolve this, but I tried some things and nothing really worked. Let's put it skipped for now until someone comes along to improve it. --- t/evaluator/evaluate-oom-job.t | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/t/evaluator/evaluate-oom-job.t b/t/evaluator/evaluate-oom-job.t index 6c17d4e4..8f0450c5 100644 --- a/t/evaluator/evaluate-oom-job.t +++ b/t/evaluator/evaluate-oom-job.t @@ -31,6 +31,10 @@ if ($sd_res != 0) { skip_all("`systemd-run` returned non-zero when executing `true` (expected 0)"); } +# XXX(Mindavi): We should think about how to fix this. 
+# Note that it was always skipped on ofborg/h.n.o (nixos hydra) since systemd-run is not present in the ambient environment there. +skip_all("Always fails, an error about 'oom' being a string is logged and the process never OOMs. Needs a way to use more memory."); + my $ctx = test_context(); # Contain the memory usage to 25 MegaBytes using `systemd-run` From 2c886f51d3554bfd4246c34cc3dab15a5b6efc1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janne=20He=C3=9F?= Date: Sun, 12 Jun 2022 17:57:49 +0200 Subject: [PATCH 10/46] CompressLog: Add zstd compression --- nixos-modules/hydra.nix | 1 + src/lib/Hydra/Helper/Nix.pm | 3 +++ src/lib/Hydra/Plugin/CompressLog.pm | 21 +++++++++++++++++---- src/lib/Hydra/View/NixLog.pm | 5 ++++- 4 files changed, 25 insertions(+), 5 deletions(-) diff --git a/nixos-modules/hydra.nix b/nixos-modules/hydra.nix index 7a0486c1..576bc553 100644 --- a/nixos-modules/hydra.nix +++ b/nixos-modules/hydra.nix @@ -408,6 +408,7 @@ in requires = [ "hydra-init.service" ]; after = [ "hydra-init.service" ]; restartTriggers = [ hydraConf ]; + path = [ pkgs.zstd ]; environment = env // { PGPASSFILE = "${baseDir}/pgpass-queue-runner"; # grrr HYDRA_DBI = "${env.HYDRA_DBI};application_name=hydra-notify"; diff --git a/src/lib/Hydra/Helper/Nix.pm b/src/lib/Hydra/Helper/Nix.pm index 16637b2b..bff7a5ed 100644 --- a/src/lib/Hydra/Helper/Nix.pm +++ b/src/lib/Hydra/Helper/Nix.pm @@ -174,6 +174,9 @@ sub getDrvLogPath { for ($fn . $bucketed, $fn . $bucketed . ".bz2") { return $_ if -f $_; } + for ($fn . $bucketed, $fn . $bucketed . ".zst") { + return $_ if -f $_; + } return undef; } diff --git a/src/lib/Hydra/Plugin/CompressLog.pm b/src/lib/Hydra/Plugin/CompressLog.pm index 10e8f6cc..fe4d33b0 100644 --- a/src/lib/Hydra/Plugin/CompressLog.pm +++ b/src/lib/Hydra/Plugin/CompressLog.pm @@ -9,11 +9,24 @@ use Hydra::Helper::CatalystUtils; sub stepFinished { my ($self, $step, $logPath) = @_; - my $doCompress = $self->{config}->{'compress_build_logs'} // "1"; + my $doCompress = $self->{config}->{'compress_build_logs'} // '1'; + my $silent = $self->{config}->{'compress_build_logs_silent'} // '0'; + my $compression = $self->{config}->{'compress_build_logs_compression'} // 'bzip2'; - if ($doCompress eq "1" && -e $logPath) { - print STDERR "compressing ‘$logPath’...\n"; - system("bzip2", "--force", $logPath); + if (not -e $logPath or $doCompress ne "1") { + return; + } + + if ($silent ne '1') { + print STDERR "compressing '$logPath' with $compression...\n"; + } + + if ($compression eq 'bzip2') { + system('bzip2', '--force', $logPath); + } elsif ($compression eq 'zstd') { + system('zstd', '--rm', '--quiet', '-T0', $logPath); + } else { + print STDERR "unknown compression type '$compression'\n"; } } diff --git a/src/lib/Hydra/View/NixLog.pm b/src/lib/Hydra/View/NixLog.pm index 7f37ae78..fe37d900 100644 --- a/src/lib/Hydra/View/NixLog.pm +++ b/src/lib/Hydra/View/NixLog.pm @@ -16,7 +16,10 @@ sub process { my $tail = int($c->stash->{tail} // "0"); - if ($logPath =~ /\.bz2$/) { + if ($logPath =~ /\.zst$/) { + my $doTail = $tail ? "| tail -n '$tail'" : ""; + open($fh, "-|", "zstd -dc < '$logPath' $doTail") or die; + } elsif ($logPath =~ /\.bz2$/) { my $doTail = $tail ? 
"| tail -n '$tail'" : ""; open($fh, "-|", "bzip2 -dc < '$logPath' $doTail") or die; } else { From 99ca560d583239b1bc34485f2f719989642fa697 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sandro=20J=C3=A4ckel?= Date: Thu, 8 Aug 2024 11:24:16 +0200 Subject: [PATCH 11/46] Use configured compression in hydra-compress-logs service --- nixos-modules/hydra.nix | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/nixos-modules/hydra.nix b/nixos-modules/hydra.nix index 576bc553..97906cb3 100644 --- a/nixos-modules/hydra.nix +++ b/nixos-modules/hydra.nix @@ -459,10 +459,15 @@ in # logs automatically after a step finishes, but this doesn't work # if the queue runner is stopped prematurely. systemd.services.hydra-compress-logs = - { path = [ pkgs.bzip2 ]; + { path = [ pkgs.bzip2 pkgs.zstd ]; script = '' - find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r bzip2 -v -f + set -eou pipefail + compression=$(sed -nr 's/compress_build_logs_compression = ()/\1/p' ${baseDir}/hydra.conf) + if [[ $compression == zstd ]]; then + compression="zstd --rm" + fi + find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r $compression --force --quiet ''; startAt = "Sun 01:45"; }; From b2b2d6e26ce8a212541fca780985216279c3c00c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sandro=20J=C3=A4ckel?= Date: Thu, 8 Aug 2024 13:33:08 +0200 Subject: [PATCH 12/46] Expand docs with new compression options --- doc/manual/src/plugins/README.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/doc/manual/src/plugins/README.md b/doc/manual/src/plugins/README.md index 6e46224f..93aa80b4 100644 --- a/doc/manual/src/plugins/README.md +++ b/doc/manual/src/plugins/README.md @@ -42,7 +42,7 @@ Sets CircleCI status. ## Compress build logs -Compresses build logs after a build with bzip2. +Compresses build logs after a build with bzip2 or zstd. ### Configuration options @@ -50,6 +50,14 @@ Compresses build logs after a build with bzip2. Enable log compression +- `compress_build_logs_compression` + +Which compression format to use. Valid values are bzip2 (default) and zstd. + +- `compress_build_logs_silent` + +Whether to compress logs silently. + ### Example ```xml From 250780aaf2c185b4b8530ae82dd67dc8e00f4345 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Tue, 20 Aug 2024 08:15:18 +0200 Subject: [PATCH 13/46] tests: use `like` for testing regexes This gives us better diagnostics when the test fails. --- t/Hydra/Controller/Job/builds.t | 13 +++++++------ t/Hydra/Controller/Jobset/http.t | 2 +- t/Hydra/Controller/User/dashboard.t | 2 +- t/api-test.t | 2 +- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/t/Hydra/Controller/Job/builds.t b/t/Hydra/Controller/Job/builds.t index d933fbb8..aa6a36ce 100644 --- a/t/Hydra/Controller/Job/builds.t +++ b/t/Hydra/Controller/Job/builds.t @@ -54,13 +54,14 @@ subtest "/job/PROJECT/JOBSET/JOB/shield" => sub { subtest "/job/PROJECT/JOBSET/JOB/prometheus" => sub { my $response = request(GET '/job/' . $project->name . '/' . $jobset->name . '/' . $build->job . 
'/prometheus'); - ok($response->is_success, "The page showing the job's prometheus data returns 200."); - my $metrics = $response->content; - ok($metrics =~ m/hydra_job_failed\{.*\} 0/); - ok($metrics =~ m/hydra_job_completion_time\{.*\} [\d]+/); - ok($metrics =~ m/hydra_build_closure_size\{.*\} 96/); - ok($metrics =~ m/hydra_build_output_size\{.*\} 96/); + ok($response->is_success, "The page showing the job's prometheus data returns 200."); + + my $metrics = $response->content; + like($metrics, qr/hydra_job_failed\{.*\} 0/); + like($metrics, qr/hydra_job_completion_time\{.*\} [\d]+/); + like($metrics, qr/hydra_build_closure_size\{.*\} 96/); + like($metrics, qr/hydra_build_output_size\{.*\} 96/); }; done_testing; diff --git a/t/Hydra/Controller/Jobset/http.t b/t/Hydra/Controller/Jobset/http.t index 4e53949d..1a60715c 100644 --- a/t/Hydra/Controller/Jobset/http.t +++ b/t/Hydra/Controller/Jobset/http.t @@ -186,7 +186,7 @@ subtest 'Update jobset "job" to have an invalid input type' => sub { }) ); ok(!$jobsetupdate->is_success); - ok($jobsetupdate->content =~ m/Invalid input type.*valid types:/); + like($jobsetupdate->content, qr/Invalid input type.*valid types:/); }; diff --git a/t/Hydra/Controller/User/dashboard.t b/t/Hydra/Controller/User/dashboard.t index 8a24585d..12d7dd80 100644 --- a/t/Hydra/Controller/User/dashboard.t +++ b/t/Hydra/Controller/User/dashboard.t @@ -24,7 +24,7 @@ my $cookie = $login->header("set-cookie"); my $my_jobs = request(GET '/dashboard/alice/my-jobs-tab', Accept => 'application/json', Cookie => $cookie); ok($my_jobs->is_success); my $content = $my_jobs->content(); -ok($content =~ /empty_dir/); +like($content, qr/empty_dir/); ok(!($content =~ /fails/)); ok(!($content =~ /succeed_with_failed/)); done_testing; diff --git a/t/api-test.t b/t/api-test.t index e89d4460..637d412c 100644 --- a/t/api-test.t +++ b/t/api-test.t @@ -115,7 +115,7 @@ subtest "evaluation" => sub { my $build = decode_json(request_json({ uri => "/build/" . 
$evals->[0]->{builds}->[0] })->content());
     is($build->{job}, "job", "The build's job name is job");
     is($build->{finished}, 0, "The build isn't finished yet");
-    ok($build->{buildoutputs}->{out}->{path} =~ /\/nix\/store\/[a-zA-Z0-9]{32}-job$/, "The build's outpath is in the Nix store and named 'job'");
+    like($build->{buildoutputs}->{out}->{path}, qr/\/nix\/store\/[a-zA-Z0-9]{32}-job$/, "The build's outpath is in the Nix store and named 'job'");
 
     subtest "search" => sub {
         my $search_project = decode_json(request_json({ uri => "/search/?query=sample" })->content());

From 54a9729a0f257b9bb011973ebcae1a42f6284e8b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=B6rg=20Thalheim?=
Date: Tue, 20 Aug 2024 07:40:22 +0200
Subject: [PATCH 14/46] hacking.md: mention nix develop

---
 doc/manual/src/hacking.md | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/doc/manual/src/hacking.md b/doc/manual/src/hacking.md
index de826720..1f1761a7 100644
--- a/doc/manual/src/hacking.md
+++ b/doc/manual/src/hacking.md
@@ -15,6 +15,12 @@ and dependencies can be found:
 $ nix-shell
 ```
 
+or, when flakes are enabled:
+
+```console
+$ nix develop
+```
+
 To build Hydra, you should then do:
 
 ```console

From 02a514234bcb15e20241d98b2004e0292b4cc160 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=B6rg=20Thalheim?=
Date: Tue, 20 Aug 2024 07:40:33 +0200
Subject: [PATCH 15/46] hacking.md: make build parallel

---
 doc/manual/src/hacking.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/hacking.md b/doc/manual/src/hacking.md
index 1f1761a7..ec96b8c6 100644
--- a/doc/manual/src/hacking.md
+++ b/doc/manual/src/hacking.md
@@ -26,7 +26,7 @@ To build Hydra, you should then do:
 ```console
 [nix-shell]$ autoreconfPhase
 [nix-shell]$ configurePhase
-[nix-shell]$ make
+[nix-shell]$ make -j$(nproc)
 ```
 
 You start a local database, the webserver, and other components with

From b94a7b6d5c56362af9ea85d944f8454d861ec001 Mon Sep 17 00:00:00 2001
From: hacker1024
Date: Wed, 21 Aug 2024 22:13:38 +1000
Subject: [PATCH 16/46] Use Nix::Store and Nix::Utils in NARInfo.pm

These are required for the `signString` and `readFile` subroutines used
when signing NARs.
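For context, a minimal sketch of how the two imported subroutines cooperate
when signing a narinfo (illustrative values only; the real view code builds
the fingerprint with a helper from `Nix::Manifest`, and `$secretKeyFile`
comes from Hydra's configuration):

```perl
use strict;
use warnings;
use Nix::Store;  # provides signString
use Nix::Utils;  # provides readFile

# Placeholder inputs; the real code derives these from the requested store path.
my $secretKeyFile = "/etc/nix/signing-key.sec";
my $storePath     = "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-job";
my $narHash       = "sha256:1b4sb93wp679q4zx9k1ignby1yna3z7c4c2ri3wphylbc2dwsys0";
my $narSize       = 96;
my @references    = ();

# The narinfo fingerprint is "1;<path>;<narHash>;<narSize>;<comma-separated refs>";
# signing it with the secret key yields the narinfo's signature field.
my $fingerprint = "1;$storePath;$narHash;$narSize;" . join(",", @references);
my $sig = signString(readFile($secretKeyFile), $fingerprint);
```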
--- src/lib/Hydra/View/NARInfo.pm | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/lib/Hydra/View/NARInfo.pm b/src/lib/Hydra/View/NARInfo.pm index bf8711a4..801fc06a 100644 --- a/src/lib/Hydra/View/NARInfo.pm +++ b/src/lib/Hydra/View/NARInfo.pm @@ -6,6 +6,8 @@ use File::Basename; use Hydra::Helper::CatalystUtils; use MIME::Base64; use Nix::Manifest; +use Nix::Store; +use Nix::Utils; use Hydra::Helper::Nix; use base qw/Catalyst::View/; From 0ead8dc65c5d4505aaf67769f8b89e3a9c359f35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janne=20He=C3=9F?= Date: Tue, 27 Aug 2024 17:44:19 +0200 Subject: [PATCH 17/46] default.nix: Drop URL literal --- default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/default.nix b/default.nix index d4c7ec29..b81119c3 100644 --- a/default.nix +++ b/default.nix @@ -1,6 +1,6 @@ # The `default.nix` in flake-compat reads `flake.nix` and `flake.lock` from `src` and # returns an attribute set of the shape `{ defaultNix, shellNix }` -(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) { +(import (fetchTarball "https://github.com/edolstra/flake-compat/archive/master.tar.gz") { src = ./.; }).defaultNix From 916531dc9ccee52e6dab256232933fcf6d198158 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janne=20He=C3=9F?= Date: Tue, 27 Aug 2024 17:34:48 +0200 Subject: [PATCH 18/46] api: Require POST for /api/push --- hydra-api.yaml | 2 +- src/lib/Hydra/Controller/API.pm | 2 ++ src/lib/Hydra/Helper/CatalystUtils.pm | 2 +- src/root/jobset.tt | 1 + t/Hydra/Controller/API/checks.t | 4 ++-- 5 files changed, 7 insertions(+), 4 deletions(-) diff --git a/hydra-api.yaml b/hydra-api.yaml index 623c9082..a2fdea28 100644 --- a/hydra-api.yaml +++ b/hydra-api.yaml @@ -70,7 +70,7 @@ paths: $ref: '#/components/examples/projects-success' /api/push: - put: + post: summary: trigger jobsets parameters: - in: query diff --git a/src/lib/Hydra/Controller/API.pm b/src/lib/Hydra/Controller/API.pm index 06f35d4b..cd005551 100644 --- a/src/lib/Hydra/Controller/API.pm +++ b/src/lib/Hydra/Controller/API.pm @@ -239,6 +239,8 @@ sub triggerJobset { sub push : Chained('api') PathPart('push') Args(0) { my ($self, $c) = @_; + requirePost($c); + $c->{stash}->{json}->{jobsetsTriggered} = []; my $force = exists $c->request->query_params->{force}; diff --git a/src/lib/Hydra/Helper/CatalystUtils.pm b/src/lib/Hydra/Helper/CatalystUtils.pm index 2a2ad86f..15d50b1a 100644 --- a/src/lib/Hydra/Helper/CatalystUtils.pm +++ b/src/lib/Hydra/Helper/CatalystUtils.pm @@ -272,7 +272,7 @@ sub requireAdmin { sub requirePost { my ($c) = @_; - error($c, "Request must be POSTed.") if $c->request->method ne "POST"; + error($c, "Request must be POSTed.", 405) if $c->request->method ne "POST"; } diff --git a/src/root/jobset.tt b/src/root/jobset.tt index 56abdb50..5d8345f9 100644 --- a/src/root/jobset.tt +++ b/src/root/jobset.tt @@ -205,6 +205,7 @@ if (!c) return; requestJSON({ url: "[% HTML.escape(c.uri_for('/api/push', { jobsets = project.name _ ':' _ jobset.name, force = "1" })) %]", + type: 'POST', success: function(data) { bootbox.alert("The jobset has been scheduled for evaluation."); } diff --git a/t/Hydra/Controller/API/checks.t b/t/Hydra/Controller/API/checks.t index 2b97b489..f0f51f1c 100644 --- a/t/Hydra/Controller/API/checks.t +++ b/t/Hydra/Controller/API/checks.t @@ -109,7 +109,7 @@ subtest "/api/push" => sub { my $jobsetName = $jobset->name; is($jobset->forceeval, undef, "The existing jobset is not set to be forced to eval"); - my $response = request(GET 
"/api/push?jobsets=$projectName:$jobsetName&force=1"); + my $response = request(POST "/api/push?jobsets=$projectName:$jobsetName&force=1"); ok($response->is_success, "The API enpdoint for triggering jobsets returns 200."); my $data = is_json($response); @@ -128,7 +128,7 @@ subtest "/api/push" => sub { print STDERR $repo; - my $response = request(GET "/api/push?repos=$repo&force=1"); + my $response = request(POST "/api/push?repos=$repo&force=1"); ok($response->is_success, "The API enpdoint for triggering jobsets returns 200."); my $data = is_json($response); From f73043378907c2c7e44f633ad764c8bdd1c947d5 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 27 Aug 2024 17:00:00 +0200 Subject: [PATCH 19/46] Create eval-jobset role and guard /api/push route --- doc/manual/src/configuration.md | 3 ++- src/lib/Hydra/Config.pm | 2 ++ src/lib/Hydra/Controller/API.pm | 7 ++++++- src/lib/Hydra/Helper/CatalystUtils.pm | 22 ++++++++++++++++++++++ src/root/user.tt | 1 + t/Hydra/Config/ldap_role_map.t | 2 ++ t/Hydra/Controller/API/checks.t | 27 ++++++++++++++++++++++++--- t/Hydra/Controller/User/ldap-legacy.t | 3 ++- t/Hydra/Controller/User/ldap.t | 5 ++++- 9 files changed, 65 insertions(+), 7 deletions(-) diff --git a/doc/manual/src/configuration.md b/doc/manual/src/configuration.md index 4954040c..d370312a 100644 --- a/doc/manual/src/configuration.md +++ b/doc/manual/src/configuration.md @@ -208,7 +208,8 @@ Example configuration: # Make all users in the hydra_admin group Hydra admins hydra_admin = admin - # Allow all users in the dev group to restart jobs and cancel builds + # Allow all users in the dev group to eval jobsets, restart jobs and cancel builds + dev = eval-jobset dev = restart-jobs dev = cancel-build diff --git a/src/lib/Hydra/Config.pm b/src/lib/Hydra/Config.pm index af686fca..6aae5a5e 100644 --- a/src/lib/Hydra/Config.pm +++ b/src/lib/Hydra/Config.pm @@ -95,6 +95,7 @@ sub get_legacy_ldap_config { "hydra_bump-to-front" => [ "bump-to-front" ], "hydra_cancel-build" => [ "cancel-build" ], "hydra_create-projects" => [ "create-projects" ], + "hydra_eval-jobset" => [ "eval-jobset" ], "hydra_restart-jobs" => [ "restart-jobs" ], }, }; @@ -159,6 +160,7 @@ sub valid_roles { "bump-to-front", "cancel-build", "create-projects", + "eval-jobset", "restart-jobs", ]; } diff --git a/src/lib/Hydra/Controller/API.pm b/src/lib/Hydra/Controller/API.pm index cd005551..9f8b7cba 100644 --- a/src/lib/Hydra/Controller/API.pm +++ b/src/lib/Hydra/Controller/API.pm @@ -248,19 +248,24 @@ sub push : Chained('api') PathPart('push') Args(0) { foreach my $s (@jobsets) { my ($p, $j) = parseJobsetName($s); my $jobset = $c->model('DB::Jobsets')->find($p, $j); + requireEvalJobsetPrivileges($c, $jobset->project); next unless defined $jobset && ($force || ($jobset->project->enabled && $jobset->enabled)); triggerJobset($self, $c, $jobset, $force); } my @repos = split /,/, ($c->request->query_params->{repos} // ""); foreach my $r (@repos) { - triggerJobset($self, $c, $_, $force) foreach $c->model('DB::Jobsets')->search( + my @jobsets = $c->model('DB::Jobsets')->search( { 'project.enabled' => 1, 'me.enabled' => 1 }, { join => 'project', where => \ [ 'exists (select 1 from JobsetInputAlts where project = me.project and jobset = me.name and value = ?)', [ 'value', $r ] ], order_by => 'me.id DESC' }); + foreach my $jobset (@jobsets) { + requireEvalJobsetPrivileges($c, $jobset->project); + triggerJobset($self, $c, $jobset, $force) + } } $self->status_ok( diff --git a/src/lib/Hydra/Helper/CatalystUtils.pm 
b/src/lib/Hydra/Helper/CatalystUtils.pm index 15d50b1a..6ccdbc4d 100644 --- a/src/lib/Hydra/Helper/CatalystUtils.pm +++ b/src/lib/Hydra/Helper/CatalystUtils.pm @@ -15,6 +15,7 @@ our @EXPORT = qw( forceLogin requireUser requireProjectOwner requireRestartPrivileges requireAdmin requirePost isAdmin isProjectOwner requireBumpPrivileges requireCancelBuildPrivileges + requireEvalJobsetPrivileges trim getLatestFinishedEval getFirstEval paramToList @@ -186,6 +187,27 @@ sub isProjectOwner { defined $c->model('DB::ProjectMembers')->find({ project => $project, userName => $c->user->username })); } +sub hasEvalJobsetRole { + my ($c) = @_; + return $c->user_exists && $c->check_user_roles("eval-jobset"); +} + +sub mayEvalJobset { + my ($c, $project) = @_; + return + $c->user_exists && + (isAdmin($c) || + hasEvalJobsetRole($c) || + isProjectOwner($c, $project)); +} + +sub requireEvalJobsetPrivileges { + my ($c, $project) = @_; + requireUser($c); + accessDenied($c, "Only the project members, administrators, and accounts with eval-jobset privileges can perform this operation.") + unless mayEvalJobset($c, $project); +} + sub hasCancelBuildRole { my ($c) = @_; return $c->user_exists && $c->check_user_roles('cancel-build'); diff --git a/src/root/user.tt b/src/root/user.tt index 76f85850..04eb6e68 100644 --- a/src/root/user.tt +++ b/src/root/user.tt @@ -91,6 +91,7 @@ [% INCLUDE roleoption mutable=mutable role="restart-jobs" %] [% INCLUDE roleoption mutable=mutable role="bump-to-front" %] [% INCLUDE roleoption mutable=mutable role="cancel-build" %] + [% INCLUDE roleoption mutable=mutable role="eval-jobset" %]

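For deployments that manage accounts locally rather than through LDAP, the
new role can be granted in the usual way, for example (assuming
`hydra-create-user`'s standard `--role` flag, which may be given multiple
times):

```console
$ hydra-create-user alice --role eval-jobset
```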
diff --git a/t/Hydra/Config/ldap_role_map.t b/t/Hydra/Config/ldap_role_map.t index cb1adf46..9287c782 100644 --- a/t/Hydra/Config/ldap_role_map.t +++ b/t/Hydra/Config/ldap_role_map.t @@ -57,6 +57,7 @@ subtest "getLDAPConfig" => sub { "hydra_cancel-build" => [ "cancel-build" ], "hydra_create-projects" => [ "create-projects" ], "hydra_restart-jobs" => [ "restart-jobs" ], + "hydra_eval-jobset" => [ "eval-jobset" ], } }, "The empty file and set env var make legacy mode active." @@ -177,6 +178,7 @@ subtest "get_legacy_ldap_config" => sub { "hydra_cancel-build" => [ "cancel-build" ], "hydra_create-projects" => [ "create-projects" ], "hydra_restart-jobs" => [ "restart-jobs" ], + "hydra_eval-jobset" => [ "eval-jobset" ], } }, "Legacy, default role maps are applied." diff --git a/t/Hydra/Controller/API/checks.t b/t/Hydra/Controller/API/checks.t index f0f51f1c..e4c72ff2 100644 --- a/t/Hydra/Controller/API/checks.t +++ b/t/Hydra/Controller/API/checks.t @@ -22,9 +22,24 @@ sub is_json { } my $ctx = test_context(); - Catalyst::Test->import('Hydra'); +# Create a user to log in to +my $user = $ctx->db->resultset('Users')->create({ username => 'alice', emailaddress => 'alice@example.com', password => '!' }); +$user->setPassword('foobar'); +$user->userroles->update_or_create({ role => 'admin' }); + +# Login and save cookie for future requests +my $req = request(POST '/login', + Referer => 'http://localhost/', + Content => { + username => 'alice', + password => 'foobar' + } +); +is($req->code, 302, "The login redirects"); +my $cookie = $req->header("set-cookie"); + my $finishedBuilds = $ctx->makeAndEvaluateJobset( expression => "one-job.nix", build => 1 @@ -109,7 +124,10 @@ subtest "/api/push" => sub { my $jobsetName = $jobset->name; is($jobset->forceeval, undef, "The existing jobset is not set to be forced to eval"); - my $response = request(POST "/api/push?jobsets=$projectName:$jobsetName&force=1"); + my $response = request(POST "/api/push?jobsets=$projectName:$jobsetName&force=1", + Cookie => $cookie, + Referer => 'http://localhost/', + ); ok($response->is_success, "The API enpdoint for triggering jobsets returns 200."); my $data = is_json($response); @@ -128,7 +146,10 @@ subtest "/api/push" => sub { print STDERR $repo; - my $response = request(POST "/api/push?repos=$repo&force=1"); + my $response = request(POST "/api/push?repos=$repo&force=1", + Cookie => $cookie, + Referer => 'http://localhost/', + ); ok($response->is_success, "The API enpdoint for triggering jobsets returns 200."); my $data = is_json($response); diff --git a/t/Hydra/Controller/User/ldap-legacy.t b/t/Hydra/Controller/User/ldap-legacy.t index 9cb197c0..19f0c6bf 100644 --- a/t/Hydra/Controller/User/ldap-legacy.t +++ b/t/Hydra/Controller/User/ldap-legacy.t @@ -24,6 +24,7 @@ $ldap->add_group("hydra_create-projects", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_restart-jobs", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_bump-to-front", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_cancel-build", $users->{"many_roles"}->{"username"}); +$ldap->add_group("hydra_eval-jobset", $users->{"many_roles"}->{"username"}); my $hydra_ldap_config = "${\$ldap->tmpdir()}/hydra_ldap_config.yaml"; LDAPContext::write_file($hydra_ldap_config, < sub { unrelated => [], admin => ["admin"], not_admin => [], - many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build" ], + many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build", "eval-jobset" ], ); for my $username 
(keys %users_to_roles) { my $user = $users->{$username}; diff --git a/t/Hydra/Controller/User/ldap.t b/t/Hydra/Controller/User/ldap.t index 175b66aa..050fde23 100644 --- a/t/Hydra/Controller/User/ldap.t +++ b/t/Hydra/Controller/User/ldap.t @@ -24,6 +24,7 @@ $ldap->add_group("hydra_create-projects", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_restart-jobs", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_bump-to-front", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_cancel-build", $users->{"many_roles"}->{"username"}); +$ldap->add_group("hydra_eval-jobset", $users->{"many_roles"}->{"username"}); my $ctx = test_context( @@ -76,10 +77,12 @@ my $ctx = test_context( hydra_cancel-build = cancel-build hydra_bump-to-front = bump-to-front hydra_restart-jobs = restart-jobs + hydra_eval-jobset = eval-jobset hydra_one_group_many_roles = create-projects hydra_one_group_many_roles = cancel-build hydra_one_group_many_roles = bump-to-front + hydra_one_group_many-roles = eval-jobset CFG @@ -92,7 +95,7 @@ subtest "Valid login attempts" => sub { unrelated => [], admin => ["admin"], not_admin => [], - many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build" ], + many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build", "eval-jobset" ], many_roles_one_group => [ "create-projects", "bump-to-front", "cancel-build" ], ); for my $username (keys %users_to_roles) { From 2dad87ad89ee6e483516c9bc0da21c60977fe024 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Fri, 20 Sep 2024 07:50:24 +0200 Subject: [PATCH 20/46] hydra-queue-runner: fix compilation warning instead of converting to double, we can convert to float right away. --- src/hydra-queue-runner/hydra-queue-runner.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hydra-queue-runner/hydra-queue-runner.cc b/src/hydra-queue-runner/hydra-queue-runner.cc index 5ffa7fe6..03b322f8 100644 --- a/src/hydra-queue-runner/hydra-queue-runner.cc +++ b/src/hydra-queue-runner/hydra-queue-runner.cc @@ -164,7 +164,7 @@ void State::parseMachines(const std::string & contents) ? 
string2Int(tokens[3]).value() : 1, // `speedFactor` - atof(tokens[4].c_str()), + std::stof(tokens[4].c_str()), // `supportedFeatures` std::move(supportedFeatures), // `mandatoryFeatures` From ae787e5799cc637824b1101ca9edb04ade1c0610 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Fri, 20 Sep 2024 08:49:32 +0200 Subject: [PATCH 21/46] bump nix to 2.24 --- flake.lock | 108 ++++++++++++++++++++++++++-------------------------- flake.nix | 5 ++- package.nix | 3 +- 3 files changed, 60 insertions(+), 56 deletions(-) diff --git a/flake.lock b/flake.lock index e828aa7b..246b5380 100644 --- a/flake.lock +++ b/flake.lock @@ -37,33 +37,50 @@ "type": "github" } }, - "flake-utils": { + "git-hooks-nix": { + "inputs": { + "flake-compat": [ + "nix" + ], + "gitignore": [ + "nix" + ], + "nixpkgs": [ + "nix", + "nixpkgs" + ], + "nixpkgs-stable": [ + "nix", + "nixpkgs" + ] + }, "locked": { - "lastModified": 1667395993, - "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f", + "lastModified": 1726745158, + "narHash": "sha256-D5AegvGoEjt4rkKedmxlSEmC+nNLMBPWFxvmYnVLhjk=", + "owner": "cachix", + "repo": "git-hooks.nix", + "rev": "4e743a6920eab45e8ba0fbe49dc459f1423a4b74", "type": "github" }, "original": { - "owner": "numtide", - "repo": "flake-utils", + "owner": "cachix", + "repo": "git-hooks.nix", "type": "github" } }, "libgit2": { "flake": false, "locked": { - "lastModified": 1697646580, - "narHash": "sha256-oX4Z3S9WtJlwvj0uH9HlYcWv+x1hqp8mhXl7HsLu2f0=", + "lastModified": 1715853528, + "narHash": "sha256-J2rCxTecyLbbDdsyBWn9w7r3pbKRMkI9E7RvRgAqBdY=", "owner": "libgit2", "repo": "libgit2", - "rev": "45fd9ed7ae1a9b74b957ef4f337bc3c8b3df01b5", + "rev": "36f7e21ad757a3dacc58cf7944329da6bc1d6e96", "type": "github" }, "original": { "owner": "libgit2", + "ref": "v1.8.1", "repo": "libgit2", "type": "github" } @@ -72,44 +89,61 @@ "inputs": { "flake-compat": "flake-compat", "flake-parts": "flake-parts", + "git-hooks-nix": "git-hooks-nix", "libgit2": "libgit2", "nixpkgs": [ "nixpkgs" ], - "nixpkgs-regression": "nixpkgs-regression", - "pre-commit-hooks": "pre-commit-hooks" + "nixpkgs-23-11": "nixpkgs-23-11", + "nixpkgs-regression": "nixpkgs-regression" }, "locked": { - "lastModified": 1713874370, - "narHash": "sha256-gW1mO/CvsQQ5gvgiwzxsGhPFI/tx30NING+qgF5Do0s=", + "lastModified": 1726787955, + "narHash": "sha256-XFznzb8L4SdUm9u+w3DPpMWJhffuv+/6+aiVl00slns=", "owner": "NixOS", "repo": "nix", - "rev": "1c8150ac312b5f9ba1b3f6768ff43b09867e5883", + "rev": "a7fdef6858dd45b9d7bda7c92324c63faee7f509", "type": "github" }, "original": { "owner": "NixOS", - "ref": "2.22-maintenance", + "ref": "2.24-maintenance", "repo": "nix", "type": "github" } }, "nixpkgs": { "locked": { - "lastModified": 1712848736, - "narHash": "sha256-CzZwhqyLlebljv1zFS2KWVH/3byHND0LfaO1jKsGuVo=", + "lastModified": 1726688310, + "narHash": "sha256-Xc9lEtentPCEtxc/F1e6jIZsd4MPDYv4Kugl9WtXlz0=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "1d6a23f11e44d0fb64b3237569b87658a9eb5643", + "rev": "dbebdd67a6006bb145d98c8debf9140ac7e651d0", "type": "github" }, "original": { "owner": "NixOS", - "ref": "nixos-23.11-small", + "ref": "nixos-24.05-small", "repo": "nixpkgs", "type": "github" } }, + "nixpkgs-23-11": { + "locked": { + "lastModified": 1717159533, + "narHash": "sha256-oamiKNfr2MS6yH64rUn99mIZjc45nGJlj9eGth/3Xuw=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "a62e6edd6d5e1fa0329b8653c801147986f8d446", + "type": "github" + 
}, + "original": { + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "a62e6edd6d5e1fa0329b8653c801147986f8d446", + "type": "github" + } + }, "nixpkgs-regression": { "locked": { "lastModified": 1643052045, @@ -126,38 +160,6 @@ "type": "github" } }, - "pre-commit-hooks": { - "inputs": { - "flake-compat": [ - "nix" - ], - "flake-utils": "flake-utils", - "gitignore": [ - "nix" - ], - "nixpkgs": [ - "nix", - "nixpkgs" - ], - "nixpkgs-stable": [ - "nix", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1712897695, - "narHash": "sha256-nMirxrGteNAl9sWiOhoN5tIHyjBbVi5e2tgZUgZlK3Y=", - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "rev": "40e6053ecb65fcbf12863338a6dcefb3f55f1bf8", - "type": "github" - }, - "original": { - "owner": "cachix", - "repo": "pre-commit-hooks.nix", - "type": "github" - } - }, "root": { "inputs": { "nix": "nix", diff --git a/flake.nix b/flake.nix index 1f73e3b3..19381efd 100644 --- a/flake.nix +++ b/flake.nix @@ -1,8 +1,8 @@ { description = "A Nix-based continuous build system"; - inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11-small"; - inputs.nix.url = "github:NixOS/nix/2.22-maintenance"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.05-small"; + inputs.nix.url = "github:NixOS/nix/2.24-maintenance"; inputs.nix.inputs.nixpkgs.follows = "nixpkgs"; outputs = { self, nixpkgs, nix }: @@ -25,6 +25,7 @@ hydra = final.callPackage ./package.nix { inherit (nixpkgs.lib) fileset; rawSrc = self; + nix-perl-bindings = final.nixComponents.nix-perl-bindings; }; }; diff --git a/package.nix b/package.nix index f8b1849f..73dd0f2a 100644 --- a/package.nix +++ b/package.nix @@ -9,6 +9,7 @@ , perlPackages , nix +, nix-perl-bindings , git , makeWrapper @@ -59,7 +60,7 @@ let name = "hydra-perl-deps"; paths = lib.closePropagation ([ - nix.perl-bindings + nix-perl-bindings git ] ++ (with perlPackages; [ AuthenSASL From 0231453cc5d21a802a630ca0e0ebaab5738f7b82 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Fri, 20 Sep 2024 08:49:46 +0200 Subject: [PATCH 22/46] hydra-eval-jobs: fix build against nix 2.24 --- src/hydra-eval-jobs/Makefile.am | 2 +- src/hydra-eval-jobs/hydra-eval-jobs.cc | 14 +++++++++++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/src/hydra-eval-jobs/Makefile.am b/src/hydra-eval-jobs/Makefile.am index 90742a30..fb67f89b 100644 --- a/src/hydra-eval-jobs/Makefile.am +++ b/src/hydra-eval-jobs/Makefile.am @@ -1,5 +1,5 @@ bin_PROGRAMS = hydra-eval-jobs hydra_eval_jobs_SOURCES = hydra-eval-jobs.cc -hydra_eval_jobs_LDADD = $(NIX_LIBS) -lnixcmd +hydra_eval_jobs_LDADD = $(NIX_LIBS) -lnixcmd -lnixflake hydra_eval_jobs_CXXFLAGS = $(NIX_CFLAGS) -I ../libhydra diff --git a/src/hydra-eval-jobs/hydra-eval-jobs.cc b/src/hydra-eval-jobs/hydra-eval-jobs.cc index 5adea42b..b83cae91 100644 --- a/src/hydra-eval-jobs/hydra-eval-jobs.cc +++ b/src/hydra-eval-jobs/hydra-eval-jobs.cc @@ -6,6 +6,7 @@ #include "shared.hh" #include "store-api.hh" #include "eval.hh" +#include "eval-gc.hh" #include "eval-inline.hh" #include "eval-settings.hh" #include "signals.hh" @@ -125,11 +126,14 @@ static void worker( if (myArgs.flake) { using namespace flake; - auto flakeRef = parseFlakeRef(myArgs.releaseExpr); + auto [flakeRef, fragment, outputSpec] = parseFlakeRefWithFragmentAndExtendedOutputsSpec(fetchSettings, myArgs.releaseExpr, absPath(".")); auto vFlake = state.allocValue(); - auto lockedFlake = lockFlake(state, flakeRef, + auto lockedFlake = lockFlake( + flakeSettings, + state, + flakeRef, LockFlags { .updateLockFile = false, .useRegistries = 
false,
@@ -368,7 +372,11 @@ int main(int argc, char * * argv)
         ]() {
             try {
-                EvalState state(myArgs.lookupPath, openStore());
+                auto evalStore = myArgs.evalStoreUrl
+                    ? openStore(*myArgs.evalStoreUrl)
+                    : openStore();
+                EvalState state(myArgs.lookupPath,
+                    evalStore, fetchSettings, evalSettings);
                 Bindings & autoArgs = *myArgs.getAutoArgs(state);
                 worker(state, autoArgs, *to, *from);
             } catch (Error & e) {

From b472f55563285ae9ecfa1322530a31a81cc3d42d Mon Sep 17 00:00:00 2001
From: zowoq <59103226+zowoq@users.noreply.github.com>
Date: Sun, 8 Sep 2024 12:15:34 +1000
Subject: [PATCH 23/46] set a default for hydra-compress-logs service

follow up from 99ca560d583239b1bc34485f2f719989642fa697
---
 nixos-modules/hydra.nix | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/nixos-modules/hydra.nix b/nixos-modules/hydra.nix
index 97906cb3..d001675d 100644
--- a/nixos-modules/hydra.nix
+++ b/nixos-modules/hydra.nix
@@ -464,10 +464,12 @@ in
         ''
           set -eou pipefail
           compression=$(sed -nr 's/compress_build_logs_compression = (.*)/\1/p' ${baseDir}/hydra.conf)
-          if [[ $compression == zstd ]]; then
+          if [[ $compression == "" ]]; then
+            compression="bzip2"
+          elif [[ $compression == zstd ]]; then
             compression="zstd --rm"
           fi
-          find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r $compression --force --quiet
+          find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r "$compression" --force --quiet
         '';
         startAt = "Sun 01:45";
       };

From cc1b6d394e2e65575b29093b6a63af7a7b30e4ec Mon Sep 17 00:00:00 2001
From: Michael Bishop
Date: Fri, 20 Sep 2024 04:08:22 -0300
Subject: [PATCH 24/46] fix the binary cache calling isValidPath, and 2 other
 files with the same issue

---
 src/lib/Hydra/Controller/Root.pm        | 2 +-
 src/lib/Hydra/Plugin/S3Backup.pm        | 2 +-
 src/lib/Hydra/Plugin/SubversionInput.pm | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/lib/Hydra/Controller/Root.pm b/src/lib/Hydra/Controller/Root.pm
index 47389251..75d35ac3 100644
--- a/src/lib/Hydra/Controller/Root.pm
+++ b/src/lib/Hydra/Controller/Root.pm
@@ -329,7 +329,7 @@ sub nar :Local :Args(1) {
     else {
         $path = $Nix::Config::storeDir . "/$path";
 
-        gone($c, "Path " . $path . " is no longer available.") unless isValidPath($path);
+        gone($c, "Path " . $path . " is no longer available.") unless $MACHINE_LOCAL_STORE->isValidPath($path);
 
         $c->stash->{current_view} = 'NixNAR';
         $c->stash->{storePath} = $path;
diff --git a/src/lib/Hydra/Plugin/S3Backup.pm b/src/lib/Hydra/Plugin/S3Backup.pm
index 98e79747..a0d67851 100644
--- a/src/lib/Hydra/Plugin/S3Backup.pm
+++ b/src/lib/Hydra/Plugin/S3Backup.pm
@@ -92,7 +92,7 @@ sub buildFinished {
             my $hash = substr basename($path), 0, 32;
             my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($path, 0);
             my $system;
-            if (defined $deriver and isValidPath($deriver)) {
+            if (defined $deriver and $MACHINE_LOCAL_STORE->isValidPath($deriver)) {
                 $system = derivationFromPath($deriver)->{platform};
             }
             foreach my $reference (@{$refs}) {
diff --git a/src/lib/Hydra/Plugin/SubversionInput.pm b/src/lib/Hydra/Plugin/SubversionInput.pm
index 83c1f39d..d3579c40 100644
--- a/src/lib/Hydra/Plugin/SubversionInput.pm
+++ b/src/lib/Hydra/Plugin/SubversionInput.pm
@@ -46,7 +46,7 @@ sub fetchInput {
 
     $MACHINE_LOCAL_STORE->addTempRoot($cachedInput->storepath) if defined $cachedInput;
 
-    if (defined $cachedInput && isValidPath($cachedInput->storepath)) {
+    if (defined $cachedInput && $MACHINE_LOCAL_STORE->isValidPath($cachedInput->storepath)) {
         $storePath = $cachedInput->storepath;
         $sha256 = $cachedInput->sha256hash;
     } else {

From 1ef6b5e7b467c67c085facca909f3eafc33d9b55 Mon Sep 17 00:00:00 2001
From: hacker1024
Date: Wed, 21 Aug 2024 22:13:38 +1000
Subject: [PATCH 25/46] Use Nix::Store and Nix::Utils in NARInfo.pm

These are required for the `signString` and `readFile` subroutines used
when signing NARs.

(cherry picked from commit b94a7b6d5c56362af9ea85d944f8454d861ec001)
---
 src/lib/Hydra/View/NARInfo.pm | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/lib/Hydra/View/NARInfo.pm b/src/lib/Hydra/View/NARInfo.pm
index bf8711a4..801fc06a 100644
--- a/src/lib/Hydra/View/NARInfo.pm
+++ b/src/lib/Hydra/View/NARInfo.pm
@@ -6,6 +6,8 @@ use File::Basename;
 use Hydra::Helper::CatalystUtils;
 use MIME::Base64;
 use Nix::Manifest;
+use Nix::Store;
+use Nix::Utils;
 use Hydra::Helper::Nix;
 use base qw/Catalyst::View/;

From 029116422d01772bf532988814f529512ab02efb Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Tue, 24 Sep 2024 11:28:55 -0400
Subject: [PATCH 26/46] Update to Nix 2.23
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Flake lock file updates:

• Updated input 'nix':
    'github:NixOS/nix/1c8150ac312b5f9ba1b3f6768ff43b09867e5883' (2024-04-23)
  → 'github:NixOS/nix/5ffd239adc9b7fddca7a2a59a8b87da5af14ec4d' (2024-09-23)
---
 flake.lock                      | 8 ++++----
 flake.nix                       | 2 +-
 src/hydra-queue-runner/state.hh | 1 +
 3 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/flake.lock b/flake.lock
index e828aa7b..535ca096 100644
--- a/flake.lock
+++ b/flake.lock
@@ -80,16 +80,16 @@
         "pre-commit-hooks": "pre-commit-hooks"
       },
       "locked": {
-        "lastModified": 1713874370,
-        "narHash": "sha256-gW1mO/CvsQQ5gvgiwzxsGhPFI/tx30NING+qgF5Do0s=",
+        "lastModified": 1727132408,
+        "narHash": "sha256-esYY9tT7Q79rG4+Xvf32L3AZvVGgdVeT1n0uK6Gd6ls=",
         "owner": "NixOS",
         "repo": "nix",
-        "rev": "1c8150ac312b5f9ba1b3f6768ff43b09867e5883",
+        "rev": "5ffd239adc9b7fddca7a2a59a8b87da5af14ec4d",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "2.22-maintenance",
+        "ref": "2.23-maintenance",
         "repo": "nix",
         "type": "github"
       }
     }
diff --git a/flake.nix b/flake.nix
index 1f73e3b3..1acb245a 100644
--- a/flake.nix
+++ b/flake.nix
@@ -2,7 +2,7 @@
   description = "A Nix-based continuous build system";
 
   inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11-small";
-  inputs.nix.url = "github:NixOS/nix/2.22-maintenance";
+  inputs.nix.url = "github:NixOS/nix/2.23-maintenance";
   inputs.nix.inputs.nixpkgs.follows = "nixpkgs";
 
   outputs = { self, nixpkgs, nix }:
diff --git a/src/hydra-queue-runner/state.hh b/src/hydra-queue-runner/state.hh
index 5d242cdf..839239fe 100644
--- a/src/hydra-queue-runner/state.hh
+++ b/src/hydra-queue-runner/state.hh
@@ -23,6 +23,7 @@
 #include "nar-extractor.hh"
 #include "serve-protocol.hh"
 #include "serve-protocol-impl.hh"
+#include "serve-protocol-connection.hh"
 
 #include "machines.hh"

From 012cbd43f555c5657168d667b8018e54f22adc5a Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Tue, 24 Sep 2024 11:51:17 -0400
Subject: [PATCH 27/46] Add missing include

---
 src/hydra-queue-runner/dispatcher.cc | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/hydra-queue-runner/dispatcher.cc b/src/hydra-queue-runner/dispatcher.cc
index 6d738ded..a4c84252 100644
--- a/src/hydra-queue-runner/dispatcher.cc
+++ b/src/hydra-queue-runner/dispatcher.cc
@@ -2,6 +2,7 @@
 #include
 #include
 #include
+#include
 
 #include "state.hh"

From ceb8b48cce4cc21ddb8941ed3858f6ed630361c8 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Tue, 24 Sep 2024 12:14:23 -0400
Subject: [PATCH 28/46] Fix type error with NAR accessor

---
 src/hydra-queue-runner/nar-extractor.cc | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/src/hydra-queue-runner/nar-extractor.cc b/src/hydra-queue-runner/nar-extractor.cc
index 61299ecd..9f94af9c 100644
--- a/src/hydra-queue-runner/nar-extractor.cc
+++ b/src/hydra-queue-runner/nar-extractor.cc
@@ -60,26 +60,26 @@ struct Extractor : FileSystemObjectSink
         : members(members), prefix(prefix)
     { }
 
-    void createDirectory(const Path & path) override
+    void createDirectory(const CanonPath & path) override
     {
-        members.insert_or_assign(prefix + path, NarMemberData { .type = SourceAccessor::Type::tDirectory });
+        members.insert_or_assign(prefix + path.abs(), NarMemberData { .type = SourceAccessor::Type::tDirectory });
     }
 
-    void createRegularFile(const Path & path, std::function func) override
+    void createRegularFile(const CanonPath & path, std::function func) override
    {
         NarMemberConstructor nmc {
-            members.insert_or_assign(prefix + path, NarMemberData {
+            members.insert_or_assign(prefix + path.abs(), NarMemberData {
                 .type = SourceAccessor::Type::tRegular,
                 .fileSize = 0,
-                .contents = filesToKeep.count(path) ? std::optional("") : std::nullopt,
+                .contents = filesToKeep.count(path.abs()) ?
std::optional("") : std::nullopt, }).first->second, }; func(nmc); } - void createSymlink(const Path & path, const std::string & target) override + void createSymlink(const CanonPath & path, const std::string & target) override { - members.insert_or_assign(prefix + path, NarMemberData { .type = SourceAccessor::Type::tSymlink }); + members.insert_or_assign(prefix + path.abs(), NarMemberData { .type = SourceAccessor::Type::tSymlink }); } }; From 750275d6e8740ec7e170e7ac98c68ab3e36f37e6 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 7 Oct 2024 11:43:58 -0400 Subject: [PATCH 29/46] Avoid trailing slash that broke lookup --- src/hydra-queue-runner/nar-extractor.cc | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/hydra-queue-runner/nar-extractor.cc b/src/hydra-queue-runner/nar-extractor.cc index 9f94af9c..8729dd39 100644 --- a/src/hydra-queue-runner/nar-extractor.cc +++ b/src/hydra-queue-runner/nar-extractor.cc @@ -54,7 +54,15 @@ struct Extractor : FileSystemObjectSink }; NarMemberDatas & members; - Path prefix; + std::filesystem::path prefix; + + Path toKey(const CanonPath & path) + { + std::filesystem::path p = prefix; + // Conditional to avoid trailing slash + if (!path.isRoot()) p /= path.rel(); + return p; + } Extractor(NarMemberDatas & members, const Path & prefix) : members(members), prefix(prefix) @@ -62,13 +70,13 @@ struct Extractor : FileSystemObjectSink void createDirectory(const CanonPath & path) override { - members.insert_or_assign(prefix + path.abs(), NarMemberData { .type = SourceAccessor::Type::tDirectory }); + members.insert_or_assign(toKey(path), NarMemberData { .type = SourceAccessor::Type::tDirectory }); } void createRegularFile(const CanonPath & path, std::function func) override { NarMemberConstructor nmc { - members.insert_or_assign(prefix + path.abs(), NarMemberData { + members.insert_or_assign(toKey(path), NarMemberData { .type = SourceAccessor::Type::tRegular, .fileSize = 0, .contents = filesToKeep.count(path.abs()) ? std::optional("") : std::nullopt, @@ -79,7 +87,7 @@ struct Extractor : FileSystemObjectSink void createSymlink(const CanonPath & path, const std::string & target) override { - members.insert_or_assign(prefix + path.abs(), NarMemberData { .type = SourceAccessor::Type::tSymlink }); + members.insert_or_assign(toKey(path), NarMemberData { .type = SourceAccessor::Type::tSymlink }); } }; From 2a7b070da0ef2b2f23e4331282ba6e013690edb0 Mon Sep 17 00:00:00 2001 From: Rick van Schijndel Date: Thu, 17 Oct 2024 22:18:56 +0200 Subject: [PATCH 30/46] S3Backup: fix compilation issue where MACHINE_LOCAL_STORE variable is undefined See https://github.com/NixOS/hydra/pull/1414#issuecomment-2412350929 The variable is defined in src/lib/Hydra/Helper/Nix.pm Error message without this patch: ``` hydra-evaluator[PID]: Couldn't require Hydra::Plugin::S3Backup : Global symbol "$MACHINE_LOCAL_STORE" requires explicit package name (did you forget to declare "my $MACHINE_LOCAL_STORE"?) at /nix/store/xxx-hydra-0-unstable-2024-09-24/libexec/hydra/lib/Hydra/Plugin/S3Backup.pm line 95. hydra-evaluator[PID]: Compilation failed in require at /nix/store/xxx-hydra-perl-deps/lib/perl5/site_perl/5.38.2/Module/Runtime.pm line 314. hydra-evaluator[PID]: at /nix/store/xxx-hydra-perl-deps/lib/perl5/site_perl/5.38.2/Module/Pluggable.pm line 32. 
```
---
 src/lib/Hydra/Plugin/S3Backup.pm | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/lib/Hydra/Plugin/S3Backup.pm b/src/lib/Hydra/Plugin/S3Backup.pm
index a0d67851..f1f50754 100644
--- a/src/lib/Hydra/Plugin/S3Backup.pm
+++ b/src/lib/Hydra/Plugin/S3Backup.pm
@@ -14,6 +14,7 @@ use Nix::Config;
 use Nix::Store;
 use Hydra::Model::DB;
 use Hydra::Helper::CatalystUtils;
+use Hydra::Helper::Nix;
 
 sub isEnabled {
     my ($self) = @_;

From 8a54924d2aeffa841a3764b2c916211a8219c34c Mon Sep 17 00:00:00 2001
From: Rick van Schijndel
Date: Thu, 17 Oct 2024 22:25:11 +0200
Subject: [PATCH 31/46] nix-prefetch-git: set branch name to suppress hint
 from git

In my system logs I see this every time a new eval starts:

```
hydra-evaluator[PID]: hint: Using 'master' as the name for the initial branch. This default branch name
hydra-evaluator[PID]: hint: is subject to change. To configure the initial branch name to use in all
hydra-evaluator[PID]: hint: of your new repositories, which will suppress this warning, call:
hydra-evaluator[PID]: hint:
hydra-evaluator[PID]: hint:         git config --global init.defaultBranch <name>
hydra-evaluator[PID]: hint:
hydra-evaluator[PID]: hint: Names commonly chosen instead of 'master' are 'main', 'trunk' and
hydra-evaluator[PID]: hint: 'development'. The just-created branch can be renamed via this command:
hydra-evaluator[PID]: hint:
hydra-evaluator[PID]: hint:         git branch -m <name>
```

This ensures this hint is not logged anymore and unclutters the syslog.
I presume it does not really matter what name is chosen for the branch.
---
 src/script/nix-prefetch-git | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/script/nix-prefetch-git b/src/script/nix-prefetch-git
index 3fe399c8..316eb9b0 100755
--- a/src/script/nix-prefetch-git
+++ b/src/script/nix-prefetch-git
@@ -78,7 +78,7 @@ fi
 
 init_remote(){
     local url=$1;
-    git init;
+    git init --initial-branch=trunk;
     git remote add origin $url;
 }

From 1222ba03a66811564c7ef4a9117c09e166087a26 Mon Sep 17 00:00:00 2001
From: Martin Weinelt
Date: Sat, 19 Oct 2024 17:30:59 +0200
Subject: [PATCH 32/46] Make the in-tree package the default package

There is an overlay for the `hydra` name, but `hydra_unstable` was
used, which can refer to the nixpkgs package, lead to an outdated hydra
version, and require configuring the correct package attribute
downstream.
---
 nixos-modules/hydra.nix | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nixos-modules/hydra.nix b/nixos-modules/hydra.nix
index d001675d..6a2390c4 100644
--- a/nixos-modules/hydra.nix
+++ b/nixos-modules/hydra.nix
@@ -68,7 +68,7 @@ in
 
     package = mkOption {
       type = types.path;
-      default = pkgs.hydra_unstable;
+      default = pkgs.hydra;
       defaultText = literalExpression "pkgs.hydra";
       description = "The Hydra package.";
     };

From a9a5b143317dfa343348ecdc80f04e1b4b118d1e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=B6rg=20Thalheim?=
Date: Sat, 19 Oct 2024 18:05:04 +0200
Subject: [PATCH 33/46] make the NixOS module use hydra from this repository
 by default

When people check out the git repository they probably want to use
hydra from the same source.

This also removes the need for an overlay, in favor of simpler and more
performant direct use of the nixpkgs passed in. Previously it was
re-importing nixpkgs.
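
For downstream users, this is roughly what consuming the module now looks like. A minimal sketch, assuming this repository is used as a flake input named `hydra` (the input name, hostname, and option values below are illustrative, not taken from this patch):

```nix
{
  inputs.hydra.url = "github:NixOS/hydra";

  outputs = { nixpkgs, hydra, ... }: {
    nixosConfigurations.ci = nixpkgs.lib.nixosSystem {
      system = "x86_64-linux";
      modules = [
        # services.hydra-dev.package now defaults to the package built
        # from this repository, so no overlay wiring is needed.
        hydra.nixosModules.hydra
        {
          services.hydra-dev.enable = true;
          services.hydra-dev.hydraURL = "https://hydra.example.org";
          services.hydra-dev.notificationSender = "hydra@example.org";
        }
      ];
    };
  };
}
```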
test --- flake.nix | 33 +++++++++++++++------------------ nixos-modules/default.nix | 11 +++++------ nixos-modules/hydra.nix | 2 -- nixos-tests.nix | 14 ++++++-------- 4 files changed, 26 insertions(+), 34 deletions(-) diff --git a/flake.nix b/flake.nix index 19381efd..78024656 100644 --- a/flake.nix +++ b/flake.nix @@ -9,14 +9,6 @@ let systems = [ "x86_64-linux" "aarch64-linux" ]; forEachSystem = nixpkgs.lib.genAttrs systems; - - overlayList = [ self.overlays.default nix.overlays.default ]; - - pkgsBySystem = forEachSystem (system: import nixpkgs { - inherit system; - overlays = overlayList; - }); - in rec { @@ -30,7 +22,6 @@ }; hydraJobs = { - build = forEachSystem (system: packages.${system}.hydra); buildNoTests = forEachSystem (system: @@ -39,19 +30,21 @@ }) ); - manual = forEachSystem (system: - let pkgs = pkgsBySystem.${system}; in - pkgs.runCommand "hydra-manual-${pkgs.hydra.version}" { } + manual = forEachSystem (system: let + pkgs = nixpkgs.legacyPackages.${system}; + hydra = self.packages.${pkgs.hostPlatform.system}.hydra; + in + pkgs.runCommand "hydra-manual-${hydra.version}" { } '' mkdir -p $out/share - cp -prvd ${pkgs.hydra}/share/doc $out/share/ + cp -prvd ${hydra}/share/doc $out/share/ mkdir $out/nix-support echo "doc manual $out/share/doc/hydra" >> $out/nix-support/hydra-build-products ''); tests = import ./nixos-tests.nix { - inherit forEachSystem nixpkgs pkgsBySystem nixosModules; + inherit forEachSystem nixpkgs nixosModules; }; container = nixosConfigurations.container.config.system.build.toplevel; @@ -64,12 +57,17 @@ }); packages = forEachSystem (system: { - hydra = pkgsBySystem.${system}.hydra; - default = pkgsBySystem.${system}.hydra; + hydra = nixpkgs.legacyPackages.${system}.callPackage ./package.nix { + inherit (nixpkgs.lib) fileset; + rawSrc = self; + nix = nix.packages.${system}.nix; + nix-perl-bindings = nix.hydraJobs.perlBindings.${system}; + }; + default = self.packages.${system}.hydra; }); nixosModules = import ./nixos-modules { - overlays = overlayList; + inherit self; }; nixosConfigurations.container = nixpkgs.lib.nixosSystem { @@ -77,7 +75,6 @@ modules = [ self.nixosModules.hydra - self.nixosModules.overlayNixpkgsForThisHydra self.nixosModules.hydraTest self.nixosModules.hydraProxy { diff --git a/nixos-modules/default.nix b/nixos-modules/default.nix index dac705c3..62b18406 100644 --- a/nixos-modules/default.nix +++ b/nixos-modules/default.nix @@ -1,11 +1,10 @@ -{ overlays }: +{ self }: { - hydra = import ./hydra.nix; - - overlayNixpkgsForThisHydra = { pkgs, ... }: { - nixpkgs = { inherit overlays; }; - services.hydra.package = pkgs.hydra; + hydra = { pkgs, lib,... }: { + _file = ./default.nix; + imports = [ ./hydra.nix ]; + services.hydra-dev.package = lib.mkDefault self.packages.${pkgs.hostPlatform.system}.hydra; }; hydraTest = { pkgs, ... }: { diff --git a/nixos-modules/hydra.nix b/nixos-modules/hydra.nix index 6a2390c4..c471b0ef 100644 --- a/nixos-modules/hydra.nix +++ b/nixos-modules/hydra.nix @@ -68,8 +68,6 @@ in package = mkOption { type = types.path; - default = pkgs.hydra; - defaultText = literalExpression "pkgs.hydra"; description = "The Hydra package."; }; diff --git a/nixos-tests.nix b/nixos-tests.nix index 19a9ba35..9efe68c8 100644 --- a/nixos-tests.nix +++ b/nixos-tests.nix @@ -1,13 +1,12 @@ -{ forEachSystem, nixpkgs, pkgsBySystem, nixosModules }: +{ forEachSystem, nixpkgs, nixosModules }: let # NixOS configuration used for VM tests. hydraServer = - { config, pkgs, ... }: + { pkgs, ... 
}: { imports = [ nixosModules.hydra - nixosModules.overlayNixpkgsForThisHydra nixosModules.hydraTest ]; @@ -44,11 +43,10 @@ in }); notifications = forEachSystem (system: - let pkgs = pkgsBySystem.${system}; in with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }; simpleTest { name = "hydra-notifications"; - nodes.machine = { pkgs, ... }: { + nodes.machine = { imports = [ hydraServer ]; services.hydra-dev.extraConfig = '' @@ -89,7 +87,7 @@ in # Setup the project and jobset machine.succeed( - "su - hydra -c 'perl -I ${pkgs.hydra.perlDeps}/lib/perl5/site_perl ${./t/setup-notifications-jobset.pl}' >&2" + "su - hydra -c 'perl -I ${config.services.hydra-dev.package.perlDeps}/lib/perl5/site_perl ${./t/setup-notifications-jobset.pl}' >&2" ) # Wait until hydra has build the job and @@ -103,7 +101,7 @@ in }); gitea = forEachSystem (system: - let pkgs = pkgsBySystem.${system}; in + let pkgs = nixpkgs.legacyPackages.${system}; in with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }; makeTest { name = "hydra-gitea"; @@ -298,7 +296,7 @@ in }); validate-openapi = forEachSystem (system: - let pkgs = pkgsBySystem.${system}; in + let pkgs = nixpkgs.legacyPackages.${system}; in pkgs.runCommand "validate-openapi" { buildInputs = [ pkgs.openapi-generator-cli ]; } '' From f442d74f6e6ea3fdc4109b4051cced263058f09d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Sat, 19 Oct 2024 16:46:45 +0000 Subject: [PATCH 34/46] remove unused nix dev flake inputs --- flake.lock | 115 +++++------------------------------------------------ flake.nix | 12 +++++- 2 files changed, 20 insertions(+), 107 deletions(-) diff --git a/flake.lock b/flake.lock index 246b5380..897785ed 100644 --- a/flake.lock +++ b/flake.lock @@ -1,73 +1,5 @@ { "nodes": { - "flake-compat": { - "flake": false, - "locked": { - "lastModified": 1673956053, - "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", - "owner": "edolstra", - "repo": "flake-compat", - "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", - "type": "github" - }, - "original": { - "owner": "edolstra", - "repo": "flake-compat", - "type": "github" - } - }, - "flake-parts": { - "inputs": { - "nixpkgs-lib": [ - "nix", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1712014858, - "narHash": "sha256-sB4SWl2lX95bExY2gMFG5HIzvva5AVMJd4Igm+GpZNw=", - "owner": "hercules-ci", - "repo": "flake-parts", - "rev": "9126214d0a59633752a136528f5f3b9aa8565b7d", - "type": "github" - }, - "original": { - "owner": "hercules-ci", - "repo": "flake-parts", - "type": "github" - } - }, - "git-hooks-nix": { - "inputs": { - "flake-compat": [ - "nix" - ], - "gitignore": [ - "nix" - ], - "nixpkgs": [ - "nix", - "nixpkgs" - ], - "nixpkgs-stable": [ - "nix", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1726745158, - "narHash": "sha256-D5AegvGoEjt4rkKedmxlSEmC+nNLMBPWFxvmYnVLhjk=", - "owner": "cachix", - "repo": "git-hooks.nix", - "rev": "4e743a6920eab45e8ba0fbe49dc459f1423a4b74", - "type": "github" - }, - "original": { - "owner": "cachix", - "repo": "git-hooks.nix", - "type": "github" - } - }, "libgit2": { "flake": false, "locked": { @@ -87,15 +19,17 @@ }, "nix": { "inputs": { - "flake-compat": "flake-compat", - "flake-parts": "flake-parts", - "git-hooks-nix": "git-hooks-nix", - "libgit2": "libgit2", + "flake-compat": [], + "flake-parts": [], + "git-hooks-nix": [], + "libgit2": [ + "libgit2" + ], "nixpkgs": [ "nixpkgs" ], - "nixpkgs-23-11": "nixpkgs-23-11", - "nixpkgs-regression": "nixpkgs-regression" + "nixpkgs-23-11": [], + 
"nixpkgs-regression": [] }, "locked": { "lastModified": 1726787955, @@ -128,40 +62,9 @@ "type": "github" } }, - "nixpkgs-23-11": { - "locked": { - "lastModified": 1717159533, - "narHash": "sha256-oamiKNfr2MS6yH64rUn99mIZjc45nGJlj9eGth/3Xuw=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "a62e6edd6d5e1fa0329b8653c801147986f8d446", - "type": "github" - }, - "original": { - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "a62e6edd6d5e1fa0329b8653c801147986f8d446", - "type": "github" - } - }, - "nixpkgs-regression": { - "locked": { - "lastModified": 1643052045, - "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - }, - "original": { - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "github" - } - }, "root": { "inputs": { + "libgit2": "libgit2", "nix": "nix", "nixpkgs": "nixpkgs" } diff --git a/flake.nix b/flake.nix index 78024656..c06b5f62 100644 --- a/flake.nix +++ b/flake.nix @@ -2,10 +2,20 @@ description = "A Nix-based continuous build system"; inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.05-small"; + + inputs.libgit2 = { url = "github:libgit2/libgit2/v1.8.1"; flake = false; }; inputs.nix.url = "github:NixOS/nix/2.24-maintenance"; inputs.nix.inputs.nixpkgs.follows = "nixpkgs"; + inputs.nix.inputs.libgit2.follows = "libgit2"; - outputs = { self, nixpkgs, nix }: + # hide nix dev tooling from our lock file + inputs.nix.inputs.flake-parts.follows = ""; + inputs.nix.inputs.git-hooks-nix.follows = ""; + inputs.nix.inputs.nixpkgs-regression.follows = ""; + inputs.nix.inputs.nixpkgs-23-11.follows = ""; + inputs.nix.inputs.flake-compat.follows = ""; + + outputs = { self, nixpkgs, nix, ... }: let systems = [ "x86_64-linux" "aarch64-linux" ]; forEachSystem = nixpkgs.lib.genAttrs systems; From 4e2c06ec2c74e0387825e8025ee8eb2121b8433b Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Sun, 20 Oct 2024 01:57:07 +0200 Subject: [PATCH 35/46] queue-runner: don't decode base64 hostkey in hydra Nix expects a base64 encoded hostkey in SSHMaster, so make sure we don't decode this prematurely in hydra. Reported-By: Puck Meerburg --- src/hydra-queue-runner/hydra-queue-runner.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hydra-queue-runner/hydra-queue-runner.cc b/src/hydra-queue-runner/hydra-queue-runner.cc index 5ffa7fe6..bc2974b2 100644 --- a/src/hydra-queue-runner/hydra-queue-runner.cc +++ b/src/hydra-queue-runner/hydra-queue-runner.cc @@ -171,7 +171,7 @@ void State::parseMachines(const std::string & contents) std::move(mandatoryFeatures), // `sshPublicHostKey` tokens[7] != "" && tokens[7] != "-" - ? base64Decode(tokens[7]) + ? tokens[7] : "", }); From 182a48c9fbcd6e6063bd6166c314174a8e79294c Mon Sep 17 00:00:00 2001 From: Pierre Bourdon Date: Mon, 22 Jul 2024 15:54:29 +0200 Subject: [PATCH 36/46] autotools -> meson Original commit message: > There are some known regressions regarding local testing setups - since > everything was kinda half written with the expectation that build dir = > source dir (which should not be true anymore). But everything builds and > the test suite runs fine, after several hours spent debugging random > crashes in libpqxx with MALLOC_PERTURB_... I have not experienced regressions with local testing. 
(cherry picked from commit 4b886d9c45cd2d7fe9b0a8dbc05c7318d46f615d) --- .gitignore | 42 +------------- .yath.rc | 2 - Makefile.am | 12 ---- configure.ac | 91 ------------------------------ doc/Makefile.am | 4 -- doc/manual/Makefile.am | 6 -- doc/manual/meson.build | 36 ++++++++++++ flake.nix | 2 +- meson.build | 40 +++++++++++++ nixos-modules/meson.build | 4 ++ package.nix | 37 ++++++------ src/Makefile.am | 3 - src/hydra-eval-jobs/Makefile.am | 5 -- src/hydra-eval-jobs/meson.build | 8 +++ src/hydra-evaluator/Makefile.am | 5 -- src/hydra-evaluator/meson.build | 9 +++ src/hydra-queue-runner/Makefile.am | 8 --- src/hydra-queue-runner/meson.build | 22 ++++++++ src/lib/Makefile.am | 22 -------- src/libhydra/meson.build | 5 ++ src/meson.build | 86 ++++++++++++++++++++++++++++ src/root/Makefile.am | 39 ------------- src/script/Makefile.am | 19 ------- src/sql/Makefile.am | 9 --- src/sql/meson.build | 90 +++++++++++++++++++++++++++++ src/ttf/Makefile.am | 4 -- src/ttf/meson.build | 5 ++ t/Makefile.am | 39 ------------- t/lib/HydraTestContext.pm | 23 +++++++- t/meson.build | 43 ++++++++++++++ 30 files changed, 391 insertions(+), 329 deletions(-) delete mode 100644 .yath.rc delete mode 100644 Makefile.am delete mode 100644 configure.ac delete mode 100644 doc/Makefile.am delete mode 100644 doc/manual/Makefile.am create mode 100644 doc/manual/meson.build create mode 100644 meson.build create mode 100644 nixos-modules/meson.build delete mode 100644 src/Makefile.am delete mode 100644 src/hydra-eval-jobs/Makefile.am create mode 100644 src/hydra-eval-jobs/meson.build delete mode 100644 src/hydra-evaluator/Makefile.am create mode 100644 src/hydra-evaluator/meson.build delete mode 100644 src/hydra-queue-runner/Makefile.am create mode 100644 src/hydra-queue-runner/meson.build delete mode 100644 src/lib/Makefile.am create mode 100644 src/libhydra/meson.build create mode 100644 src/meson.build delete mode 100644 src/root/Makefile.am delete mode 100644 src/script/Makefile.am delete mode 100644 src/sql/Makefile.am create mode 100644 src/sql/meson.build delete mode 100644 src/ttf/Makefile.am create mode 100644 src/ttf/meson.build delete mode 100644 t/Makefile.am create mode 100644 t/meson.build diff --git a/.gitignore b/.gitignore index f8bf5718..ddcbadc4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,48 +1,8 @@ -/.pls_cache -*.o *~ -Makefile -Makefile.in -.deps -.hydra-data -/config.guess -/config.log -/config.status -/config.sub -/configure -/depcomp -/libtool -/ltmain.sh -/autom4te.cache -/aclocal.m4 -/missing -/install-sh +.test_info.* /src/sql/hydra-postgresql.sql /src/sql/hydra-sqlite.sql /src/sql/tmp.sqlite -/src/hydra-eval-jobs/hydra-eval-jobs -/src/root/static/bootstrap -/src/root/static/js/flot -/tests -/doc/manual/images -/doc/manual/manual.html -/doc/manual/manual.pdf -/t/.bzr* -/t/.git* -/t/.hg* -/t/nix -/t/data -/t/jobs/config.nix -t/jobs/declarative/project.json -/inst -hydra-config.h -hydra-config.h.in result result-* outputs -config -stamp-h1 -src/hydra-evaluator/hydra-evaluator -src/hydra-queue-runner/hydra-queue-runner -src/root/static/fontawesome/ -src/root/static/bootstrap*/ diff --git a/.yath.rc b/.yath.rc deleted file mode 100644 index 19bb35af..00000000 --- a/.yath.rc +++ /dev/null @@ -1,2 +0,0 @@ -[test] --I=rel(t/lib) diff --git a/Makefile.am b/Makefile.am deleted file mode 100644 index a28e3f33..00000000 --- a/Makefile.am +++ /dev/null @@ -1,12 +0,0 @@ -SUBDIRS = src doc -if CAN_DO_CHECK - SUBDIRS += t -endif - -BOOTCLEAN_SUBDIRS = $(SUBDIRS) -DIST_SUBDIRS = $(SUBDIRS) -EXTRA_DIST = 
nixos-modules/hydra.nix - -install-data-local: nixos-modules/hydra.nix - $(INSTALL) -d $(DESTDIR)$(datadir)/nix - $(INSTALL_DATA) nixos-modules/hydra.nix $(DESTDIR)$(datadir)/nix/hydra-module.nix diff --git a/configure.ac b/configure.ac deleted file mode 100644 index e5c57d14..00000000 --- a/configure.ac +++ /dev/null @@ -1,91 +0,0 @@ -AC_INIT([Hydra], [m4_esyscmd([echo -n $(cat ./version.txt)$VERSION_SUFFIX])]) -AC_CONFIG_AUX_DIR(config) -AM_INIT_AUTOMAKE([foreign serial-tests]) - -AC_LANG([C++]) - -AC_PROG_CC -AC_PROG_INSTALL -AC_PROG_LN_S -AC_PROG_LIBTOOL -AC_PROG_CXX - -AC_PATH_PROG([XSLTPROC], [xsltproc]) - -AC_ARG_WITH([docbook-xsl], - [AS_HELP_STRING([--with-docbook-xsl=PATH], - [path of the DocBook XSL stylesheets])], - [docbookxsl="$withval"], - [docbookxsl="/docbook-xsl-missing"]) -AC_SUBST([docbookxsl]) - - -AC_DEFUN([NEED_PROG], -[ -AC_PATH_PROG($1, $2) -if test -z "$$1"; then - AC_MSG_ERROR([$2 is required]) -fi -]) - -NEED_PROG(perl, perl) - -NEED_PROG([NIX_STORE_PROGRAM], [nix-store]) - -AC_MSG_CHECKING([whether $NIX_STORE_PROGRAM is recent enough]) -if test -n "$NIX_STORE" -a -n "$TMPDIR" -then - # This may be executed from within a build chroot, so pacify - # `nix-store' instead of letting it choke while trying to mkdir - # /nix/var. - NIX_STATE_DIR="$TMPDIR" - export NIX_STATE_DIR -fi -if NIX_REMOTE=daemon PAGER=cat "$NIX_STORE_PROGRAM" --timeout 123 -q; then - AC_MSG_RESULT([yes]) -else - AC_MSG_RESULT([no]) - AC_MSG_ERROR([`$NIX_STORE_PROGRAM' doesn't support `--timeout'; please use a newer version.]) -fi - -PKG_CHECK_MODULES([NIX], [nix-main nix-expr nix-store]) - -testPath="$(dirname $(type -p expr))" -AC_SUBST(testPath) - -CXXFLAGS+=" -include nix/config.h" - -AC_CONFIG_FILES([ - Makefile - doc/Makefile - doc/manual/Makefile - src/Makefile - src/hydra-evaluator/Makefile - src/hydra-eval-jobs/Makefile - src/hydra-queue-runner/Makefile - src/sql/Makefile - src/ttf/Makefile - src/lib/Makefile - src/root/Makefile - src/script/Makefile -]) - -# Tests might be filtered out -AM_CONDITIONAL([CAN_DO_CHECK], [test -f "$srcdir/t/api-test.t"]) -AM_COND_IF( - [CAN_DO_CHECK], - [ - jobsPath="$(realpath ./t/jobs)" - AC_SUBST(jobsPath) - AC_CONFIG_FILES([ - t/Makefile - t/jobs/config.nix - t/jobs/declarative/project.json - ]) - ]) - -AC_CONFIG_COMMANDS([executable-scripts], []) - -AC_CONFIG_HEADER([hydra-config.h]) - -AC_OUTPUT diff --git a/doc/Makefile.am b/doc/Makefile.am deleted file mode 100644 index 9ac91d24..00000000 --- a/doc/Makefile.am +++ /dev/null @@ -1,4 +0,0 @@ -SUBDIRS = manual -BOOTCLEAN_SUBDIRS = $(SUBDIRS) -DIST_SUBDIRS = $(SUBDIRS) - diff --git a/doc/manual/Makefile.am b/doc/manual/Makefile.am deleted file mode 100644 index ec732166..00000000 --- a/doc/manual/Makefile.am +++ /dev/null @@ -1,6 +0,0 @@ -MD_FILES = src/*.md - -EXTRA_DIST = $(MD_FILES) - -install: $(MD_FILES) - mdbook build . 
-d $(docdir) diff --git a/doc/manual/meson.build b/doc/manual/meson.build new file mode 100644 index 00000000..11178809 --- /dev/null +++ b/doc/manual/meson.build @@ -0,0 +1,36 @@ +srcs = files( + 'src/SUMMARY.md', + 'src/about.md', + 'src/api.md', + 'src/configuration.md', + 'src/hacking.md', + 'src/installation.md', + 'src/introduction.md', + 'src/jobs.md', + 'src/monitoring/README.md', + 'src/notifications.md', + 'src/plugins/README.md', + 'src/plugins/RunCommand.md', + 'src/plugins/declarative-projects.md', + 'src/projects.md', + 'src/webhooks.md', +) + +manual = custom_target( + 'manual', + command: [ + mdbook, + 'build', + '@SOURCE_ROOT@/doc/manual', + '-d', meson.current_build_dir() / 'html' + ], + depend_files: srcs, + output: ['html'], + build_by_default: true, +) + +install_subdir( + manual.full_path(), + install_dir: get_option('datadir') / 'doc/hydra', + strip_directory: true, +) diff --git a/flake.nix b/flake.nix index c06b5f62..fccd45b9 100644 --- a/flake.nix +++ b/flake.nix @@ -47,7 +47,7 @@ pkgs.runCommand "hydra-manual-${hydra.version}" { } '' mkdir -p $out/share - cp -prvd ${hydra}/share/doc $out/share/ + cp -prvd ${hydra.doc}/share/doc $out/share/ mkdir $out/nix-support echo "doc manual $out/share/doc/hydra" >> $out/nix-support/hydra-build-products diff --git a/meson.build b/meson.build new file mode 100644 index 00000000..b9327d51 --- /dev/null +++ b/meson.build @@ -0,0 +1,40 @@ +project('hydra', 'cpp', + version: files('version.txt'), + license: 'GPL-3.0', + default_options: [ + 'debug=true', + 'optimization=2', + 'cpp_std=c++20', + ], +) + +nix_store_dep = dependency('nix-store', required: true) +nix_main_dep = dependency('nix-main', required: true) +nix_expr_dep = dependency('nix-expr', required: true) +nix_flake_dep = dependency('nix-flake', required: true) +nix_cmd_dep = dependency('nix-cmd', required: true) + +# Nix need extra flags not provided in its pkg-config files. 
+nix_dep = declare_dependency( + dependencies: [ + nix_store_dep, + nix_main_dep, + nix_expr_dep, + nix_flake_dep, + nix_cmd_dep, + ], + compile_args: ['-include', 'nix/config.h'], +) + +pqxx_dep = dependency('libpqxx', required: true) + +prom_cpp_core_dep = dependency('prometheus-cpp-core', required: true) +prom_cpp_pull_dep = dependency('prometheus-cpp-pull', required: true) + +mdbook = find_program('mdbook', native: true) +perl = find_program('perl', native: true) + +subdir('doc/manual') +subdir('nixos-modules') +subdir('src') +subdir('t') diff --git a/nixos-modules/meson.build b/nixos-modules/meson.build new file mode 100644 index 00000000..95c47e9f --- /dev/null +++ b/nixos-modules/meson.build @@ -0,0 +1,4 @@ +install_data('hydra.nix', + install_dir: get_option('datadir') / 'nix', + rename: ['hydra-module.nix'], +) diff --git a/package.nix b/package.nix index 73dd0f2a..f944fe2b 100644 --- a/package.nix +++ b/package.nix @@ -13,7 +13,8 @@ , git , makeWrapper -, autoreconfHook +, meson +, ninja , nukeReferences , pkg-config , mdbook @@ -91,6 +92,7 @@ let DigestSHA1 EmailMIME EmailSender + FileCopyRecursive FileLibMagic FileSlurper FileWhich @@ -138,28 +140,24 @@ stdenv.mkDerivation (finalAttrs: { src = fileset.toSource { root = ./.; fileset = fileset.unions ([ - ./version.txt - ./configure.ac - ./Makefile.am - ./src ./doc - ./nixos-modules/hydra.nix - # These are always needed to appease Automake - ./t/Makefile.am - ./t/jobs/config.nix.in - ./t/jobs/declarative/project.json.in - ] ++ lib.optionals finalAttrs.doCheck [ + ./meson.build + ./nixos-modules + ./src ./t + ./version.txt ./.perlcriticrc - ./.yath.rc ]); }; + outputs = [ "out" "doc" ]; + strictDeps = true; nativeBuildInputs = [ makeWrapper - autoreconfHook + meson + ninja nukeReferences pkg-config mdbook @@ -225,6 +223,12 @@ stdenv.mkDerivation (finalAttrs: { OPENLDAP_ROOT = openldap; + mesonBuildType = "release"; + + postPatch = '' + patchShebangs . + ''; + shellHook = '' pushd $(git rev-parse --show-toplevel) >/dev/null @@ -238,14 +242,11 @@ stdenv.mkDerivation (finalAttrs: { popd >/dev/null ''; - NIX_LDFLAGS = [ "-lpthread" ]; - - enableParallelBuilding = true; - doCheck = true; + mesonCheckFlags = [ "--verbose" ]; + preCheck = '' - patchShebangs . 
export LOGNAME=''${LOGNAME:-foo} # set $HOME for bzr so it can create its trace file export HOME=$(mktemp -d) diff --git a/src/Makefile.am b/src/Makefile.am deleted file mode 100644 index a28780b6..00000000 --- a/src/Makefile.am +++ /dev/null @@ -1,3 +0,0 @@ -SUBDIRS = hydra-evaluator hydra-eval-jobs hydra-queue-runner sql script lib root ttf -BOOTCLEAN_SUBDIRS = $(SUBDIRS) -DIST_SUBDIRS = $(SUBDIRS) diff --git a/src/hydra-eval-jobs/Makefile.am b/src/hydra-eval-jobs/Makefile.am deleted file mode 100644 index fb67f89b..00000000 --- a/src/hydra-eval-jobs/Makefile.am +++ /dev/null @@ -1,5 +0,0 @@ -bin_PROGRAMS = hydra-eval-jobs - -hydra_eval_jobs_SOURCES = hydra-eval-jobs.cc -hydra_eval_jobs_LDADD = $(NIX_LIBS) -lnixcmd -lnixflake -hydra_eval_jobs_CXXFLAGS = $(NIX_CFLAGS) -I ../libhydra diff --git a/src/hydra-eval-jobs/meson.build b/src/hydra-eval-jobs/meson.build new file mode 100644 index 00000000..916212e1 --- /dev/null +++ b/src/hydra-eval-jobs/meson.build @@ -0,0 +1,8 @@ +hydra_eval_jobs = executable('hydra-eval-jobs', + 'hydra-eval-jobs.cc', + dependencies: [ + libhydra_dep, + nix_dep, + ], + install: true, +) diff --git a/src/hydra-evaluator/Makefile.am b/src/hydra-evaluator/Makefile.am deleted file mode 100644 index 73638cfe..00000000 --- a/src/hydra-evaluator/Makefile.am +++ /dev/null @@ -1,5 +0,0 @@ -bin_PROGRAMS = hydra-evaluator - -hydra_evaluator_SOURCES = hydra-evaluator.cc -hydra_evaluator_LDADD = $(NIX_LIBS) -lpqxx -hydra_evaluator_CXXFLAGS = $(NIX_CFLAGS) -Wall -I ../libhydra -Wno-deprecated-declarations diff --git a/src/hydra-evaluator/meson.build b/src/hydra-evaluator/meson.build new file mode 100644 index 00000000..b3dc542e --- /dev/null +++ b/src/hydra-evaluator/meson.build @@ -0,0 +1,9 @@ +hydra_evaluator = executable('hydra-evaluator', + 'hydra-evaluator.cc', + dependencies: [ + libhydra_dep, + nix_dep, + pqxx_dep, + ], + install: true, +) diff --git a/src/hydra-queue-runner/Makefile.am b/src/hydra-queue-runner/Makefile.am deleted file mode 100644 index 117112f6..00000000 --- a/src/hydra-queue-runner/Makefile.am +++ /dev/null @@ -1,8 +0,0 @@ -bin_PROGRAMS = hydra-queue-runner - -hydra_queue_runner_SOURCES = hydra-queue-runner.cc queue-monitor.cc dispatcher.cc \ - builder.cc build-result.cc build-remote.cc \ - hydra-build-result.hh counter.hh state.hh db.hh \ - nar-extractor.cc nar-extractor.hh -hydra_queue_runner_LDADD = $(NIX_LIBS) -lpqxx -lprometheus-cpp-pull -lprometheus-cpp-core -hydra_queue_runner_CXXFLAGS = $(NIX_CFLAGS) -Wall -I ../libhydra -Wno-deprecated-declarations diff --git a/src/hydra-queue-runner/meson.build b/src/hydra-queue-runner/meson.build new file mode 100644 index 00000000..1c20299f --- /dev/null +++ b/src/hydra-queue-runner/meson.build @@ -0,0 +1,22 @@ +srcs = files( + 'builder.cc', + 'build-remote.cc', + 'build-result.cc', + 'dispatcher.cc', + 'hydra-queue-runner.cc', + 'nar-extractor.cc', + 'queue-monitor.cc', +) + +hydra_queue_runner = executable('hydra-queue-runner', + 'hydra-queue-runner.cc', + srcs, + dependencies: [ + libhydra_dep, + nix_dep, + pqxx_dep, + prom_cpp_core_dep, + prom_cpp_pull_dep, + ], + install: true, +) diff --git a/src/lib/Makefile.am b/src/lib/Makefile.am deleted file mode 100644 index 434868e0..00000000 --- a/src/lib/Makefile.am +++ /dev/null @@ -1,22 +0,0 @@ -PERL_MODULES = \ - $(wildcard *.pm) \ - $(wildcard Hydra/*.pm) \ - $(wildcard Hydra/Helper/*.pm) \ - $(wildcard Hydra/Model/*.pm) \ - $(wildcard Hydra/View/*.pm) \ - $(wildcard Hydra/Schema/*.pm) \ - $(wildcard Hydra/Schema/Result/*.pm) \ - $(wildcard 
Hydra/Schema/ResultSet/*.pm) \ - $(wildcard Hydra/Controller/*.pm) \ - $(wildcard Hydra/Base/*.pm) \ - $(wildcard Hydra/Base/Controller/*.pm) \ - $(wildcard Hydra/Script/*.pm) \ - $(wildcard Hydra/Component/*.pm) \ - $(wildcard Hydra/Event/*.pm) \ - $(wildcard Hydra/Plugin/*.pm) - -EXTRA_DIST = \ - $(PERL_MODULES) - -hydradir = $(libexecdir)/hydra/lib -nobase_hydra_DATA = $(PERL_MODULES) diff --git a/src/libhydra/meson.build b/src/libhydra/meson.build new file mode 100644 index 00000000..1866233c --- /dev/null +++ b/src/libhydra/meson.build @@ -0,0 +1,5 @@ +libhydra_inc = include_directories('.') + +libhydra_dep = declare_dependency( + include_directories: [libhydra_inc], +) diff --git a/src/meson.build b/src/meson.build new file mode 100644 index 00000000..8c7562ed --- /dev/null +++ b/src/meson.build @@ -0,0 +1,86 @@ +# Native code +subdir('libhydra') +subdir('hydra-eval-jobs') +subdir('hydra-evaluator') +subdir('hydra-queue-runner') + +hydra_libexecdir = get_option('libexecdir') / 'hydra' + +# Data and interpreted +foreach dir : ['lib', 'root'] + install_subdir(dir, + install_dir: hydra_libexecdir, + ) +endforeach +subdir('sql') +subdir('ttf') + +# Static files for website + +hydra_libexecdir_static = hydra_libexecdir / 'root' / 'static' + +## Bootstrap + +bootstrap_name = 'bootstrap-4.3.1-dist' +bootstrap = custom_target( + 'extract-bootstrap', + input: 'root' / (bootstrap_name + '.zip'), + output: bootstrap_name, + command: ['unzip', '-u', '-d', '@OUTDIR@', '@INPUT@'], +) +custom_target( + 'name-bootstrap', + input: bootstrap, + output: 'bootstrap', + command: ['cp', '-r', '@INPUT@' , '@OUTPUT@'], + install: true, + install_dir: hydra_libexecdir_static, +) + +## Flot + +custom_target( + 'extract-flot', + input: 'root' / 'flot-0.8.3.zip', + output: 'flot', + command: ['unzip', '-u', '-d', '@OUTDIR@', '@INPUT@'], + install: true, + install_dir: hydra_libexecdir_static / 'js', +) + +## Fontawesome + +fontawesome_name = 'fontawesome-free-5.10.2-web' +fontawesome = custom_target( + 'extract-fontawesome', + input: 'root' / (fontawesome_name + '.zip'), + output: fontawesome_name, + command: ['unzip', '-u', '-d', '@OUTDIR@', '@INPUT@'], +) +custom_target( + 'name-fontawesome-css', + input: fontawesome, + output: 'css', + command: ['cp', '-r', '@INPUT@/css', '@OUTPUT@'], + install: true, + install_dir: hydra_libexecdir_static / 'fontawesome', +) +custom_target( + 'name-fontawesome-webfonts', + input: fontawesome, + output: 'webfonts', + command: ['cp', '-r', '@INPUT@/webfonts', '@OUTPUT@'], + install: true, + install_dir: hydra_libexecdir_static / 'fontawesome', +) + +# Scripts + +install_subdir('script', + install_dir: get_option('bindir'), + exclude_files: [ + 'hydra-dev-server', + ], + install_mode: 'rwxr-xr-x', + strip_directory: true, +) diff --git a/src/root/Makefile.am b/src/root/Makefile.am deleted file mode 100644 index 163b96e0..00000000 --- a/src/root/Makefile.am +++ /dev/null @@ -1,39 +0,0 @@ -TEMPLATES = $(wildcard *.tt) -STATIC = \ - $(wildcard static/images/*) \ - $(wildcard static/css/*) \ - static/js/bootbox.min.js \ - static/js/popper.min.js \ - static/js/common.js \ - static/js/jquery/jquery-3.4.1.min.js \ - static/js/jquery/jquery-ui-1.10.4.min.js - -FLOT = flot-0.8.3.zip -BOOTSTRAP = bootstrap-4.3.1-dist.zip -FONTAWESOME = fontawesome-free-5.10.2-web.zip - -ZIPS = $(FLOT) $(BOOTSTRAP) $(FONTAWESOME) - -EXTRA_DIST = $(TEMPLATES) $(STATIC) $(ZIPS) - -hydradir = $(libexecdir)/hydra/root -nobase_hydra_DATA = $(EXTRA_DIST) - -all: - mkdir -p $(srcdir)/static/js - unzip -u -d 
$(srcdir)/static $(BOOTSTRAP) - rm -rf $(srcdir)/static/bootstrap - mv $(srcdir)/static/$(basename $(BOOTSTRAP)) $(srcdir)/static/bootstrap - unzip -u -d $(srcdir)/static/js $(FLOT) - unzip -u -d $(srcdir)/static $(FONTAWESOME) - rm -rf $(srcdir)/static/fontawesome - mv $(srcdir)/static/$(basename $(FONTAWESOME)) $(srcdir)/static/fontawesome - -install-data-local: $(ZIPS) - mkdir -p $(hydradir)/static/js - cp -prvd $(srcdir)/static/js/* $(hydradir)/static/js - mkdir -p $(hydradir)/static/bootstrap - cp -prvd $(srcdir)/static/bootstrap/* $(hydradir)/static/bootstrap - mkdir -p $(hydradir)/static/fontawesome/{css,webfonts} - cp -prvd $(srcdir)/static/fontawesome/css/* $(hydradir)/static/fontawesome/css - cp -prvd $(srcdir)/static/fontawesome/webfonts/* $(hydradir)/static/fontawesome/webfonts diff --git a/src/script/Makefile.am b/src/script/Makefile.am deleted file mode 100644 index 466d3153..00000000 --- a/src/script/Makefile.am +++ /dev/null @@ -1,19 +0,0 @@ -EXTRA_DIST = \ - $(distributable_scripts) - -distributable_scripts = \ - hydra-backfill-ids \ - hydra-init \ - hydra-eval-jobset \ - hydra-server \ - hydra-update-gc-roots \ - hydra-s3-backup-collect-garbage \ - hydra-create-user \ - hydra-notify \ - hydra-send-stats \ - nix-prefetch-git \ - nix-prefetch-bzr \ - nix-prefetch-hg - -bin_SCRIPTS = \ - $(distributable_scripts) diff --git a/src/sql/Makefile.am b/src/sql/Makefile.am deleted file mode 100644 index a6b96886..00000000 --- a/src/sql/Makefile.am +++ /dev/null @@ -1,9 +0,0 @@ -sqldir = $(libexecdir)/hydra/sql -nobase_dist_sql_DATA = \ - hydra.sql \ - test.sql \ - upgrade-*.sql \ - update-dbix.pl - -update-dbix: hydra.sql - ./update-dbix-harness.sh diff --git a/src/sql/meson.build b/src/sql/meson.build new file mode 100644 index 00000000..2da35b3c --- /dev/null +++ b/src/sql/meson.build @@ -0,0 +1,90 @@ +sql_files = files( + 'hydra.sql', + 'test.sql', + 'update-dbix.pl', + 'upgrade-2.sql', + 'upgrade-3.sql', + 'upgrade-4.sql', + 'upgrade-5.sql', + 'upgrade-6.sql', + 'upgrade-7.sql', + 'upgrade-8.sql', + 'upgrade-9.sql', + 'upgrade-10.sql', + 'upgrade-11.sql', + 'upgrade-12.sql', + 'upgrade-13.sql', + 'upgrade-14.sql', + 'upgrade-15.sql', + 'upgrade-16.sql', + 'upgrade-17.sql', + 'upgrade-18.sql', + 'upgrade-19.sql', + 'upgrade-20.sql', + 'upgrade-21.sql', + 'upgrade-22.sql', + 'upgrade-23.sql', + 'upgrade-24.sql', + 'upgrade-25.sql', + 'upgrade-26.sql', + 'upgrade-27.sql', + 'upgrade-28.sql', + 'upgrade-29.sql', + 'upgrade-30.sql', + 'upgrade-31.sql', + 'upgrade-32.sql', + 'upgrade-33.sql', + 'upgrade-34.sql', + 'upgrade-35.sql', + 'upgrade-36.sql', + 'upgrade-37.sql', + 'upgrade-38.sql', + 'upgrade-39.sql', + 'upgrade-40.sql', + 'upgrade-41.sql', + 'upgrade-42.sql', + 'upgrade-43.sql', + 'upgrade-44.sql', + 'upgrade-45.sql', + 'upgrade-46.sql', + 'upgrade-47.sql', + 'upgrade-48.sql', + 'upgrade-49.sql', + 'upgrade-50.sql', + 'upgrade-51.sql', + 'upgrade-52.sql', + 'upgrade-53.sql', + 'upgrade-54.sql', + 'upgrade-55.sql', + 'upgrade-56.sql', + 'upgrade-57.sql', + 'upgrade-58.sql', + 'upgrade-59.sql', + 'upgrade-60.sql', + 'upgrade-61.sql', + 'upgrade-62.sql', + 'upgrade-63.sql', + 'upgrade-64.sql', + 'upgrade-65.sql', + 'upgrade-66.sql', + 'upgrade-67.sql', + 'upgrade-68.sql', + 'upgrade-69.sql', + 'upgrade-70.sql', + 'upgrade-71.sql', + 'upgrade-72.sql', + 'upgrade-73.sql', + 'upgrade-74.sql', + 'upgrade-75.sql', + 'upgrade-76.sql', + 'upgrade-77.sql', + 'upgrade-78.sql', + 'upgrade-79.sql', + 'upgrade-80.sql', + 'upgrade-81.sql', + 'upgrade-82.sql', + 'upgrade-83.sql', + 
'upgrade-84.sql', +) + +install_data(sql_files, install_dir: hydra_libexecdir / 'sql') diff --git a/src/ttf/Makefile.am b/src/ttf/Makefile.am deleted file mode 100644 index eba78239..00000000 --- a/src/ttf/Makefile.am +++ /dev/null @@ -1,4 +0,0 @@ -EXTRA_DIST = COPYING.LIB StayPuft.ttf - -ttfdir = $(libexecdir)/hydra/ttf -nobase_ttf_DATA = $(EXTRA_DIST) diff --git a/src/ttf/meson.build b/src/ttf/meson.build new file mode 100644 index 00000000..260439e5 --- /dev/null +++ b/src/ttf/meson.build @@ -0,0 +1,5 @@ +data_files = files( + 'StayPuft.ttf', + 'COPYING.LIB', +) +install_data(data_files, install_dir: hydra_libexecdir / 'ttf') diff --git a/t/Makefile.am b/t/Makefile.am deleted file mode 100644 index 9c14c1e3..00000000 --- a/t/Makefile.am +++ /dev/null @@ -1,39 +0,0 @@ -TESTS_ENVIRONMENT = \ - BZR_HOME="$(abs_builddir)/data" \ - HYDRA_DBI="dbi:Pg:dbname=hydra-test-suite;port=6433" \ - HYDRA_DATA="$(abs_builddir)/data" \ - HYDRA_HOME="$(top_srcdir)/src" \ - HYDRA_CONFIG= \ - NIX_REMOTE= \ - NIX_REMOTE_SYSTEMS= \ - NIX_CONF_DIR="$(abs_builddir)/nix/etc/nix" \ - NIX_STATE_DIR="$(abs_builddir)/nix/var/nix" \ - NIX_STORE_DIR="$(abs_builddir)/nix/store" \ - NIX_LOG_DIR="$(abs_builddir)/nix/var/log/nix" \ - PGHOST=/tmp \ - PERL5LIB="$(srcdir):$(abs_top_srcdir)/src/lib:$$PERL5LIB" \ - PYTHONPATH= \ - PATH=$(abs_top_srcdir)/src/hydra-evaluator:$(abs_top_srcdir)/src/script:$(abs_top_srcdir)/src/hydra-eval-jobs:$(abs_top_srcdir)/src/hydra-queue-runner:$$PATH \ - perl -w - -EXTRA_DIST = \ - $(wildcard *.pm) \ - $(wildcard jobs/*.nix) \ - $(wildcard jobs/*.sh) \ - $(TESTS) - -TESTS = \ - perlcritic.pl \ - test.pl - -check_SCRIPTS = repos - -repos: dirs - -dirs: - mkdir -p data - touch data/hydra.conf - mkdir -p nix - mkdir -p nix/etc/nix - mkdir -p nix/store - mkdir -p nix/var diff --git a/t/lib/HydraTestContext.pm b/t/lib/HydraTestContext.pm index 1d6fa909..d1de2212 100644 --- a/t/lib/HydraTestContext.pm +++ b/t/lib/HydraTestContext.pm @@ -4,6 +4,8 @@ use warnings; package HydraTestContext; use File::Path qw(make_path); use File::Basename; +use File::Copy::Recursive qw(rcopy); +use File::Which qw(which); use Cwd qw(abs_path getcwd); use CliRunners; use Hydra::Helper::Exec; @@ -77,6 +79,13 @@ sub new { ); $ENV{'HYDRA_DBI'} = $pgsql->dsn; + my $jobsdir = "$dir/jobs"; + rcopy(abs_path(dirname(__FILE__) . "/../jobs"), $jobsdir); + + my $coreutils_path = dirname(which 'install'); + replace_variable_in_file($jobsdir . "/config.nix", '@testPath@', $coreutils_path); + replace_variable_in_file($jobsdir . "/declarative/project.json", '@jobsPath@', $jobsdir); + my $self = bless { _db => undef, db_handle => $pgsql, @@ -84,7 +93,7 @@ sub new { nix_state_dir => $nix_state_dir, nix_log_dir => $nix_log_dir, testdir => abs_path(dirname(__FILE__) . "/.."), - jobsdir => abs_path(dirname(__FILE__) . 
"/../jobs"), + jobsdir => $jobsdir, deststoredir => $deststoredir, }, $class; @@ -243,6 +252,18 @@ sub write_file { close $fh; } +sub replace_variable_in_file { + my ($fn, $var, $val) = @_; + + open (my $input, '<', "$fn.in") or die $!; + open (my $output, '>', $fn) or die $!; + + while (my $line = <$input>) { + $line =~ s/$var/$val/g; + print $output $line; + } +} + sub rand_chars { return sprintf("t%08X", rand(0xFFFFFFFF)); } diff --git a/t/meson.build b/t/meson.build new file mode 100644 index 00000000..11044a03 --- /dev/null +++ b/t/meson.build @@ -0,0 +1,43 @@ +fs = import('fs') + +test('perlcritic', + perl, + args: ['-w', files('perlcritic.pl')], + workdir: meson.project_source_root(), + timeout: -1, +) + +testenv = environment( + { + 'BZR_HOME': meson.current_build_dir() / 'data', + 'HYDRA_DBI': 'dbi:Pg:dbname=hydra-test-suite;port=6433', + 'HYDRA_DATA': meson.current_build_dir() / 'data', + 'HYDRA_HOME': meson.project_source_root() / 'src', + 'PGHOST': '/tmp', + 'PYTHONPATH': '', + + # libpqxx seems to randomly crash with certain values of MALLOC_PERTURB_, + # set by default by Meson's test(). Very promising, high quality software. + 'MALLOC_PERTURB_': '0', + }, +) +testenv.prepend('PERL5LIB', + meson.current_source_dir(), + meson.project_source_root() / 'src/lib', + separator: ':' +) +testenv.prepend('PATH', + fs.parent(hydra_eval_jobs.full_path()), + fs.parent(hydra_evaluator.full_path()), + fs.parent(hydra_queue_runner.full_path()), + meson.project_source_root() / 'src/script', + separator: ':' +) + +test('testsuite', + perl, + args: ['-I', meson.current_source_dir() / 'lib', '-w', files('test.pl')], + env: testenv, + workdir: meson.current_source_dir(), + timeout: -1, +) From 6456c1d7d636ed9d046f24c81873b00d226b76da Mon Sep 17 00:00:00 2001 From: Chatnoir Miki Date: Mon, 25 Nov 2024 11:35:08 +0800 Subject: [PATCH 37/46] reproduce.tt: Use realpath for tmpDir to fix macOS compatibility --- src/root/reproduce.tt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/root/reproduce.tt b/src/root/reproduce.tt index dc88edfa..d8a77518 100644 --- a/src/root/reproduce.tt +++ b/src/root/reproduce.tt @@ -7,7 +7,7 @@ main() { set -e -tmpDir=${TMPDIR:-/tmp}/build-[% build.id +%] +tmpDir=$(realpath "${TMPDIR:-/tmp}")/build-[% build.id +%] declare -a args extraArgs From 9de9cb0ad8e23f935eaa24d7e197c0deba885da9 Mon Sep 17 00:00:00 2001 From: Aaron Honeycutt Date: Tue, 26 Nov 2024 00:52:24 -0700 Subject: [PATCH 38/46] Update README (#1271) * Update version in example * Update docs to fix invalid indentifier when using 'hello' * fix build issue for hello example --------- Co-authored-by: Aaron Honeycutt --- README.md | 8 ++++---- examples/hello.nix | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 58373052..8ac18ac9 100644 --- a/README.md +++ b/README.md @@ -39,16 +39,16 @@ In order to evaluate and build anything you need to create _projects_ that conta #### Creating A Project Log in as administrator, click "_Admin_" and select "_Create project_". Fill the form as follows: -- **Identifier**: `hello` +- **Identifier**: `hello-project` - **Display name**: `hello` - **Description**: `hello project` Click "_Create project_". #### Creating A Jobset -After creating a project you are forwarded to the project page. Click "_Actions_" and choose "_Create jobset_". Fill the form with the following values: +After creating a project you are forwarded to the project page. Click "_Actions_" and choose "_Create jobset_". 
Change **Type** to Legacy for the example below. Fill the form with the following values: -- **Identifier**: `hello` +- **Identifier**: `hello-project` - **Nix expression**: `examples/hello.nix` in `hydra` - **Check interval**: 60 - **Scheduling shares**: 1 @@ -57,7 +57,7 @@ We have to add two inputs for this jobset. One for _nixpkgs_ and one for _hydra_ - **Input name**: `nixpkgs` - **Type**: `Git checkout` -- **Value**: `https://github.com/nixos/nixpkgs-channels nixos-20.03` +- **Value**: `https://github.com/NixOS/nixpkgs nixos-24.05` - **Input name**: `hydra` - **Type**: `Git checkout` diff --git a/examples/hello.nix b/examples/hello.nix index 5a5d2585..84707025 100644 --- a/examples/hello.nix +++ b/examples/hello.nix @@ -1,5 +1,5 @@ # -# jobset example file. This file canbe referenced as Nix expression +# jobset example file. This file can be referenced as Nix expression # in a jobset configuration along with inputs for nixpkgs and the # repository containing this file. # From efadb6a26c694cc23380406ab471bc035b89ca44 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 3 Dec 2024 01:44:55 +0100 Subject: [PATCH 39/46] Make hydra-queue-runner want network-online.target Just ordering yourself after network-online.target will not guarantee that it will be loaded. You'll have to either want or require it. Hence the following trace on recent nixpkgs versions: evaluation warning: hydra-queue-runner.service is ordered after 'network-online.target' but doesn't depend on it --- nixos-modules/hydra.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/nixos-modules/hydra.nix b/nixos-modules/hydra.nix index c471b0ef..4fc2d311 100644 --- a/nixos-modules/hydra.nix +++ b/nixos-modules/hydra.nix @@ -338,6 +338,7 @@ in systemd.services.hydra-queue-runner = { wantedBy = [ "multi-user.target" ]; requires = [ "hydra-init.service" ]; + wants = [ "network-online.target" ]; after = [ "hydra-init.service" "network.target" "network-online.target" ]; path = [ cfg.package pkgs.nettools pkgs.openssh pkgs.bzip2 config.nix.package ]; restartTriggers = [ hydraConf ]; From 8a8ac148772d19188e8e99900cf50ae7b1fc085d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 6 Feb 2025 21:30:49 -0500 Subject: [PATCH 40/46] Test using Hydra with flakes It seemed there was no self-contained end-to-end test actually doing this?! Among other things, this will help ensure that the switch-over to `nix-eval-jobs` is correct. --- t/evaluator/evaluate-flake.t | 67 ++++++++++++++++++++++++++++++++ t/jobs/flake-checks/flake.nix | 6 +++ t/jobs/flake-hydraJobs/flake.nix | 6 +++ t/lib/HydraTestContext.pm | 42 +++++++++++++++----- 4 files changed, 112 insertions(+), 9 deletions(-) create mode 100644 t/evaluator/evaluate-flake.t create mode 100644 t/jobs/flake-checks/flake.nix create mode 100644 t/jobs/flake-hydraJobs/flake.nix diff --git a/t/evaluator/evaluate-flake.t b/t/evaluator/evaluate-flake.t new file mode 100644 index 00000000..d884f25a --- /dev/null +++ b/t/evaluator/evaluate-flake.t @@ -0,0 +1,67 @@ +use feature 'unicode_strings'; +use strict; +use warnings; +use Setup; +use Test2::V0; +use File::Copy qw(cp); + +my $ctx = test_context( + nix_config => qq| + experimental-features = nix-command flakes + |, + hydra_config => q| + + evaluator_pure_eval = false + + | +); + +sub checkFlake { + my ($flake) = @_; + + cp($ctx->jobsdir . "/basic.nix", $ctx->jobsdir . "/" . $flake); + cp($ctx->jobsdir . "/config.nix", $ctx->jobsdir . "/" . $flake); + cp($ctx->jobsdir . "/empty-dir-builder.sh", $ctx->jobsdir . "/" . 
$flake); + cp($ctx->jobsdir . "/fail.sh", $ctx->jobsdir . "/" . $flake); + cp($ctx->jobsdir . "/succeed-with-failed.sh", $ctx->jobsdir . "/" . $flake); + + chmod 0755, $ctx->jobsdir . "/" . $flake . "/empty-dir-builder.sh"; + chmod 0755, $ctx->jobsdir . "/" . $flake . "/fail.sh"; + chmod 0755, $ctx->jobsdir . "/" . $flake . "/succeed-with-failed.sh"; + + my $builds = $ctx->makeAndEvaluateJobset( + flake => 'path:' . $ctx->jobsdir . "/" . $flake, + build => 1 + ); + + subtest "Build: succeed_with_failed" => sub { + my $build = $builds->{"succeed_with_failed"}; + + is($build->finished, 1, "Build should be finished."); + is($build->buildstatus, 6, "succeeeded-but-failed should have buildstatus 6."); + }; + + subtest "Build: empty_dir" => sub { + my $build = $builds->{"empty_dir"}; + + is($build->finished, 1, "Build should be finished."); + is($build->buildstatus, 0, "Should have succeeded."); + }; + + subtest "Build: fails" => sub { + my $build = $builds->{"fails"}; + + is($build->finished, 1, "Build should be finished."); + is($build->buildstatus, 1, "Should have failed."); + }; +} + +subtest "Flake using `checks`" => sub { + checkFlake 'flake-checks' +}; + +subtest "Flake using `hydraJobs`" => sub { + checkFlake 'flake-hydraJobs' +}; + +done_testing; diff --git a/t/jobs/flake-checks/flake.nix b/t/jobs/flake-checks/flake.nix new file mode 100644 index 00000000..489fa9ec --- /dev/null +++ b/t/jobs/flake-checks/flake.nix @@ -0,0 +1,6 @@ +{ + outputs = { ... }: { + checks = + import ./basic.nix; + }; +} diff --git a/t/jobs/flake-hydraJobs/flake.nix b/t/jobs/flake-hydraJobs/flake.nix new file mode 100644 index 00000000..c02ccddd --- /dev/null +++ b/t/jobs/flake-hydraJobs/flake.nix @@ -0,0 +1,6 @@ +{ + outputs = { ... }: { + hydraJobs = + import ./basic.nix; + }; +} diff --git a/t/lib/HydraTestContext.pm b/t/lib/HydraTestContext.pm index d1de2212..34d41eb2 100644 --- a/t/lib/HydraTestContext.pm +++ b/t/lib/HydraTestContext.pm @@ -165,14 +165,25 @@ sub nix_state_dir { sub makeAndEvaluateJobset { my ($self, %opts) = @_; - my $expression = $opts{'expression'} || die "Mandatory 'expression' option not passed to makeAndEvaluateJobset.\n"; + my $expression = $opts{'expression'}; + my $flake = $opts{'flake'}; + if (not $expression and not $flake) { + die "One of 'expression' or 'flake' must be passed to makeEvaluateJobset.\n"; + } + my $jobsdir = $opts{'jobsdir'} // $self->jobsdir; my $should_build = $opts{'build'} // 0; - my $jobsetCtx = $self->makeJobset( - expression => $expression, + my %args = ( jobsdir => $jobsdir, ); + if ($expression) { + $args{expression} = $expression; + } + if ($flake) { + $args{flake} = $flake; + } + my $jobsetCtx = $self->makeJobset(%args); my $jobset = $jobsetCtx->{"jobset"}; evalSucceeds($jobset) or die "Evaluating jobs/$expression should exit with return code 0.\n"; @@ -195,7 +206,7 @@ sub makeAndEvaluateJobset { # # In return, you get a hash of the user, project, and jobset records. # -# This always uses an `expression` from the `jobsdir` directory. +# This always uses an `expression` or `flake` from the `jobsdir` directory. 
# # Hash Parameters: # @@ -204,7 +215,12 @@ sub makeAndEvaluateJobset { sub makeJobset { my ($self, %opts) = @_; - my $expression = $opts{'expression'} || die "Mandatory 'expression' option not passed to makeJobset.\n"; + my $expression = $opts{'expression'}; + my $flake = $opts{'flake'}; + if (not $expression and not $flake) { + die "One of 'expression' or 'flake' must be passed to makeJobset.\n"; + } + my $jobsdir = $opts{'jobsdir'} // $self->jobsdir; # Create a new user for this test @@ -222,12 +238,20 @@ sub makeJobset { }); # Create a new jobset for this test and set up the inputs - my $jobset = $project->jobsets->create({ + my %args = ( name => rand_chars(), - nixexprinput => "jobs", - nixexprpath => $expression, emailoverride => "" - }); + ); + if ($expression) { + $args{type} = 0; + $args{nixexprinput} = "jobs"; + $args{nixexprpath} = $expression; + } + if ($flake) { + $args{type} = 1; + $args{flake} = $flake; + } + my $jobset = $project->jobsets->create(\%args); my $jobsetinput = $jobset->jobsetinputs->create({name => "jobs", type => "path"}); $jobsetinput->jobsetinputalts->create({altnr => 0, value => $jobsdir}); From 141b5fd0b5053230e1823f43257fa0ab58b39373 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 7 Feb 2025 16:30:14 -0500 Subject: [PATCH 41/46] Improve tests around constituents - Test how shorter names are preferred when multiple jobs resolve to the same derivation. - Test the exact aggregate map we get, by looking in the DB. --- t/evaluator/evaluate-constituents-gc.t | 49 ++++++++++++++++++++++++-- t/jobs/constituents.nix | 4 ++- t/lib/HydraTestContext.pm | 19 ++++++++-- 3 files changed, 67 insertions(+), 5 deletions(-) diff --git a/t/evaluator/evaluate-constituents-gc.t b/t/evaluator/evaluate-constituents-gc.t index a9b23e6c..2a5337b2 100644 --- a/t/evaluator/evaluate-constituents-gc.t +++ b/t/evaluator/evaluate-constituents-gc.t @@ -5,13 +5,58 @@ use Test2::V0; my $ctx = test_context(); -my $builds = $ctx->makeAndEvaluateJobset( - expression => 'constituents.nix', +my $expression = 'constituents.nix'; +my $jobsetCtx = $ctx->makeJobset( + expression => $expression, +); +my $builds = $ctx->evaluateJobset( + jobset => $jobsetCtx->{"jobset"}, + expression => $expression, + build => 0, ); my $constituentA = $builds->{"constituentA"}; my $directAggregate = $builds->{"direct_aggregate"}; my $indirectAggregate = $builds->{"indirect_aggregate"}; +my $mixedAggregate = $builds->{"mixed_aggregate"}; + +# Ensure that we get exactly the aggregates we expect +my %expected_constituents = ( + 'direct_aggregate' => { + 'constituentA' => 1, + }, + 'indirect_aggregate' => { + 'constituentA' => 1, + }, + 'mixed_aggregate' => { + # Note that `constituentA_alias` becomes `constituentA`, because + # the shorter name is preferred + 'constituentA' => 1, + 'constituentB' => 1, + }, +); + +my $rs = $ctx->db->resultset('AggregateConstituents')->search( + {}, + { + join => [ 'aggregate', 'constituent' ], # Use correct relationship names + columns => [], + '+select' => [ 'aggregate.job', 'constituent.job' ], + '+as' => [ 'aggregate_job', 'constituent_job' ], + } +); + +my %actual_constituents; +while (my $row = $rs->next) { + my $aggregate_job = $row->get_column('aggregate_job'); + my $constituent_job = $row->get_column('constituent_job'); + $actual_constituents{$aggregate_job} //= {}; + $actual_constituents{$aggregate_job}{$constituent_job} = 1; +} + +is(\%actual_constituents, \%expected_constituents, "Exact aggregate constituents as expected"); + +# Check that deletion also doesn't work 
accordingly is(system('nix-store', '--delete', $constituentA->drvpath), 256, "Deleting a constituent derivation fails"); is(system('nix-store', '--delete', $directAggregate->drvpath), 256, "Deleting the direct aggregate derivation fails"); diff --git a/t/jobs/constituents.nix b/t/jobs/constituents.nix index 5b7106b9..b8b88702 100644 --- a/t/jobs/constituents.nix +++ b/t/jobs/constituents.nix @@ -5,6 +5,8 @@ rec { builder = ./empty-dir-builder.sh; }; + constituentA_alias = constituentA; + constituentB = mkDerivation { name = "empty-dir-B"; builder = ./empty-dir-builder.sh; @@ -32,7 +34,7 @@ rec { name = "mixed_aggregate"; _hydraAggregate = true; constituents = [ - "constituentA" + "constituentA_alias" constituentB ]; builder = ./empty-dir-builder.sh; diff --git a/t/lib/HydraTestContext.pm b/t/lib/HydraTestContext.pm index 34d41eb2..27b0be0a 100644 --- a/t/lib/HydraTestContext.pm +++ b/t/lib/HydraTestContext.pm @@ -172,7 +172,6 @@ sub makeAndEvaluateJobset { } my $jobsdir = $opts{'jobsdir'} // $self->jobsdir; - my $should_build = $opts{'build'} // 0; my %args = ( jobsdir => $jobsdir, @@ -184,12 +183,28 @@ sub makeAndEvaluateJobset { $args{flake} = $flake; } my $jobsetCtx = $self->makeJobset(%args); - my $jobset = $jobsetCtx->{"jobset"}; + + return $self->evaluateJobset( + jobset => $jobsetCtx->{"jobset"}, + expression => $expression, + flake => $flake, + build => $opts{"build"} // 0, + ) +} + +sub evaluateJobset { + my ($self, %opts) = @_; + + my $jobset = $opts{'jobset'}; + + my $expression = $opts{'expression'} // $opts{'flake'}; evalSucceeds($jobset) or die "Evaluating jobs/$expression should exit with return code 0.\n"; my $builds = {}; + my $should_build = $opts{'build'}; + for my $build ($jobset->builds) { if ($should_build) { runBuild($build) or die "Build '".$build->job."' from jobs/$expression should exit with return code 0.\n"; From 0c9726af59f5a19b5fe2928a5542ed3fa8ec2849 Mon Sep 17 00:00:00 2001 From: Pierre Bourdon Date: Tue, 16 Jul 2024 04:01:14 +0200 Subject: [PATCH 42/46] flake: add nix-eval-jobs as input (cherry picked from commit 684cc50d86608cccf7500ce00af89ea34c488473) --- flake.lock | 67 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ flake.nix | 6 +++++ 2 files changed, 73 insertions(+) diff --git a/flake.lock b/flake.lock index 897785ed..8055275b 100644 --- a/flake.lock +++ b/flake.lock @@ -1,5 +1,26 @@ { "nodes": { + "flake-parts": { + "inputs": { + "nixpkgs-lib": [ + "nix-eval-jobs", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1722555600, + "narHash": "sha256-XOQkdLafnb/p9ij77byFQjDf5m5QYl9b2REiVClC+x4=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "8471fe90ad337a8074e957b69ca4d0089218391d", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "flake-parts", + "type": "github" + } + }, "libgit2": { "flake": false, "locked": { @@ -46,6 +67,30 @@ "type": "github" } }, + "nix-eval-jobs": { + "inputs": { + "flake-parts": "flake-parts", + "nix-github-actions": [], + "nixpkgs": [ + "nixpkgs" + ], + "treefmt-nix": "treefmt-nix" + }, + "locked": { + "lastModified": 1733814344, + "narHash": "sha256-3wwtKpS5tUBdjaGeSia7CotonbiRB6K5Kp0dsUt3nzU=", + "owner": "nix-community", + "repo": "nix-eval-jobs", + "rev": "889ea1406736b53cf165b6c28398aae3969418d1", + "type": "github" + }, + "original": { + "owner": "nix-community", + "ref": "release-2.24", + "repo": "nix-eval-jobs", + "type": "github" + } + }, "nixpkgs": { "locked": { "lastModified": 1726688310, @@ -66,8 +111,30 @@ "inputs": { "libgit2": "libgit2", "nix": "nix", + 
"nix-eval-jobs": "nix-eval-jobs", "nixpkgs": "nixpkgs" } + }, + "treefmt-nix": { + "inputs": { + "nixpkgs": [ + "nix-eval-jobs", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1723303070, + "narHash": "sha256-krGNVA30yptyRonohQ+i9cnK+CfCpedg6z3qzqVJcTs=", + "owner": "numtide", + "repo": "treefmt-nix", + "rev": "14c092e0326de759e16b37535161b3cb9770cea3", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "treefmt-nix", + "type": "github" + } } }, "root": "root", diff --git a/flake.nix b/flake.nix index fccd45b9..d25f451c 100644 --- a/flake.nix +++ b/flake.nix @@ -8,6 +8,9 @@ inputs.nix.inputs.nixpkgs.follows = "nixpkgs"; inputs.nix.inputs.libgit2.follows = "libgit2"; + inputs.nix-eval-jobs.url = "github:nix-community/nix-eval-jobs/release-2.24"; + inputs.nix-eval-jobs.inputs.nixpkgs.follows = "nixpkgs"; + # hide nix dev tooling from our lock file inputs.nix.inputs.flake-parts.follows = ""; inputs.nix.inputs.git-hooks-nix.follows = ""; @@ -15,6 +18,9 @@ inputs.nix.inputs.nixpkgs-23-11.follows = ""; inputs.nix.inputs.flake-compat.follows = ""; + # hide nix-eval-jobs dev tooling from our lock file + inputs.nix-eval-jobs.inputs.nix-github-actions.follows = ""; + outputs = { self, nixpkgs, nix, ... }: let systems = [ "x86_64-linux" "aarch64-linux" ]; From d84ff32ce600204c6473889a3ff16cd6053533c9 Mon Sep 17 00:00:00 2001 From: Pierre Bourdon Date: Tue, 16 Jul 2024 04:22:41 +0200 Subject: [PATCH 43/46] hydra-eval-jobset: Use `nix-eval-jobs` instead of `hydra-eval-jobs` incrementally ingest eval results nix-eval-jobs streams output, unlike hydra-eval-jobs. Now that we've migrated, we can use this to: 1. Use less RAM by avoiding buffering a whole eval's worth of metadata into a Perl string and an array of JSON objects. 2. Make evals latency a bit lower by allowing the queue runner to start ingesting builds faster. Also use the newly-restored constituents support in `nix-eval-jobs` Note, we pass --workers and --max-memory-size to n-e-j Lost in the h-e-j -> n-e-j migration, causing evaluation to always be single threaded and limited to 4GiB RAM. Follow the config settings like h-e-j used to do (via C++ code). `nix-eval-jobs` should check `hydraJobs` and then `checks` with flakes (cherry picked from commit 6d4ccff43c41adaf6e4b2b9bced7243bc2f6e97b) (cherry picked from commit b0e9b4b2f99f9d8f5c4e780e89f955c394b5ced4) (cherry picked from commit cdfc5c81e8037d3e4818a3e459d0804b2c157ea9) (cherry picked from commit 4b107e6ff36bd89958fba36e0fe0340903e7cd13) Co-Authored-By: Maximilian Bosch --- flake.nix | 4 +- package.nix | 3 + src/script/hydra-eval-jobset | 190 +++++++++++++-------- t/evaluator/evaluate-constituents-broken.t | 6 +- t/evaluator/evaluate-meta.t | 22 +++ t/jobs/meta.nix | 17 ++ t/queue-runner/constituents.t | 4 +- 7 files changed, 167 insertions(+), 79 deletions(-) create mode 100644 t/evaluator/evaluate-meta.t create mode 100644 t/jobs/meta.nix diff --git a/flake.nix b/flake.nix index d25f451c..f12b8c2f 100644 --- a/flake.nix +++ b/flake.nix @@ -21,7 +21,7 @@ # hide nix-eval-jobs dev tooling from our lock file inputs.nix-eval-jobs.inputs.nix-github-actions.follows = ""; - outputs = { self, nixpkgs, nix, ... }: + outputs = { self, nixpkgs, nix, nix-eval-jobs, ... 
}: let systems = [ "x86_64-linux" "aarch64-linux" ]; forEachSystem = nixpkgs.lib.genAttrs systems; @@ -32,6 +32,7 @@ overlays.default = final: prev: { hydra = final.callPackage ./package.nix { inherit (nixpkgs.lib) fileset; + nix-eval-jobs = nix-eval-jobs.packages.${final.system}.default; rawSrc = self; nix-perl-bindings = final.nixComponents.nix-perl-bindings; }; @@ -75,6 +76,7 @@ packages = forEachSystem (system: { hydra = nixpkgs.legacyPackages.${system}.callPackage ./package.nix { inherit (nixpkgs.lib) fileset; + nix-eval-jobs = nix-eval-jobs.packages.${system}.default; rawSrc = self; nix = nix.packages.${system}.nix; nix-perl-bindings = nix.hydraJobs.perlBindings.${system}; diff --git a/package.nix b/package.nix index f944fe2b..ceb2c706 100644 --- a/package.nix +++ b/package.nix @@ -50,6 +50,7 @@ , xz , gnutar , gnused +, nix-eval-jobs , rpm , dpkg @@ -190,6 +191,7 @@ stdenv.mkDerivation (finalAttrs: { openldap postgresql_13 pixz + nix-eval-jobs ]; checkInputs = [ @@ -218,6 +220,7 @@ stdenv.mkDerivation (finalAttrs: { darcs gnused breezy + nix-eval-jobs ] ++ lib.optionals stdenv.isLinux [ rpm dpkg cdrkit ] ); diff --git a/src/script/hydra-eval-jobset b/src/script/hydra-eval-jobset index 72a386f5..9d500a13 100755 --- a/src/script/hydra-eval-jobset +++ b/src/script/hydra-eval-jobset @@ -17,6 +17,7 @@ use Hydra::Helper::Nix; use Hydra::Model::DB; use Hydra::Plugin; use Hydra::Schema; +use IPC::Run; use JSON::MaybeXS; use Net::Statsd; use Nix::Store; @@ -357,22 +358,32 @@ sub evalJobs { my @cmd; if (defined $flakeRef) { - @cmd = ("hydra-eval-jobs", - "--flake", $flakeRef, - "--gc-roots-dir", getGCRootsDir, - "--max-jobs", 1); + my $nix_expr = + "let " . + "flake = builtins.getFlake (toString \"$flakeRef\"); " . + "in " . + "flake.hydraJobs " . + "or flake.checks " . + "or (throw \"flake '$flakeRef' does not provide any Hydra jobs or checks\")"; + + @cmd = ("nix-eval-jobs", "--expr", $nix_expr); } else { my $nixExprInput = $inputInfo->{$nixExprInputName}->[0] or die "cannot find the input containing the job expression\n"; - @cmd = ("hydra-eval-jobs", + @cmd = ("nix-eval-jobs", "<" . $nixExprInputName . "/" . $nixExprPath . ">", - "--gc-roots-dir", getGCRootsDir, - "--max-jobs", 1, inputsToArgs($inputInfo)); } - push @cmd, "--no-allow-import-from-derivation" if $config->{allow_import_from_derivation} // "true" ne "true"; + push @cmd, ("--gc-roots-dir", getGCRootsDir); + push @cmd, ("--max-jobs", 1); + push @cmd, "--meta"; + push @cmd, "--constituents"; + push @cmd, "--force-recurse"; + push @cmd, ("--option", "allow-import-from-derivation", "false") if $config->{allow_import_from_derivation} // "true" ne "true"; + push @cmd, ("--workers", $config->{evaluator_workers} // 1); + push @cmd, ("--max-memory-size", $config->{evaluator_max_memory_size} // 4096); if (defined $ENV{'HYDRA_DEBUG'}) { sub escape { @@ -384,14 +395,33 @@ sub evalJobs { print STDERR "evaluator: @escaped\n"; } - (my $res, my $jobsJSON, my $stderr) = captureStdoutStderr(21600, @cmd); - die "hydra-eval-jobs returned " . ($res & 127 ? "signal $res" : "exit code " . ($res >> 8)) - . ":\n" . ($stderr ? decode("utf-8", $stderr) : "(no output)\n") - if $res; + my $evalProc = IPC::Run::start \@cmd, + '>', IPC::Run::new_chunker, \my $out, + '2>', \my $err; - print STDERR "$stderr"; + return sub { + while (1) { + $evalProc->pump; + if (!defined $out && !defined $err) { + $evalProc->finish; + if ($?) { + die "nix-eval-jobs returned " . ($? & 127 ? "signal $?" : "exit code " . ($? >> 8)) . 
"\n"; + } + return; + } - return decode_json($jobsJSON); + if (defined $err) { + print STDERR "$err"; + undef $err; + } + + if (defined $out && $out ne '') { + my $job = decode_json($out); + undef $out; + return $job; + } + } + }; } @@ -420,7 +450,7 @@ sub checkBuild { my $firstOutputName = $outputNames[0]; my $firstOutputPath = $buildInfo->{outputs}->{$firstOutputName}; - my $jobName = $buildInfo->{jobName} or die; + my $jobName = $buildInfo->{attr} or die; my $drvPath = $buildInfo->{drvPath} or die; my $build; @@ -474,9 +504,30 @@ sub checkBuild { my $time = time(); - sub null { - my ($s) = @_; - return $s eq "" ? undef : $s; + sub getMeta { + my ($s, $def) = @_; + return ($s || "") eq "" ? $def : $s; + } + + sub getMetaStrings { + my ($v, $k, $acc) = @_; + my $t = ref $v; + + if ($t eq 'HASH') { + push @$acc, $v->{$k} if exists $v->{$k}; + } elsif ($t eq 'ARRAY') { + getMetaStrings($_, $k, $acc) foreach @$v; + } elsif (defined $v) { + push @$acc, $v; + } + } + + sub getMetaConcatStrings { + my ($v, $k) = @_; + + my @strings; + getMetaStrings($v, $k, \@strings); + return join(", ", @strings) || undef; } # Add the build to the database. @@ -484,19 +535,19 @@ sub checkBuild { { timestamp => $time , jobset_id => $jobset->id , job => $jobName - , description => null($buildInfo->{description}) - , license => null($buildInfo->{license}) - , homepage => null($buildInfo->{homepage}) - , maintainers => null($buildInfo->{maintainers}) - , maxsilent => $buildInfo->{maxSilent} - , timeout => $buildInfo->{timeout} - , nixname => $buildInfo->{nixName} + , description => getMeta($buildInfo->{meta}->{description}, undef) + , license => getMetaConcatStrings($buildInfo->{meta}->{license}, "shortName") + , homepage => getMeta($buildInfo->{meta}->{homepage}, undef) + , maintainers => getMetaConcatStrings($buildInfo->{meta}->{maintainers}, "email") + , maxsilent => getMeta($buildInfo->{meta}->{maxSilent}, 7200) + , timeout => getMeta($buildInfo->{meta}->{timeout}, 36000) + , nixname => $buildInfo->{name} , drvpath => $drvPath , system => $buildInfo->{system} - , priority => $buildInfo->{schedulingPriority} + , priority => getMeta($buildInfo->{meta}->{schedulingPriority}, 100) , finished => 0 , iscurrent => 1 - , ischannel => $buildInfo->{isChannel} + , ischannel => getMeta($buildInfo->{meta}->{isChannel}, 0) }); $build->buildoutputs->create({ name => $_, path => $buildInfo->{outputs}->{$_} }) @@ -665,7 +716,7 @@ sub checkJobsetWrapped { return; } - # Hash the arguments to hydra-eval-jobs and check the + # Hash the arguments to nix-eval-jobs and check the # JobsetInputHashes to see if the previous evaluation had the same # inputs. If so, bail out. my @args = ($jobset->nixexprinput // "", $jobset->nixexprpath // "", inputsToArgs($inputInfo)); @@ -687,19 +738,12 @@ sub checkJobsetWrapped { # Evaluate the job expression. my $evalStart = clock_gettime(CLOCK_MONOTONIC); - my $jobs = evalJobs($project->name . ":" . $jobset->name, $inputInfo, $jobset->nixexprinput, $jobset->nixexprpath, $flakeRef); - my $evalStop = clock_gettime(CLOCK_MONOTONIC); - - if ($jobsetsJobset) { - my @keys = keys %$jobs; - die "The .jobsets jobset must only have a single job named 'jobsets'" - unless (scalar @keys) == 1 && $keys[0] eq "jobsets"; - } - Net::Statsd::timing("hydra.evaluator.eval_time", int(($evalStop - $evalStart) * 1000)); + my $evalStop; + my $jobsIter = evalJobs($project->name . ":" . 
$jobset->name, $inputInfo, $jobset->nixexprinput, $jobset->nixexprpath, $flakeRef); if ($dryRun) { - foreach my $name (keys %{$jobs}) { - my $job = $jobs->{$name}; + while (defined(my $job = $jobsIter->())) { + my $name = $job->{attr}; if (defined $job->{drvPath}) { print STDERR "good job $name: $job->{drvPath}\n"; } else { @@ -709,36 +753,20 @@ sub checkJobsetWrapped { return; } - die "Jobset contains a job with an empty name. Make sure the jobset evaluates to an attrset of jobs.\n" - if defined $jobs->{""}; - - $jobs->{$_}->{jobName} = $_ for keys %{$jobs}; - - my $jobOutPathMap = {}; - my $jobsetChanged = 0; - my $dbStart = clock_gettime(CLOCK_MONOTONIC); - - # Store the error messages for jobs that failed to evaluate. my $evaluationErrorTime = time; my $evaluationErrorMsg = ""; - foreach my $job (values %{$jobs}) { - next unless defined $job->{error}; - $evaluationErrorMsg .= - ($job->{jobName} ne "" ? "in job ‘$job->{jobName}’" : "at top-level") . - ":\n" . $job->{error} . "\n\n"; - } - setJobsetError($jobset, $evaluationErrorMsg, $evaluationErrorTime); - my $evaluationErrorRecord = $db->resultset('EvaluationErrors')->create( { errormsg => $evaluationErrorMsg , errortime => $evaluationErrorTime } ); + my $jobOutPathMap = {}; + my $jobsetChanged = 0; my %buildMap; - $db->txn_do(sub { + $db->txn_do(sub { my $prevEval = getPrevJobsetEval($db, $jobset, 1); # Clear the "current" flag on all builds. Since we're in a @@ -751,7 +779,7 @@ sub checkJobsetWrapped { , evaluationerror => $evaluationErrorRecord , timestamp => time , checkouttime => abs(int($checkoutStop - $checkoutStart)) - , evaltime => abs(int($evalStop - $evalStart)) + , evaltime => 0 , hasnewbuilds => 0 , nrbuilds => 0 , flake => $flakeRef @@ -759,11 +787,24 @@ sub checkJobsetWrapped { , nixexprpath => $jobset->nixexprpath }); - # Schedule each successfully evaluated job. - foreach my $job (permute(values %{$jobs})) { - next if defined $job->{error}; - #print STDERR "considering job " . $project->name, ":", $jobset->name, ":", $job->{jobName} . "\n"; - checkBuild($db, $jobset, $ev, $inputInfo, $job, \%buildMap, $prevEval, $jobOutPathMap, $plugins); + my @jobsWithConstituents; + + while (defined(my $job = $jobsIter->())) { + if ($jobsetsJobset) { + die "The .jobsets jobset must only have a single job named 'jobsets'" + unless $job->{attr} eq "jobsets"; + } + + $evaluationErrorMsg .= + ($job->{attr} ne "" ? "in job ‘$job->{attr}’" : "at top-level") . + ":\n" . $job->{error} . "\n\n" if defined $job->{error}; + + checkBuild($db, $jobset, $ev, $inputInfo, $job, \%buildMap, $prevEval, $jobOutPathMap, $plugins) + unless defined $job->{error}; + + if (defined $job->{constituents}) { + push @jobsWithConstituents, $job; + } } # Have any builds been added or removed since last time? 
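For orientation, the ingestion loop above reads one JSON object per line from
nix-eval-jobs and feeds each to checkBuild. A minimal, hypothetical sketch of
what decoding one such line looks like -- the field names (attr, name, system,
drvPath, outputs, meta, and error/constituents for failed and aggregate jobs)
are the ones the new code reads above, while the concrete values and store
paths here are invented for illustration:

    use strict;
    use warnings;
    use JSON::MaybeXS;

    # A hypothetical single line of nix-eval-jobs output (values invented):
    my $line = '{"attr":"hello","name":"hello-2.12","system":"x86_64-linux",'
             . '"drvPath":"/nix/store/aaa-hello.drv",'
             . '"outputs":{"out":"/nix/store/bbb-hello"},'
             . '"meta":{"description":"demo","schedulingPriority":100}}';

    my $job = decode_json($line);
    print "job $job->{attr} -> $job->{drvPath}\n";

Jobs that fail to evaluate carry an "error" field instead of a derivation, and
aggregate jobs additionally carry "constituents", which is what the loop above
collects into @jobsWithConstituents.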
@@ -801,21 +842,20 @@ sub checkJobsetWrapped { $drvPathToId{$x->{drvPath}} = $x; } - foreach my $job (values %{$jobs}) { - next unless $job->{constituents}; - + foreach my $job (values @jobsWithConstituents) { + next unless defined $job->{constituents}; if (defined $job->{error}) { - die "aggregate job ‘$job->{jobName}’ failed with the error: $job->{error}\n"; + die "aggregate job ‘$job->{attr}’ failed with the error: $job->{error}\n"; } my $x = $drvPathToId{$job->{drvPath}} or - die "aggregate job ‘$job->{jobName}’ has no corresponding build record.\n"; + die "aggregate job ‘$job->{attr}’ has no corresponding build record.\n"; foreach my $drvPath (@{$job->{constituents}}) { my $constituent = $drvPathToId{$drvPath}; if (defined $constituent) { $db->resultset('AggregateConstituents')->update_or_create({aggregate => $x->{id}, constituent => $constituent->{id}}); } else { - warn "aggregate job ‘$job->{jobName}’ has a constituent ‘$drvPath’ that doesn't correspond to a Hydra build\n"; + warn "aggregate job ‘$job->{attr}’ has a constituent ‘$drvPath’ that doesn't correspond to a Hydra build\n"; } } } @@ -857,11 +897,15 @@ sub checkJobsetWrapped { $jobset->update({ enabled => 0 }) if $jobset->enabled == 2; $jobset->update({ lastcheckedtime => time, forceeval => undef }); + + $evaluationErrorRecord->update({ errormsg => $evaluationErrorMsg }); + setJobsetError($jobset, $evaluationErrorMsg, $evaluationErrorTime); + + $evalStop = clock_gettime(CLOCK_MONOTONIC); + $ev->update({ evaltime => abs(int($evalStop - $evalStart)) }); }); - my $dbStop = clock_gettime(CLOCK_MONOTONIC); - - Net::Statsd::timing("hydra.evaluator.db_time", int(($dbStop - $dbStart) * 1000)); + Net::Statsd::timing("hydra.evaluator.eval_time", int(($evalStop - $evalStart) * 1000)); Net::Statsd::increment("hydra.evaluator.evals"); Net::Statsd::increment("hydra.evaluator.cached_evals") unless $jobsetChanged; } diff --git a/t/evaluator/evaluate-constituents-broken.t b/t/evaluator/evaluate-constituents-broken.t index ed25d192..0e5960bf 100644 --- a/t/evaluator/evaluate-constituents-broken.t +++ b/t/evaluator/evaluate-constituents-broken.t @@ -18,14 +18,14 @@ isnt($res, 0, "hydra-eval-jobset exits non-zero"); ok(utf8::decode($stderr), "Stderr output is UTF8-clean"); like( $stderr, - qr/aggregate job ‘mixed_aggregate’ failed with the error: constituentA: does not exist/, + qr/aggregate job ‘mixed_aggregate’ failed with the error: "constituentA": does not exist/, "The stderr record includes a relevant error message" ); -$jobset->discard_changes; # refresh from DB +$jobset->discard_changes({ '+columns' => {'errormsg' => 'errormsg'} }); # refresh from DB like( $jobset->errormsg, - qr/aggregate job ‘mixed_aggregate’ failed with the error: constituentA: does not exist/, + qr/aggregate job ‘mixed_aggregate’ failed with the error: "constituentA": does not exist/, "The jobset records a relevant error message" ); diff --git a/t/evaluator/evaluate-meta.t b/t/evaluator/evaluate-meta.t new file mode 100644 index 00000000..9f546a7f --- /dev/null +++ b/t/evaluator/evaluate-meta.t @@ -0,0 +1,22 @@ +use feature 'unicode_strings'; +use strict; +use warnings; +use Setup; +use Test2::V0; + +my $ctx = test_context(); + +my $builds = $ctx->makeAndEvaluateJobset( + expression => "meta.nix", + build => 1 +); + +my $build = $builds->{"full-of-meta"}; + +is($build->finished, 1, "Build should be finished."); +is($build->description, "This is the description of the job.", "Wrong description extracted from the build."); +is($build->license, "MIT, BSD", "Wrong 
licenses extracted from the build."); +is($build->homepage, "https://example.com/", "Wrong homepage extracted from the build."); +is($build->maintainers, 'alice@example.com, bob@not.found', "Wrong maintainers extracted from the build."); + +done_testing; diff --git a/t/jobs/meta.nix b/t/jobs/meta.nix new file mode 100644 index 00000000..9204e384 --- /dev/null +++ b/t/jobs/meta.nix @@ -0,0 +1,17 @@ +with import ./config.nix; +{ + full-of-meta = + mkDerivation { + name = "full-of-meta"; + builder = ./empty-dir-builder.sh; + + meta = { + description = "This is the description of the job."; + license = [ { shortName = "MIT"; } "BSD" ]; + homepage = "https://example.com/"; + maintainers = [ "alice@example.com" { email = "bob@not.found"; } ]; + + outPath = "${placeholder "out"}"; + }; + }; +} diff --git a/t/queue-runner/constituents.t b/t/queue-runner/constituents.t index c6333642..e1b8d733 100644 --- a/t/queue-runner/constituents.t +++ b/t/queue-runner/constituents.t @@ -22,11 +22,11 @@ is(nrQueuedBuildsForJobset($jobset), 0, "Evaluating jobs/broken-constituent.nix like( $jobset->errormsg, - qr/^does-not-exist: does not exist$/m, + qr/^"does-not-exist": does not exist$/m, "Evaluating jobs/broken-constituent.nix should log an error for does-not-exist"); like( $jobset->errormsg, - qr/^does-not-evaluate: error: assertion 'false' failed$/m, + qr/^"does-not-evaluate": "error: assertion 'false' failed/m, "Evaluating jobs/broken-constituent.nix should log an error for does-not-evaluate"); done_testing; From 2f92846e5a37b284ffe3df0f6082911dbfc6f0e2 Mon Sep 17 00:00:00 2001 From: Pierre Bourdon Date: Tue, 16 Jul 2024 04:04:08 +0200 Subject: [PATCH 44/46] hydra-eval-jobs: remove, replaced by nix-eval-jobs (cherry picked from commit ed7c58708cd3affd62a598a22a500ed2adf318bf) --- package.nix | 2 +- src/hydra-eval-jobs/hydra-eval-jobs.cc | 587 ------------------------- src/hydra-eval-jobs/meson.build | 8 - src/meson.build | 1 - t/meson.build | 1 - 5 files changed, 1 insertion(+), 598 deletions(-) delete mode 100644 src/hydra-eval-jobs/hydra-eval-jobs.cc delete mode 100644 src/hydra-eval-jobs/meson.build diff --git a/package.nix b/package.nix index ceb2c706..a9ec12c8 100644 --- a/package.nix +++ b/package.nix @@ -235,7 +235,7 @@ stdenv.mkDerivation (finalAttrs: { shellHook = '' pushd $(git rev-parse --show-toplevel) >/dev/null - PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-eval-jobs:$(pwd)/src/hydra-queue-runner:$PATH + PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-queue-runner:$PATH PERL5LIB=$(pwd)/src/lib:$PERL5LIB export HYDRA_HOME="$(pwd)/src/" mkdir -p .hydra-data diff --git a/src/hydra-eval-jobs/hydra-eval-jobs.cc b/src/hydra-eval-jobs/hydra-eval-jobs.cc deleted file mode 100644 index b83cae91..00000000 --- a/src/hydra-eval-jobs/hydra-eval-jobs.cc +++ /dev/null @@ -1,587 +0,0 @@ -#include -#include -#include -#include - -#include "shared.hh" -#include "store-api.hh" -#include "eval.hh" -#include "eval-gc.hh" -#include "eval-inline.hh" -#include "eval-settings.hh" -#include "signals.hh" -#include "terminal.hh" -#include "util.hh" -#include "get-drvs.hh" -#include "globals.hh" -#include "common-eval-args.hh" -#include "flake/flakeref.hh" -#include "flake/flake.hh" -#include "attr-path.hh" -#include "derivations.hh" -#include "local-fs-store.hh" - -#include "hydra-config.hh" - -#include -#include -#include - -#include - -void check_pid_status_nonblocking(pid_t check_pid) -{ - // Only check 'initialized' and known PID's - if (check_pid <= 0) { return; } - - int 
wstatus = 0; - pid_t pid = waitpid(check_pid, &wstatus, WNOHANG); - // -1 = failure, WNOHANG: 0 = no change - if (pid <= 0) { return; } - - std::cerr << "child process (" << pid << ") "; - - if (WIFEXITED(wstatus)) { - std::cerr << "exited with status=" << WEXITSTATUS(wstatus) << std::endl; - } else if (WIFSIGNALED(wstatus)) { - std::cerr << "killed by signal=" << WTERMSIG(wstatus) << std::endl; - } else if (WIFSTOPPED(wstatus)) { - std::cerr << "stopped by signal=" << WSTOPSIG(wstatus) << std::endl; - } else if (WIFCONTINUED(wstatus)) { - std::cerr << "continued" << std::endl; - } -} - -using namespace nix; - -static Path gcRootsDir; -static size_t maxMemorySize; - -struct MyArgs : MixEvalArgs, MixCommonArgs, RootArgs -{ - Path releaseExpr; - bool flake = false; - bool dryRun = false; - - MyArgs() : MixCommonArgs("hydra-eval-jobs") - { - addFlag({ - .longName = "gc-roots-dir", - .description = "garbage collector roots directory", - .labels = {"path"}, - .handler = {&gcRootsDir} - }); - - addFlag({ - .longName = "dry-run", - .description = "don't create store derivations", - .handler = {&dryRun, true} - }); - - addFlag({ - .longName = "flake", - .description = "build a flake", - .handler = {&flake, true} - }); - - expectArg("expr", &releaseExpr); - } -}; - -static MyArgs myArgs; - -static std::string queryMetaStrings(EvalState & state, PackageInfo & drv, const std::string & name, const std::string & subAttribute) -{ - Strings res; - std::function rec; - - rec = [&](Value & v) { - state.forceValue(v, noPos); - if (v.type() == nString) - res.emplace_back(v.string_view()); - else if (v.isList()) - for (unsigned int n = 0; n < v.listSize(); ++n) - rec(*v.listElems()[n]); - else if (v.type() == nAttrs) { - auto a = v.attrs()->find(state.symbols.create(subAttribute)); - if (a != v.attrs()->end()) - res.push_back(std::string(state.forceString(*a->value, a->pos, "while evaluating meta attributes"))); - } - }; - - Value * v = drv.queryMeta(name); - if (v) rec(*v); - - return concatStringsSep(", ", res); -} - -static void worker( - EvalState & state, - Bindings & autoArgs, - AutoCloseFD & to, - AutoCloseFD & from) -{ - Value vTop; - - if (myArgs.flake) { - using namespace flake; - - auto [flakeRef, fragment, outputSpec] = parseFlakeRefWithFragmentAndExtendedOutputsSpec(fetchSettings, myArgs.releaseExpr, absPath(".")); - - auto vFlake = state.allocValue(); - - auto lockedFlake = lockFlake( - flakeSettings, - state, - flakeRef, - LockFlags { - .updateLockFile = false, - .useRegistries = false, - .allowUnlocked = false, - }); - - callFlake(state, lockedFlake, *vFlake); - - auto vOutputs = vFlake->attrs()->get(state.symbols.create("outputs"))->value; - state.forceValue(*vOutputs, noPos); - - auto aHydraJobs = vOutputs->attrs()->get(state.symbols.create("hydraJobs")); - if (!aHydraJobs) - aHydraJobs = vOutputs->attrs()->get(state.symbols.create("checks")); - if (!aHydraJobs) - throw Error("flake '%s' does not provide any Hydra jobs or checks", flakeRef); - - vTop = *aHydraJobs->value; - - } else { - state.evalFile(lookupFileArg(state, myArgs.releaseExpr), vTop); - } - - auto vRoot = state.allocValue(); - state.autoCallFunction(autoArgs, vTop, *vRoot); - - while (true) { - /* Wait for the master to send us a job name. */ - writeLine(to.get(), "next"); - - auto s = readLine(from.get()); - if (s == "exit") break; - if (!hasPrefix(s, "do ")) abort(); - std::string attrPath(s, 3); - - debug("worker process %d at '%s'", getpid(), attrPath); - - /* Evaluate it and send info back to the master. 
*/ - nlohmann::json reply; - - try { - auto vTmp = findAlongAttrPath(state, attrPath, autoArgs, *vRoot).first; - - auto v = state.allocValue(); - state.autoCallFunction(autoArgs, *vTmp, *v); - - if (auto drv = getDerivation(state, *v, false)) { - - // CA derivations do not have static output paths, so we - // have to defensively not query output paths in case we - // encounter one. - PackageInfo::Outputs outputs = drv->queryOutputs( - !experimentalFeatureSettings.isEnabled(Xp::CaDerivations)); - - if (drv->querySystem() == "unknown") - state.error("derivation must have a 'system' attribute").debugThrow(); - - auto drvPath = state.store->printStorePath(drv->requireDrvPath()); - - nlohmann::json job; - - job["nixName"] = drv->queryName(); - job["system"] =drv->querySystem(); - job["drvPath"] = drvPath; - job["description"] = drv->queryMetaString("description"); - job["license"] = queryMetaStrings(state, *drv, "license", "shortName"); - job["homepage"] = drv->queryMetaString("homepage"); - job["maintainers"] = queryMetaStrings(state, *drv, "maintainers", "email"); - job["schedulingPriority"] = drv->queryMetaInt("schedulingPriority", 100); - job["timeout"] = drv->queryMetaInt("timeout", 36000); - job["maxSilent"] = drv->queryMetaInt("maxSilent", 7200); - job["isChannel"] = drv->queryMetaBool("isHydraChannel", false); - - /* If this is an aggregate, then get its constituents. */ - auto a = v->attrs()->get(state.symbols.create("_hydraAggregate")); - if (a && state.forceBool(*a->value, a->pos, "while evaluating the `_hydraAggregate` attribute")) { - auto a = v->attrs()->get(state.symbols.create("constituents")); - if (!a) - state.error("derivation must have a ‘constituents’ attribute").debugThrow(); - - NixStringContext context; - state.coerceToString(a->pos, *a->value, context, "while evaluating the `constituents` attribute", true, false); - for (auto & c : context) - std::visit(overloaded { - [&](const NixStringContextElem::Built & b) { - job["constituents"].push_back(b.drvPath->to_string(*state.store)); - }, - [&](const NixStringContextElem::Opaque & o) { - }, - [&](const NixStringContextElem::DrvDeep & d) { - }, - }, c.raw); - - state.forceList(*a->value, a->pos, "while evaluating the `constituents` attribute"); - for (unsigned int n = 0; n < a->value->listSize(); ++n) { - auto v = a->value->listElems()[n]; - state.forceValue(*v, noPos); - if (v->type() == nString) - job["namedConstituents"].push_back(v->string_view()); - } - } - - /* Register the derivation as a GC root. !!! This - registers roots for jobs that we may have already - done. */ - auto localStore = state.store.dynamic_pointer_cast(); - if (gcRootsDir != "" && localStore) { - Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath)); - if (!pathExists(root)) - localStore->addPermRoot(localStore->parseStorePath(drvPath), root); - } - - nlohmann::json out; - for (auto & [outputName, optOutputPath] : outputs) { - if (optOutputPath) { - out[outputName] = state.store->printStorePath(*optOutputPath); - } else { - // See the `queryOutputs` call above; we should - // not encounter missing output paths otherwise. 
- assert(experimentalFeatureSettings.isEnabled(Xp::CaDerivations)); - out[outputName] = nullptr; - } - } - job["outputs"] = std::move(out); - reply["job"] = std::move(job); - } - - else if (v->type() == nAttrs) { - auto attrs = nlohmann::json::array(); - StringSet ss; - for (auto & i : v->attrs()->lexicographicOrder(state.symbols)) { - std::string name(state.symbols[i->name]); - if (name.find(' ') != std::string::npos) { - printError("skipping job with illegal name '%s'", name); - continue; - } - attrs.push_back(name); - } - reply["attrs"] = std::move(attrs); - } - - else if (v->type() == nNull) - ; - - else state.error("attribute '%s' is %s, which is not supported", attrPath, showType(*v)).debugThrow(); - - } catch (EvalError & e) { - auto msg = e.msg(); - // Transmits the error we got from the previous evaluation - // in the JSON output. - reply["error"] = filterANSIEscapes(msg, true); - // Don't forget to print it into the STDERR log, this is - // what's shown in the Hydra UI. - printError(msg); - } - - writeLine(to.get(), reply.dump()); - - /* If our RSS exceeds the maximum, exit. The master will - start a new process. */ - struct rusage r; - getrusage(RUSAGE_SELF, &r); - if ((size_t) r.ru_maxrss > maxMemorySize * 1024) break; - } - - writeLine(to.get(), "restart"); -} - -int main(int argc, char * * argv) -{ - /* Prevent undeclared dependencies in the evaluation via - $NIX_PATH. */ - unsetenv("NIX_PATH"); - - return handleExceptions(argv[0], [&]() { - - auto config = std::make_unique(); - - auto nrWorkers = config->getIntOption("evaluator_workers", 1); - maxMemorySize = config->getIntOption("evaluator_max_memory_size", 4096); - - initNix(); - initGC(); - - myArgs.parseCmdline(argvToStrings(argc, argv)); - - auto pureEval = config->getBoolOption("evaluator_pure_eval", myArgs.flake); - - /* FIXME: The build hook in conjunction with import-from-derivation is causing "unexpected EOF" during eval */ - settings.builders = ""; - - /* Prevent access to paths outside of the Nix search path and - to the environment. */ - evalSettings.restrictEval = true; - - /* When building a flake, use pure evaluation (no access to - 'getEnv', 'currentSystem' etc. */ - evalSettings.pureEval = pureEval; - - if (myArgs.dryRun) settings.readOnlyMode = true; - - if (myArgs.releaseExpr == "") throw UsageError("no expression specified"); - - if (gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified"); - - struct State - { - std::set todo{""}; - std::set active; - nlohmann::json jobs; - std::exception_ptr exc; - }; - - std::condition_variable wakeup; - - Sync state_; - - /* Start a handler thread per worker process. */ - auto handler = [&]() - { - pid_t pid = -1; - try { - AutoCloseFD from, to; - - while (true) { - - /* Start a new worker process if necessary. */ - if (pid == -1) { - Pipe toPipe, fromPipe; - toPipe.create(); - fromPipe.create(); - pid = startProcess( - [&, - to{std::make_shared(std::move(fromPipe.writeSide))}, - from{std::make_shared(std::move(toPipe.readSide))} - ]() - { - try { - auto evalStore = myArgs.evalStoreUrl - ? 
openStore(*myArgs.evalStoreUrl) - : openStore(); - EvalState state(myArgs.lookupPath, - evalStore, fetchSettings, evalSettings); - Bindings & autoArgs = *myArgs.getAutoArgs(state); - worker(state, autoArgs, *to, *from); - } catch (Error & e) { - nlohmann::json err; - auto msg = e.msg(); - err["error"] = filterANSIEscapes(msg, true); - printError(msg); - writeLine(to->get(), err.dump()); - // Don't forget to print it into the STDERR log, this is - // what's shown in the Hydra UI. - writeLine(to->get(), "restart"); - } - }, - ProcessOptions { .allowVfork = false }); - from = std::move(fromPipe.readSide); - to = std::move(toPipe.writeSide); - debug("created worker process %d", pid); - } - - /* Check whether the existing worker process is still there. */ - auto s = readLine(from.get()); - if (s == "restart") { - pid = -1; - continue; - } else if (s != "next") { - auto json = nlohmann::json::parse(s); - throw Error("worker error: %s", (std::string) json["error"]); - } - - /* Wait for a job name to become available. */ - std::string attrPath; - - while (true) { - checkInterrupt(); - auto state(state_.lock()); - if ((state->todo.empty() && state->active.empty()) || state->exc) { - writeLine(to.get(), "exit"); - return; - } - if (!state->todo.empty()) { - attrPath = *state->todo.begin(); - state->todo.erase(state->todo.begin()); - state->active.insert(attrPath); - break; - } else - state.wait(wakeup); - } - - /* Tell the worker to evaluate it. */ - writeLine(to.get(), "do " + attrPath); - - /* Wait for the response. */ - auto response = nlohmann::json::parse(readLine(from.get())); - - /* Handle the response. */ - StringSet newAttrs; - - if (response.find("job") != response.end()) { - auto state(state_.lock()); - state->jobs[attrPath] = response["job"]; - } - - if (response.find("attrs") != response.end()) { - for (auto & i : response["attrs"]) { - std::string path = i; - if (path.find(".") != std::string::npos){ - path = "\"" + path + "\""; - } - auto s = (attrPath.empty() ? "" : attrPath + ".") + (std::string) path; - newAttrs.insert(s); - } - } - - if (response.find("error") != response.end()) { - auto state(state_.lock()); - state->jobs[attrPath]["error"] = response["error"]; - } - - /* Add newly discovered job names to the queue. */ - { - auto state(state_.lock()); - state->active.erase(attrPath); - for (auto & s : newAttrs) - state->todo.insert(s); - wakeup.notify_all(); - } - } - } catch (...) { - check_pid_status_nonblocking(pid); - auto state(state_.lock()); - state->exc = std::current_exception(); - wakeup.notify_all(); - } - }; - - std::vector threads; - for (size_t i = 0; i < nrWorkers; i++) - threads.emplace_back(std::thread(handler)); - - for (auto & thread : threads) - thread.join(); - - auto state(state_.lock()); - - if (state->exc) - std::rethrow_exception(state->exc); - - /* For aggregate jobs that have named consistuents - (i.e. constituents that are a job name rather than a - derivation), look up the referenced job and add it to the - dependencies of the aggregate derivation. 
*/ - auto store = openStore(); - - for (auto i = state->jobs.begin(); i != state->jobs.end(); ++i) { - auto jobName = i.key(); - auto & job = i.value(); - - auto named = job.find("namedConstituents"); - if (named == job.end()) continue; - - std::unordered_map brokenJobs; - auto getNonBrokenJobOrRecordError = [&brokenJobs, &jobName, &state]( - const std::string & childJobName) -> std::optional { - auto childJob = state->jobs.find(childJobName); - if (childJob == state->jobs.end()) { - printError("aggregate job '%s' references non-existent job '%s'", jobName, childJobName); - brokenJobs[childJobName] = "does not exist"; - return std::nullopt; - } - if (childJob->find("error") != childJob->end()) { - std::string error = (*childJob)["error"]; - printError("aggregate job '%s' references broken job '%s': %s", jobName, childJobName, error); - brokenJobs[childJobName] = error; - return std::nullopt; - } - return *childJob; - }; - - if (myArgs.dryRun) { - for (std::string jobName2 : *named) { - auto job2 = getNonBrokenJobOrRecordError(jobName2); - if (!job2) { - continue; - } - std::string drvPath2 = (*job2)["drvPath"]; - job["constituents"].push_back(drvPath2); - } - } else { - auto drvPath = store->parseStorePath((std::string) job["drvPath"]); - auto drv = store->readDerivation(drvPath); - - for (std::string jobName2 : *named) { - auto job2 = getNonBrokenJobOrRecordError(jobName2); - if (!job2) { - continue; - } - auto drvPath2 = store->parseStorePath((std::string) (*job2)["drvPath"]); - auto drv2 = store->readDerivation(drvPath2); - job["constituents"].push_back(store->printStorePath(drvPath2)); - drv.inputDrvs.map[drvPath2].value = {drv2.outputs.begin()->first}; - } - - if (brokenJobs.empty()) { - std::string drvName(drvPath.name()); - assert(hasSuffix(drvName, drvExtension)); - drvName.resize(drvName.size() - drvExtension.size()); - - auto hashModulo = hashDerivationModulo(*store, drv, true); - if (hashModulo.kind != DrvHash::Kind::Regular) continue; - auto h = hashModulo.hashes.find("out"); - if (h == hashModulo.hashes.end()) continue; - auto outPath = store->makeOutputPath("out", h->second, drvName); - drv.env["out"] = store->printStorePath(outPath); - drv.outputs.insert_or_assign("out", DerivationOutput::InputAddressed { .path = outPath }); - auto newDrvPath = store->printStorePath(writeDerivation(*store, drv)); - - debug("rewrote aggregate derivation %s -> %s", store->printStorePath(drvPath), newDrvPath); - - job["drvPath"] = newDrvPath; - job["outputs"]["out"] = store->printStorePath(outPath); - } - } - - job.erase("namedConstituents"); - - /* Register the derivation as a GC root. !!! This - registers roots for jobs that we may have already - done. 
*/ - auto localStore = store.dynamic_pointer_cast(); - if (gcRootsDir != "" && localStore) { - auto drvPath = job["drvPath"].get(); - Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath)); - if (!pathExists(root)) - localStore->addPermRoot(localStore->parseStorePath(drvPath), root); - } - - if (!brokenJobs.empty()) { - std::stringstream ss; - for (const auto& [jobName, error] : brokenJobs) { - ss << jobName << ": " << error << "\n"; - } - job["error"] = ss.str(); - } - } - - std::cout << state->jobs.dump(2) << "\n"; - }); -} diff --git a/src/hydra-eval-jobs/meson.build b/src/hydra-eval-jobs/meson.build deleted file mode 100644 index 916212e1..00000000 --- a/src/hydra-eval-jobs/meson.build +++ /dev/null @@ -1,8 +0,0 @@ -hydra_eval_jobs = executable('hydra-eval-jobs', - 'hydra-eval-jobs.cc', - dependencies: [ - libhydra_dep, - nix_dep, - ], - install: true, -) diff --git a/src/meson.build b/src/meson.build index 8c7562ed..52b821bc 100644 --- a/src/meson.build +++ b/src/meson.build @@ -1,6 +1,5 @@ # Native code subdir('libhydra') -subdir('hydra-eval-jobs') subdir('hydra-evaluator') subdir('hydra-queue-runner') diff --git a/t/meson.build b/t/meson.build index 11044a03..c3c58458 100644 --- a/t/meson.build +++ b/t/meson.build @@ -27,7 +27,6 @@ testenv.prepend('PERL5LIB', separator: ':' ) testenv.prepend('PATH', - fs.parent(hydra_eval_jobs.full_path()), fs.parent(hydra_evaluator.full_path()), fs.parent(hydra_queue_runner.full_path()), meson.project_source_root() / 'src/script', From 85383b952222e62baeda050b282682c68a8d2581 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 10 Dec 2024 11:31:40 -0500 Subject: [PATCH 45/46] Render the `nix-eval-jobs` version too --- package.nix | 3 ++- src/lib/Hydra/Controller/Root.pm | 1 + src/root/layout.tt | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/package.nix b/package.nix index a9ec12c8..e0046024 100644 --- a/package.nix +++ b/package.nix @@ -266,7 +266,8 @@ stdenv.mkDerivation (finalAttrs: { --prefix PATH ':' $out/bin:$hydraPath \ --set HYDRA_RELEASE ${version} \ --set HYDRA_HOME $out/libexec/hydra \ - --set NIX_RELEASE ${nix.name or "unknown"} + --set NIX_RELEASE ${nix.name or "unknown"} \ + --set NIX_EVAL_JOBS_RELEASE ${nix-eval-jobs.name or "unknown"} done ''; diff --git a/src/lib/Hydra/Controller/Root.pm b/src/lib/Hydra/Controller/Root.pm index aa1ad5ab..a231d7c0 100644 --- a/src/lib/Hydra/Controller/Root.pm +++ b/src/lib/Hydra/Controller/Root.pm @@ -51,6 +51,7 @@ sub begin :Private { $c->stash->{curUri} = $c->request->uri; $c->stash->{version} = $ENV{"HYDRA_RELEASE"} || ""; $c->stash->{nixVersion} = $ENV{"NIX_RELEASE"} || ""; + $c->stash->{nixEvalJobsVersion} = $ENV{"NIX_EVAL_JOBS_RELEASE"} || ""; $c->stash->{curTime} = time; $c->stash->{logo} = defined $c->config->{hydra_logo} ? "/logo" : ""; $c->stash->{tracker} = defined $c->config->{tracker} ? $c->config->{tracker} : ""; diff --git a/src/root/layout.tt b/src/root/layout.tt index d67ff1b8..399962b4 100644 --- a/src/root/layout.tt +++ b/src/root/layout.tt @@ -93,7 +93,7 @@
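A closing note on the evaluator settings introduced in PATCH 43: hydra-eval-jobset
now forwards evaluator_workers and evaluator_max_memory_size from the Hydra
configuration to nix-eval-jobs, defaulting to 1 worker and 4096 MiB as seen in
the Perl diff above. A sketch of the corresponding hydra.conf fragment, with
illustrative (non-default) values:

    # hydra.conf fragment -- illustrative values; the keys are the ones read
    # via $config->{evaluator_workers} and $config->{evaluator_max_memory_size}
    evaluator_workers = 4
    evaluator_max_memory_size = 8192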