10 Commits

SHA1 Message Date
3b4c4972c2 Merge pull request #1449 from knedlsepp/fix-metrics-rendering-with-special-characters
Fix rendering of metrics with special characters
2025-03-29 08:52:07 +01:00
b2fe3f5218 Merge pull request #1455 from qowoz/226-constituent
nix-eval-jobs + constituent globs
2025-03-27 23:18:39 -04:00
9911f0107f Reimplement (named) constituent jobs (+globbing) based on nix-eval-jobs
Depends on https://github.com/nix-community/nix-eval-jobs/pull/349 & #1421.

Almost equivalent to #1425, but with a small change: when an aggregate
job has a glob that matches nothing, the jobset evaluation now fails.
This was the intended behavior before (hydra-eval-jobset fails hard if
an aggregate is broken), but the code path was never reached, since the
aggregate was never marked as broken in this case.
2025-03-28 11:12:54 +10:00
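
For illustration, a minimal sketch of the failure case described above. The job name broken_aggregate is hypothetical; mkDerivation and empty-dir-builder.sh are the test helpers from t/jobs/config.nix in the diff below:

with import ./config.nix;

{
  # Hypothetical aggregate: its constituent glob matches no job in the
  # jobset, so hydra-eval-jobset now fails the whole evaluation instead
  # of silently succeeding.
  broken_aggregate = mkDerivation {
    name = "broken_aggregate";
    _hydraAggregate = true;
    _hydraGlobConstituents = true;
    constituents = [ "does-not-match.*" ];
    builder = ./empty-dir-builder.sh;
  };
}

(The actual test expression, constituents-no-matches.nix below, also adds a dummy job so that evaluation gets far enough to validate the constituents.)
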
feebb61897 flake.lock: Update
Flake lock file updates:

• Updated input 'nix-eval-jobs':
    'github:nix-community/nix-eval-jobs/4b392b284877d203ae262e16af269f702df036bc?narHash=sha256-3wIReAqdTALv39gkWXLMZQvHyBOc3yPkWT2ZsItxedY%3D' (2025-02-14)
  → 'github:nix-community/nix-eval-jobs/f7418fc1fa45b96d37baa95ff3c016dd5be3876b?narHash=sha256-Lo4KFBNcY8tmBuCmEr2XV0IUZtxXHmbXPNLkov/QSU0%3D' (2025-03-26)
2025-03-28 11:12:54 +10:00
4bcbed2f1b hydraTest: remove outdated postgresql version
error: postgresql_12 has been removed since it reached its EOL upstream
2025-03-28 11:12:48 +10:00
d2db3c7446 Merge pull request #1450 from NixOS/hydra-compress-race
Fix race condition in hydra-compress-logs
2025-03-16 14:37:39 -04:00
97dcdae068 Merge pull request #1451 from NixOS/revert-to-fix-hangs
Revert "Use `LegacySSHStore`"
2025-03-03 10:18:28 -05:00
9a5bd39d4c Revert "Use LegacySSHStore"
There were some hangs caused by this. Need to fix them, ideally
reproducing the issue in a test, before trying this again.

This reverts commit 4a4a0f901c.
2025-03-03 10:12:38 -05:00
f1deb22c02 Fix race condition in hydra-compress-logs 2025-03-02 03:08:26 +01:00
d22d030503 Fix rendering of metrics with special characters
My main motivation here is to get metrics with brackets to work in order
to support "pytest" test names:

- test_foo.py::test_bar[1]
- test_foo.py::test_bar[2]

I couldn't find an "HTML escape"-style function that would generate
valid HTML `id` attribute values from arbitrary strings, so I went with
a hash digest instead.
2025-02-27 09:25:42 +01:00
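
The idea: hash the raw metric name rather than trying to escape it. A quick sketch in Nix (illustrative only; the real helper added in the diff below uses Perl's Digest::SHA sha1_hex):

let
  # Illustrative analogue of the new metricDivId helper: any string,
  # pytest brackets included, maps to a stable hex digest that is a
  # valid HTML id.
  metricDivId = name: "metric-" + builtins.hashString "sha1" name;
in
  metricDivId "test_foo.py::test_bar[1]"

Evaluating this (e.g. with nix-instantiate --eval) yields "metric-" followed by 40 hex characters, identical for identical metric names.
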
17 changed files with 482 additions and 95 deletions

flake.lock (generated)

@@ -29,11 +29,11 @@
"nix-eval-jobs": {
"flake": false,
"locked": {
"lastModified": 1739500569,
"narHash": "sha256-3wIReAqdTALv39gkWXLMZQvHyBOc3yPkWT2ZsItxedY=",
"lastModified": 1743008255,
"narHash": "sha256-Lo4KFBNcY8tmBuCmEr2XV0IUZtxXHmbXPNLkov/QSU0=",
"owner": "nix-community",
"repo": "nix-eval-jobs",
"rev": "4b392b284877d203ae262e16af269f702df036bc",
"rev": "f7418fc1fa45b96d37baa95ff3c016dd5be3876b",
"type": "github"
},
"original": {


@@ -15,7 +15,6 @@
systemd.services.hydra-send-stats.enable = false;
services.postgresql.enable = true;
services.postgresql.package = pkgs.postgresql_12;
# The following is to work around the following error from hydra-server:
# [error] Caught exception in engine "Cannot determine local time zone"


@@ -468,7 +468,7 @@ in
elif [[ $compression == zstd ]]; then
compression="zstd --rm"
fi
find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r "$compression" --force --quiet
find ${baseDir}/build-logs -ignore_readdir_race -type f -name "*.drv" -mtime +3 -size +0c | xargs -r "$compression" --force --quiet
'';
startAt = "Sun 01:45";
};


@@ -9,10 +9,13 @@
#include "path.hh"
#include "legacy-ssh-store.hh"
#include "serve-protocol.hh"
#include "serve-protocol-impl.hh"
#include "state.hh"
#include "current-process.hh"
#include "processes.hh"
#include "util.hh"
#include "serve-protocol.hh"
#include "serve-protocol-impl.hh"
#include "ssh.hh"
#include "finally.hh"
#include "url.hh"
@@ -36,6 +39,38 @@ bool ::Machine::isLocalhost() const
namespace nix::build_remote {
static std::unique_ptr<SSHMaster::Connection> openConnection(
::Machine::ptr machine, SSHMaster & master)
{
Strings command = {"nix-store", "--serve", "--write"};
if (machine->isLocalhost()) {
command.push_back("--builders");
command.push_back("");
} else {
auto remoteStore = machine->storeUri.params.find("remote-store");
if (remoteStore != machine->storeUri.params.end()) {
command.push_back("--store");
command.push_back(shellEscape(remoteStore->second));
}
}
auto ret = master.startCommand(std::move(command), {
"-a", "-oBatchMode=yes", "-oConnectTimeout=60", "-oTCPKeepAlive=yes"
});
// XXX: determine the actual max value we can use from /proc.
// FIXME: Should this be upstreamed into `startCommand` in Nix?
int pipesize = 1024 * 1024;
fcntl(ret->in.get(), F_SETPIPE_SZ, &pipesize);
fcntl(ret->out.get(), F_SETPIPE_SZ, &pipesize);
return ret;
}
static void copyClosureTo(
::Machine::Connection & conn,
Store & destStore,
@@ -52,8 +87,8 @@ static void copyClosureTo(
// FIXME: substitute output pollutes our build log
/* Get back the set of paths that are already valid on the remote
host. */
auto present = conn.store->queryValidPaths(
closure, true, useSubstitutes);
auto present = conn.queryValidPaths(
destStore, true, closure, useSubstitutes);
if (present.size() == closure.size()) return;
@@ -68,7 +103,12 @@
std::unique_lock<std::timed_mutex> sendLock(conn.machine->state->sendLock,
std::chrono::seconds(600));
conn.store->addMultipleToStoreLegacy(destStore, missing);
conn.to << ServeProto::Command::ImportPaths;
destStore.exportPaths(missing, conn.to);
conn.to.flush();
if (readInt(conn.from) != 1)
throw Error("remote machine failed to import closure");
}
@@ -188,7 +228,7 @@ static BuildResult performBuild(
counter & nrStepsBuilding
)
{
auto kont = conn.store->buildDerivationAsync(drvPath, drv, options);
conn.putBuildDerivationRequest(localStore, drvPath, drv, options);
BuildResult result;
@@ -197,10 +237,7 @@
startTime = time(0);
{
MaintainCount<counter> mc(nrStepsBuilding);
result = kont();
// Without proper call-once functions, we need to manually
// delete after calling.
kont = {};
result = ServeProto::Serialise<BuildResult>::read(localStore, conn);
}
stopTime = time(0);
@@ -216,7 +253,7 @@
// If the protocol was too old to give us `builtOutputs`, initialize
// it manually by introspecting the derivation.
if (GET_PROTOCOL_MINOR(conn.store->getProtocol()) < 6)
if (GET_PROTOCOL_MINOR(conn.remoteVersion) < 6)
{
// If the remote is too old to handle CA derivations, we can't get this
// far anyways
@@ -249,25 +286,26 @@ static void copyPathFromRemote(
const ValidPathInfo & info
)
{
/* Receive the NAR from the remote and add it to the
destination store. Meanwhile, extract all the info from the
NAR that getBuildOutput() needs. */
auto source2 = sinkToSource([&](Sink & sink)
{
/* Note: we should only send the command to dump the store
path to the remote if the NAR is actually going to get read
by the destination store, which won't happen if this path
is already valid on the destination store. Since this
lambda function only gets executed if someone tries to read
from source2, we will send the command from here rather
than outside the lambda. */
conn.store->narFromPath(info.path, [&](Source & source) {
TeeSource tee{source, sink};
extractNarData(tee, conn.store->printStorePath(info.path), narMembers);
});
});
/* Receive the NAR from the remote and add it to the
destination store. Meanwhile, extract all the info from the
NAR that getBuildOutput() needs. */
auto source2 = sinkToSource([&](Sink & sink)
{
/* Note: we should only send the command to dump the store
path to the remote if the NAR is actually going to get read
by the destination store, which won't happen if this path
is already valid on the destination store. Since this
lambda function only gets executed if someone tries to read
from source2, we will send the command from here rather
than outside the lambda. */
conn.to << ServeProto::Command::DumpStorePath << localStore.printStorePath(info.path);
conn.to.flush();
destStore.addToStore(info, *source2, NoRepair, NoCheckSigs);
TeeSource tee(conn.from, sink);
extractNarData(tee, localStore.printStorePath(info.path), narMembers);
});
destStore.addToStore(info, *source2, NoRepair, NoCheckSigs);
}
static void copyPathsFromRemote(
@@ -366,39 +404,30 @@ void State::buildRemote(ref<Store> destStore,
updateStep(ssConnecting);
// FIXME: rewrite to use Store.
::Machine::Connection conn {
.machine = machine,
.store = [&]{
auto * pSpecified = std::get_if<StoreReference::Specified>(&machine->storeUri.variant);
if (!pSpecified || pSpecified->scheme != "ssh") {
throw Error("Currently, only (legacy-)ssh stores are supported!");
}
auto storeRef = machine->completeStoreReference();
auto remoteStore = machine->openStore().dynamic_pointer_cast<LegacySSHStore>();
assert(remoteStore);
auto * pSpecified = std::get_if<StoreReference::Specified>(&storeRef.variant);
if (!pSpecified || pSpecified->scheme != "ssh") {
throw Error("Currently, only (legacy-)ssh stores are supported!");
}
remoteStore->connPipeSize = 1024 * 1024;
if (machine->isLocalhost()) {
auto rp_new = remoteStore->remoteProgram.get();
rp_new.push_back("--builders");
rp_new.push_back("");
const_cast<nix::Setting<Strings> &>(remoteStore->remoteProgram).assign(rp_new);
}
remoteStore->extraSshArgs = {
"-a", "-oBatchMode=yes", "-oConnectTimeout=60", "-oTCPKeepAlive=yes"
};
const_cast<nix::Setting<int> &>(remoteStore->logFD).assign(logFD.get());
return nix::ref{remoteStore};
}(),
LegacySSHStoreConfig storeConfig {
pSpecified->scheme,
pSpecified->authority,
storeRef.params
};
auto master = storeConfig.createSSHMaster(
false, // no SSH master yet
logFD.get());
// FIXME: rewrite to use Store.
auto child = build_remote::openConnection(machine, master);
{
auto activeStepState(activeStep->state_.lock());
if (activeStepState->cancelled) throw Error("step cancelled");
activeStepState->pid = conn.store->getConnectionPid();
activeStepState->pid = child->sshPid;
}
Finally clearPid([&]() {
@@ -413,12 +442,35 @@ void State::buildRemote(ref<Store> destStore,
process. Meh. */
});
::Machine::Connection conn {
{
.to = child->in.get(),
.from = child->out.get(),
/* Handshake. */
.remoteVersion = 0xdadbeef, // FIXME avoid dummy initialize
},
/*.machine =*/ machine,
};
Finally updateStats([&]() {
auto stats = conn.store->getConnectionStats();
bytesReceived += stats.bytesReceived;
bytesSent += stats.bytesSent;
bytesReceived += conn.from.read;
bytesSent += conn.to.written;
});
constexpr ServeProto::Version our_version = 0x206;
try {
conn.remoteVersion = decltype(conn)::handshake(
conn.to,
conn.from,
our_version,
machine->storeUri.render());
} catch (EndOfFile & e) {
child->sshPid.wait();
std::string s = chomp(readFile(result.logFile));
throw Error("cannot connect to %1%: %2%", machine->storeUri.render(), s);
}
{
auto info(machine->state->connectInfo.lock());
info->consecutiveFailures = 0;
@@ -487,7 +539,7 @@ void State::buildRemote(ref<Store> destStore,
auto now1 = std::chrono::steady_clock::now();
auto infos = conn.store->queryPathInfosUncached(outputs);
auto infos = conn.queryPathInfos(*localStore, outputs);
size_t totalNarSize = 0;
for (auto & [_, info] : infos) totalNarSize += info.narSize;
@@ -522,11 +574,9 @@ void State::buildRemote(ref<Store> destStore,
}
}
/* Shut down the connection done by RAII.
Only difference is kill() instead of wait() (i.e. send signal
then wait())
*/
/* Shut down the connection. */
child->in = -1;
child->sshPid.wait();
} catch (Error & e) {
/* Disable this machine until a certain period of time has


@@ -20,7 +20,9 @@
#include "store-api.hh"
#include "sync.hh"
#include "nar-extractor.hh"
#include "legacy-ssh-store.hh"
#include "serve-protocol.hh"
#include "serve-protocol-impl.hh"
#include "serve-protocol-connection.hh"
#include "machines.hh"
@@ -290,11 +292,9 @@ struct Machine : nix::Machine
bool isLocalhost() const;
// A connection to a machine
struct Connection {
struct Connection : nix::ServeProto::BasicClientConnection {
// Backpointer to the machine
ptr machine;
// Opened store
nix::ref<nix::LegacySSHStore> store;
};
};


@@ -6,6 +6,7 @@ use base 'Catalyst::View::TT';
use Template::Plugin::HTML;
use Hydra::Helper::Nix;
use Time::Seconds;
use Digest::SHA qw(sha1_hex);
__PACKAGE__->config(
TEMPLATE_EXTENSION => '.tt',
@@ -25,8 +26,14 @@ __PACKAGE__->config(
makeNameTextForJobset
relativeDuration
stripSSHUser
metricDivId
/]);
sub metricDivId {
my ($self, $c, $text) = @_;
return "metric-" . sha1_hex($text);
}
sub buildLogExists {
my ($self, $c, $build) = @_;
return 1 if defined $c->config->{log_prefix};


@@ -18,8 +18,7 @@
<h3>Metric: <a [% HTML.attributes(href => c.uri_for('/job' project.name jobset.name job 'metric' metric.name)) %]><tt>[%HTML.escape(metric.name)%]</tt></a></h3>
[% id = "metric-" _ metric.name;
id = id.replace('\.', '_');
[% id = metricDivId(metric.name);
INCLUDE createChart dataUrl=c.uri_for('/job' project.name jobset.name job 'metric' metric.name); %]
[% END %]


@@ -6,27 +6,55 @@ use Hydra::Helper::Exec;
my $ctx = test_context();
my $jobsetCtx = $ctx->makeJobset(
expression => 'constituents-broken.nix',
);
my $jobset = $jobsetCtx->{"jobset"};
subtest "broken constituents expression" => sub {
my $jobsetCtx = $ctx->makeJobset(
expression => 'constituents-broken.nix',
);
my $jobset = $jobsetCtx->{"jobset"};
my ($res, $stdout, $stderr) = captureStdoutStderr(60,
("hydra-eval-jobset", $jobsetCtx->{"project"}->name, $jobset->name)
);
isnt($res, 0, "hydra-eval-jobset exits non-zero");
ok(utf8::decode($stderr), "Stderr output is UTF8-clean");
like(
$stderr,
qr/aggregate job mixed_aggregate failed with the error: "constituentA": does not exist/,
"The stderr record includes a relevant error message"
);
my ($res, $stdout, $stderr) = captureStdoutStderr(60,
("hydra-eval-jobset", $jobsetCtx->{"project"}->name, $jobset->name)
);
isnt($res, 0, "hydra-eval-jobset exits non-zero");
ok(utf8::decode($stderr), "Stderr output is UTF8-clean");
like(
$stderr,
qr/aggregate job 'mixed_aggregate' references non-existent job 'constituentA'/,
"The stderr record includes a relevant error message"
);
$jobset->discard_changes({ '+columns' => {'errormsg' => 'errormsg'} }); # refresh from DB
like(
$jobset->errormsg,
qr/aggregate job mixed_aggregate failed with the error: "constituentA": does not exist/,
"The jobset records a relevant error message"
);
$jobset->discard_changes({ '+columns' => {'errormsg' => 'errormsg'} }); # refresh from DB
like(
$jobset->errormsg,
qr/aggregate job mixed_aggregate failed with the error: constituentA: does not exist/,
"The jobset records a relevant error message"
);
};
subtest "no matches" => sub {
my $jobsetCtx = $ctx->makeJobset(
expression => 'constituents-no-matches.nix',
);
my $jobset = $jobsetCtx->{"jobset"};
my ($res, $stdout, $stderr) = captureStdoutStderr(60,
("hydra-eval-jobset", $jobsetCtx->{"project"}->name, $jobset->name)
);
isnt($res, 0, "hydra-eval-jobset exits non-zero");
ok(utf8::decode($stderr), "Stderr output is UTF8-clean");
like(
$stderr,
qr/aggregate job 'non_match_aggregate' references constituent glob pattern 'tests\.\*' with no matches/,
"The stderr record includes a relevant error message"
);
$jobset->discard_changes({ '+columns' => {'errormsg' => 'errormsg'} }); # refresh from DB
like(
$jobset->errormsg,
qr/aggregate job non_match_aggregate failed with the error: tests\.\*: constituent glob pattern had no matches/,
qr/in job non_match_aggregate:\ntests\.\*: constituent glob pattern had no matches/,
"The jobset records a relevant error message"
);
};
done_testing;


@@ -0,0 +1,138 @@
use strict;
use warnings;
use Setup;
use Test2::V0;
use Hydra::Helper::Exec;
use Data::Dumper;
my $ctx = test_context();
subtest "general glob testing" => sub {
my $jobsetCtx = $ctx->makeJobset(
expression => 'constituents-glob.nix',
);
my $jobset = $jobsetCtx->{"jobset"};
my ($res, $stdout, $stderr) = captureStdoutStderr(60,
("hydra-eval-jobset", $jobsetCtx->{"project"}->name, $jobset->name)
);
is($res, 0, "hydra-eval-jobset exits zero");
my $builds = {};
for my $build ($jobset->builds) {
$builds->{$build->job} = $build;
}
subtest "basic globbing works" => sub {
ok(defined $builds->{"ok_aggregate"}, "'ok_aggregate' is part of the jobset evaluation");
my @constituents = $builds->{"ok_aggregate"}->constituents->all;
is(2, scalar @constituents, "'ok_aggregate' has two constituents");
my @sortedConstituentNames = sort (map { $_->nixname } @constituents);
is($sortedConstituentNames[0], "empty-dir-A", "first constituent of 'ok_aggregate' is 'empty-dir-A'");
is($sortedConstituentNames[1], "empty-dir-B", "second constituent of 'ok_aggregate' is 'empty-dir-B'");
};
subtest "transitivity is OK" => sub {
ok(defined $builds->{"indirect_aggregate"}, "'indirect_aggregate' is part of the jobset evaluation");
my @constituents = $builds->{"indirect_aggregate"}->constituents->all;
is(1, scalar @constituents, "'indirect_aggregate' has one constituent");
is($constituents[0]->nixname, "direct_aggregate", "'indirect_aggregate' has 'direct_aggregate' as single constituent");
};
};
subtest "* selects all except current aggregate" => sub {
my $jobsetCtx = $ctx->makeJobset(
expression => 'constituents-glob-all.nix',
);
my $jobset = $jobsetCtx->{"jobset"};
my ($res, $stdout, $stderr) = captureStdoutStderr(60,
("hydra-eval-jobset", $jobsetCtx->{"project"}->name, $jobset->name)
);
subtest "no eval errors" => sub {
ok(utf8::decode($stderr), "Stderr output is UTF8-clean");
ok(
$stderr !~ "aggregate job ok_aggregate has a constituent .* that doesn't correspond to a Hydra build",
"Catchall wildcard must not select itself as constituent"
);
$jobset->discard_changes; # refresh from DB
is(
$jobset->errormsg,
"",
"eval-errors non-empty"
);
};
my $builds = {};
for my $build ($jobset->builds) {
$builds->{$build->job} = $build;
}
subtest "two constituents" => sub {
ok(defined $builds->{"ok_aggregate"}, "'ok_aggregate' is part of the jobset evaluation");
my @constituents = $builds->{"ok_aggregate"}->constituents->all;
is(2, scalar @constituents, "'ok_aggregate' has two constituents");
my @sortedConstituentNames = sort (map { $_->nixname } @constituents);
is($sortedConstituentNames[0], "empty-dir-A", "first constituent of 'ok_aggregate' is 'empty-dir-A'");
is($sortedConstituentNames[1], "empty-dir-B", "second constituent of 'ok_aggregate' is 'empty-dir-B'");
};
};
subtest "trivial cycle check" => sub {
my $jobsetCtx = $ctx->makeJobset(
expression => 'constituents-cycle.nix',
);
my $jobset = $jobsetCtx->{"jobset"};
my ($res, $stdout, $stderr) = captureStdoutStderr(60,
("hydra-eval-jobset", $jobsetCtx->{"project"}->name, $jobset->name)
);
ok(
$stderr =~ "Found dependency cycle between jobs 'indirect_aggregate' and 'ok_aggregate'",
"Dependency cycle error is on stderr"
);
ok(utf8::decode($stderr), "Stderr output is UTF8-clean");
$jobset->discard_changes; # refresh from DB
like(
$jobset->errormsg,
qr/Dependency cycle: indirect_aggregate <-> ok_aggregate/,
"eval-errors non-empty"
);
is(0, $jobset->builds->count, "No builds should be scheduled");
};
subtest "cycle check with globbing" => sub {
my $jobsetCtx = $ctx->makeJobset(
expression => 'constituents-cycle-glob.nix',
);
my $jobset = $jobsetCtx->{"jobset"};
my ($res, $stdout, $stderr) = captureStdoutStderr(60,
("hydra-eval-jobset", $jobsetCtx->{"project"}->name, $jobset->name)
);
ok(utf8::decode($stderr), "Stderr output is UTF8-clean");
$jobset->discard_changes; # refresh from DB
like(
$jobset->errormsg,
qr/aggregate job indirect_aggregate failed with the error: Dependency cycle: indirect_aggregate <-> packages.constituentA/,
"packages.constituentA error missing"
);
# on this branch of Hydra, hydra-eval-jobset fails hard if an aggregate
# job is broken.
is(0, $jobset->builds->count, "Zero jobs are scheduled");
};
done_testing;

t/jobs/config.nix (new file)

@@ -0,0 +1,14 @@
rec {
path = "/nix/store/l9mg93sgx50y88p5rr6x1vib6j1rjsds-coreutils-9.1/bin";
mkDerivation = args:
derivation ({
system = builtins.currentSystem;
PATH = path;
} // args);
mkContentAddressedDerivation = args: mkDerivation ({
__contentAddressed = true;
outputHashMode = "recursive";
outputHashAlgo = "sha256";
} // args);
}


@@ -0,0 +1,34 @@
with import ./config.nix;
{
packages.constituentA = mkDerivation {
name = "empty-dir-A";
builder = ./empty-dir-builder.sh;
_hydraAggregate = true;
_hydraGlobConstituents = true;
constituents = [ "*_aggregate" ];
};
packages.constituentB = mkDerivation {
name = "empty-dir-B";
builder = ./empty-dir-builder.sh;
};
ok_aggregate = mkDerivation {
name = "direct_aggregate";
_hydraAggregate = true;
_hydraGlobConstituents = true;
constituents = [
"packages.*"
];
builder = ./empty-dir-builder.sh;
};
indirect_aggregate = mkDerivation {
name = "indirect_aggregate";
_hydraAggregate = true;
constituents = [
"ok_aggregate"
];
builder = ./empty-dir-builder.sh;
};
}


@@ -0,0 +1,21 @@
with import ./config.nix;
{
ok_aggregate = mkDerivation {
name = "direct_aggregate";
_hydraAggregate = true;
_hydraGlobConstituents = true;
constituents = [
"indirect_aggregate"
];
builder = ./empty-dir-builder.sh;
};
indirect_aggregate = mkDerivation {
name = "indirect_aggregate";
_hydraAggregate = true;
constituents = [
"ok_aggregate"
];
builder = ./empty-dir-builder.sh;
};
}


@@ -0,0 +1,22 @@
with import ./config.nix;
{
packages.constituentA = mkDerivation {
name = "empty-dir-A";
builder = ./empty-dir-builder.sh;
};
packages.constituentB = mkDerivation {
name = "empty-dir-B";
builder = ./empty-dir-builder.sh;
};
ok_aggregate = mkDerivation {
name = "direct_aggregate";
_hydraAggregate = true;
_hydraGlobConstituents = true;
constituents = [
"*"
];
builder = ./empty-dir-builder.sh;
};
}


@@ -0,0 +1,31 @@
with import ./config.nix;
{
packages.constituentA = mkDerivation {
name = "empty-dir-A";
builder = ./empty-dir-builder.sh;
};
packages.constituentB = mkDerivation {
name = "empty-dir-B";
builder = ./empty-dir-builder.sh;
};
ok_aggregate = mkDerivation {
name = "direct_aggregate";
_hydraAggregate = true;
_hydraGlobConstituents = true;
constituents = [
"packages.*"
];
builder = ./empty-dir-builder.sh;
};
indirect_aggregate = mkDerivation {
name = "indirect_aggregate";
_hydraAggregate = true;
constituents = [
"ok_aggregate"
];
builder = ./empty-dir-builder.sh;
};
}


@@ -0,0 +1,20 @@
with import ./config.nix;
{
non_match_aggregate = mkDerivation {
name = "mixed_aggregate";
_hydraAggregate = true;
_hydraGlobConstituents = true;
constituents = [
"tests.*"
];
builder = ./empty-dir-builder.sh;
};
# Without a second job no jobset is attempted to be created
# (the only job would be broken)
# and thus the constituent validation is never reached.
dummy = mkDerivation {
name = "dummy";
builder = ./empty-dir-builder.sh;
};
}


@@ -0,0 +1,24 @@
{
"enabled": 1,
"hidden": false,
"description": "declarative-jobset-example",
"nixexprinput": "src",
"nixexprpath": "declarative/generator.nix",
"checkinterval": 300,
"schedulingshares": 100,
"enableemail": false,
"emailoverride": "",
"keepnr": 3,
"inputs": {
"src": {
"type": "path",
"value": "/home/ma27/Projects/hydra-cppnix/t/jobs",
"emailresponsible": false
},
"jobspath": {
"type": "string",
"value": "/home/ma27/Projects/hydra-cppnix/t/jobs",
"emailresponsible": false
}
}
}


@@ -22,11 +22,11 @@ is(nrQueuedBuildsForJobset($jobset), 0, "Evaluating jobs/broken-constituent.nix
like(
$jobset->errormsg,
qr/^"does-not-exist": does not exist$/m,
qr/^does-not-exist: does not exist$/m,
"Evaluating jobs/broken-constituent.nix should log an error for does-not-exist");
like(
$jobset->errormsg,
qr/^"does-not-evaluate": "error: assertion 'false' failed/m,
qr/^does-not-evaluate: error: assertion 'false' failed/m,
"Evaluating jobs/broken-constituent.nix should log an error for does-not-evaluate");
done_testing;