tests: move to t, allow yath test
from root
By moving the tests subdirectory to t, we gain the ability to run `yath test` with no arguments from inside `nix develop` in the root of the repo. (`nix develop` is necessary in order to set the proper env vars for `yath` to find our test libraries.)
This commit is contained in:
42
t/Makefile.am
Normal file
42
t/Makefile.am
Normal file
@ -0,0 +1,42 @@
|
||||
TESTS_ENVIRONMENT = \
|
||||
BZR_HOME="$(abs_builddir)/data" \
|
||||
HYDRA_DBI="dbi:Pg:dbname=hydra-test-suite;port=6433" \
|
||||
HYDRA_DATA="$(abs_builddir)/data" \
|
||||
HYDRA_HOME="$(top_srcdir)/src" \
|
||||
HYDRA_CONFIG= \
|
||||
NIX_REMOTE= \
|
||||
NIX_REMOTE_SYSTEMS= \
|
||||
NIX_CONF_DIR="$(abs_builddir)/nix/etc/nix" \
|
||||
NIX_STATE_DIR="$(abs_builddir)/nix/var/nix" \
|
||||
NIX_MANIFESTS_DIR="$(abs_builddir)/nix/var/nix/manifests" \
|
||||
NIX_STORE_DIR="$(abs_builddir)/nix/store" \
|
||||
NIX_LOG_DIR="$(abs_builddir)/nix/var/log/nix" \
|
||||
NIX_BUILD_HOOK= \
|
||||
PGHOST=/tmp \
|
||||
PERL5LIB="$(srcdir):$(abs_top_srcdir)/src/lib:$$PERL5LIB" \
|
||||
PYTHONPATH= \
|
||||
PATH=$(abs_top_srcdir)/src/hydra-evaluator:$(abs_top_srcdir)/src/script:$(abs_top_srcdir)/src/hydra-eval-jobs:$(abs_top_srcdir)/src/hydra-queue-runner:$$PATH \
|
||||
perl -w
|
||||
|
||||
EXTRA_DIST = \
|
||||
$(wildcard *.pm) \
|
||||
$(wildcard jobs/*.nix) \
|
||||
$(wildcard jobs/*.sh) \
|
||||
$(TESTS)
|
||||
|
||||
TESTS = \
|
||||
test.pl
|
||||
|
||||
check_SCRIPTS = repos
|
||||
|
||||
repos: dirs
|
||||
|
||||
dirs:
|
||||
mkdir -p data
|
||||
touch data/hydra.conf
|
||||
mkdir -p nix
|
||||
mkdir -p nix/etc/nix
|
||||
touch nix/etc/nix/nix.conf
|
||||
echo "sandbox = false" >> nix/etc/nix/nix.conf
|
||||
mkdir -p nix/store
|
||||
mkdir -p nix/var
|
13
t/api-test.nix
Normal file
13
t/api-test.nix
Normal file
@ -0,0 +1,13 @@
|
||||
let
|
||||
thisFile = builtins.toFile "default.nix" (builtins.readFile ./default.nix);
|
||||
builder = builtins.toFile "builder.sh" ''
|
||||
echo ${thisFile} > $out
|
||||
'';
|
||||
in {
|
||||
job = derivation {
|
||||
name = "job";
|
||||
system = builtins.currentSystem;
|
||||
builder = "/bin/sh";
|
||||
args = [ builder ];
|
||||
};
|
||||
}
|
75
t/api-test.pl
Normal file
75
t/api-test.pl
Normal file
@ -0,0 +1,75 @@
|
||||
use LWP::UserAgent;
|
||||
use JSON;
|
||||
use Test::Simple tests => 20;
|
||||
|
||||
my $ua = LWP::UserAgent->new;
|
||||
$ua->cookie_jar({});
|
||||
|
||||
sub request_json {
|
||||
my ($opts) = @_;
|
||||
my $req = HTTP::Request->new;
|
||||
$req->method($opts->{method} or "GET");
|
||||
$req->uri("http://localhost:3000$opts->{uri}");
|
||||
$req->header(Accept => "application/json");
|
||||
$req->header(Referer => "http://localhost:3000/") if $opts->{method} eq "POST";
|
||||
$req->content(encode_json($opts->{data})) if defined $opts->{data};
|
||||
my $res = $ua->request($req);
|
||||
print $res->as_string();
|
||||
return $res;
|
||||
}
|
||||
|
||||
my $result = request_json({ uri => "/login", method => "POST", data => { username => "root", password => "wrong" } });
|
||||
ok($result->code() == 403, "Incorrect password rejected.");
|
||||
|
||||
my $result = request_json({ uri => "/login", method => "POST", data => { username => "root", password => "foobar" } });
|
||||
|
||||
my $user = decode_json($result->content());
|
||||
|
||||
ok($user->{username} eq "root", "The root user is named root");
|
||||
ok($user->{userroles}->[0] eq "admin", "The root user is an admin");
|
||||
|
||||
$user = decode_json(request_json({ uri => "/current-user" })->content());
|
||||
ok($user->{username} eq "root", "The current user is named root");
|
||||
ok($user->{userroles}->[0] eq "admin", "The current user is an admin");
|
||||
|
||||
ok(request_json({ uri => '/project/sample' })->code() == 404, "Non-existent projects don't exist");
|
||||
|
||||
$result = request_json({ uri => '/project/sample', method => 'PUT', data => { displayname => "Sample", enabled => "1", visible => "1", } });
|
||||
ok($result->code() == 201, "PUTting a new project creates it");
|
||||
|
||||
my $project = decode_json(request_json({ uri => '/project/sample' })->content());
|
||||
|
||||
ok((not @{$project->{jobsets}}), "A new project has no jobsets");
|
||||
|
||||
$result = request_json({ uri => '/jobset/sample/default', method => 'PUT', data => { nixexprpath => "default.nix", nixexprinput => "my-src", inputs => { "my-src" => { type => "path", value => "/run/jobset" } }, enabled => "1", visible => "1", checkinterval => "3600"} });
|
||||
ok($result->code() == 201, "PUTting a new jobset creates it");
|
||||
|
||||
my $jobset = decode_json(request_json({ uri => '/jobset/sample/default' })->content());
|
||||
|
||||
ok(exists $jobset->{jobsetinputs}->{"my-src"}, "The new jobset has a 'my-src' input");
|
||||
|
||||
ok($jobset->{jobsetinputs}->{"my-src"}->{jobsetinputalts}->[0] eq "/run/jobset", "The 'my-src' input is in /run/jobset");
|
||||
|
||||
system("hydra-eval-jobset sample default");
|
||||
$result = request_json({ uri => '/jobset/sample/default/evals' });
|
||||
ok($result->code() == 200, "Can get evals of a jobset");
|
||||
my $evals = decode_json($result->content())->{evals};
|
||||
my $eval = $evals->[0];
|
||||
ok($eval->{hasnewbuilds} == 1, "The first eval of a jobset has new builds");
|
||||
|
||||
system("echo >> /run/jobset/default.nix; hydra-eval-jobset sample default");
|
||||
my $evals = decode_json(request_json({ uri => '/jobset/sample/default/evals' })->content())->{evals};
|
||||
ok(scalar(@$evals) == 2, "Changing a jobset source creates the second evaluation");
|
||||
ok($evals->[0]->{jobsetevalinputs}->{"my-src"}->{revision} != $evals->[1]->{jobsetevalinputs}->{"my-src"}->{revision}, "Changing a jobset source changes its revision");
|
||||
|
||||
my $build = decode_json(request_json({ uri => "/build/" . $evals->[0]->{builds}->[0] })->content());
|
||||
ok($build->{job} eq "job", "The build's job name is job");
|
||||
ok($build->{finished} == 0, "The build isn't finished yet");
|
||||
ok($build->{buildoutputs}->{out}->{path} =~ /^\/nix\/store\/[a-zA-Z0-9]{32}-job$/, "The build's outpath is in the Nix store and named 'job'");
|
||||
|
||||
|
||||
my $search_project = decode_json(request_json({ uri => "/search/?query=sample" })->content());
|
||||
ok($search_project->{projects}[0]->{name} == "sample", "Search for project 'sample' works");
|
||||
|
||||
my $search_build = decode_json(request_json({ uri => "/search/?query=" . $build->{buildoutputs}->{out}->{path} })->content());
|
||||
ok($search_build->{builds}[0]->{buildoutputs}->{out}->{path} == $build->{buildoutputs}->{out}->{path}, "Search for builds work");
|
42
t/build-products.t
Normal file
42
t/build-products.t
Normal file
@ -0,0 +1,42 @@
|
||||
use strict;
|
||||
use Setup;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
|
||||
# Test build products
|
||||
|
||||
my $jobset = createBaseJobset("build-products", "build-products.nix", $ctx{jobsdir});
|
||||
|
||||
ok(evalSucceeds($jobset), "Evaluating jobs/build-products.nix should exit with return code 0");
|
||||
is(nrQueuedBuildsForJobset($jobset), 2, "Evaluating jobs/build-products.nix should result in 2 builds");
|
||||
|
||||
for my $build (queuedBuildsForJobset($jobset)) {
|
||||
subtest "For the build job '" . $build->job . "'" => sub {
|
||||
ok(runBuild($build), "Build should exit with code 0");
|
||||
my $newbuild = $db->resultset('Builds')->find($build->id);
|
||||
|
||||
is($newbuild->finished, 1, "Build should have finished");
|
||||
is($newbuild->buildstatus, 0, "Build should have buildstatus 0");
|
||||
|
||||
my $buildproducts = $db->resultset('BuildProducts')->search({ build => $build->id });
|
||||
my $buildproduct = $buildproducts->next;
|
||||
|
||||
if($build->job eq "simple") {
|
||||
is($buildproduct->name, "text.txt", "We should have \"text.txt\"");
|
||||
} elsif ($build->job eq "with_spaces") {
|
||||
is($buildproduct->name, "some text.txt", "We should have: \"some text.txt\"");
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
done_testing;
|
31
t/evaluate-basic.t
Normal file
31
t/evaluate-basic.t
Normal file
@ -0,0 +1,31 @@
|
||||
use feature 'unicode_strings';
|
||||
use strict;
|
||||
use Setup;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
my $project = $db->resultset('Projects')->create({name => "tests", displayname => "", owner => "root"});
|
||||
|
||||
# Most basic test case, no parameters
|
||||
my $jobset = createBaseJobset("basic", "basic.nix", $ctx{jobsdir});
|
||||
|
||||
ok(evalSucceeds($jobset), "Evaluating jobs/basic.nix should exit with return code 0");
|
||||
is(nrQueuedBuildsForJobset($jobset), 3, "Evaluating jobs/basic.nix should result in 3 builds");
|
||||
|
||||
for my $build (queuedBuildsForJobset($jobset)) {
|
||||
ok(runBuild($build), "Build '".$build->job."' from jobs/basic.nix should exit with code 0");
|
||||
my $newbuild = $db->resultset('Builds')->find($build->id);
|
||||
is($newbuild->finished, 1, "Build '".$build->job."' from jobs/basic.nix should be finished.");
|
||||
my $expected = $build->job eq "fails" ? 1 : $build->job =~ /with_failed/ ? 6 : 0;
|
||||
is($newbuild->buildstatus, $expected, "Build '".$build->job."' from jobs/basic.nix should have buildstatus $expected.");
|
||||
}
|
||||
|
||||
done_testing;
|
42
t/evaluate-dependent-jobsets.t
Normal file
42
t/evaluate-dependent-jobsets.t
Normal file
@ -0,0 +1,42 @@
|
||||
use strict;
|
||||
use Setup;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
# Test jobset with 2 jobs, one has parameter of succeeded build of the other
|
||||
my $jobset = createJobsetWithOneInput("build-output-as-input", "build-output-as-input.nix", "build1", "build", "build1", $ctx{jobsdir});
|
||||
|
||||
ok(evalSucceeds($jobset), "Evaluating jobs/build-output-as-input.nix should exit with return code 0");
|
||||
is(nrQueuedBuildsForJobset($jobset), 1 , "Evaluation should result in 1 build in queue");
|
||||
|
||||
subtest "For the 'build1' job" => sub {
|
||||
my ($build) = queuedBuildsForJobset($jobset);
|
||||
is($build->job, "build1", "Verify the only job we got is for 'build1'");
|
||||
|
||||
ok(runBuild($build), "Build should exit with code 0");
|
||||
my $newbuild = $db->resultset('Builds')->find($build->id);
|
||||
is($newbuild->finished, 1, "Build should be finished.");
|
||||
is($newbuild->buildstatus, 0, "Build should have buildstatus 0.");
|
||||
};
|
||||
|
||||
ok(evalSucceeds($jobset), "Evaluating jobs/build-output-as-input.nix for second time should exit with return code 0");
|
||||
is(nrQueuedBuildsForJobset($jobset), 1 , "The second evaluation should result in 1 new build in queue: build2");
|
||||
subtest "For the 'build2' job" => sub {
|
||||
my ($build) = queuedBuildsForJobset($jobset);
|
||||
is($build->job, "build2", "Verify the only job we got is for 'build2'");
|
||||
|
||||
ok(runBuild($build), "Build should exit with code 0");
|
||||
my $newbuild = $db->resultset('Builds')->find($build->id);
|
||||
is($newbuild->finished, 1, "Build should be finished.");
|
||||
is($newbuild->buildstatus, 0, "Build should have buildstatus 0.");
|
||||
};
|
||||
|
||||
done_testing;
|
28
t/input-types/bzr-checkout.t
Normal file
28
t/input-types/bzr-checkout.t
Normal file
@ -0,0 +1,28 @@
|
||||
use strict;
|
||||
use Setup;
|
||||
use TestScmInput;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
# Tests the creation of a Hydra jobset using a bzr checkout as input.
|
||||
testScmInput(
|
||||
type => 'bzr-checkout',
|
||||
expr => 'bzr-checkout-input.nix',
|
||||
uri => 'bzr-checkout-repo',
|
||||
update => 'jobs/bzr-checkout-update.sh',
|
||||
|
||||
# directories
|
||||
datadir => $ctx{tmpdir},
|
||||
testdir => $ctx{testdir},
|
||||
jobsdir => $ctx{jobsdir},
|
||||
);
|
||||
|
||||
done_testing;
|
28
t/input-types/bzr.t
Normal file
28
t/input-types/bzr.t
Normal file
@ -0,0 +1,28 @@
|
||||
use strict;
|
||||
use Setup;
|
||||
use TestScmInput;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
# Tests the creation of a Hydra jobset using a bzr repo as input.
|
||||
testScmInput(
|
||||
type => 'bzr',
|
||||
expr => 'bzr-input.nix',
|
||||
uri => 'bzr-repo',
|
||||
update => 'jobs/bzr-update.sh',
|
||||
|
||||
# directories
|
||||
datadir => $ctx{tmpdir},
|
||||
testdir => $ctx{testdir},
|
||||
jobsdir => $ctx{jobsdir},
|
||||
);
|
||||
|
||||
done_testing;
|
28
t/input-types/darcs.t
Normal file
28
t/input-types/darcs.t
Normal file
@ -0,0 +1,28 @@
|
||||
use strict;
|
||||
use Setup;
|
||||
use TestScmInput;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
# Tests the creation of a Hydra jobset using a darcs repo as input.
|
||||
testScmInput(
|
||||
type => 'darcs',
|
||||
expr => 'darcs-input.nix',
|
||||
uri => 'darcs-repo',
|
||||
update => 'jobs/darcs-update.sh',
|
||||
|
||||
# directories
|
||||
datadir => $ctx{tmpdir},
|
||||
testdir => $ctx{testdir},
|
||||
jobsdir => $ctx{jobsdir},
|
||||
);
|
||||
|
||||
done_testing;
|
29
t/input-types/deepgit.t
Normal file
29
t/input-types/deepgit.t
Normal file
@ -0,0 +1,29 @@
|
||||
use strict;
|
||||
use Setup;
|
||||
use TestScmInput;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
# Tests the creation of a Hydra jobset using a deep git clone as input.
|
||||
testScmInput(
|
||||
type => 'git',
|
||||
name => 'deepgit',
|
||||
expr => 'deepgit-input.nix',
|
||||
uri => 'git-repo master 1',
|
||||
update => 'jobs/git-update.sh',
|
||||
|
||||
# directories
|
||||
datadir => $ctx{tmpdir},
|
||||
testdir => $ctx{testdir},
|
||||
jobsdir => $ctx{jobsdir},
|
||||
);
|
||||
|
||||
done_testing;
|
29
t/input-types/git-rev.t
Normal file
29
t/input-types/git-rev.t
Normal file
@ -0,0 +1,29 @@
|
||||
use strict;
|
||||
use Setup;
|
||||
use TestScmInput;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
# Tests the creation of a Hydra jobset using a git revision as input.
|
||||
testScmInput(
|
||||
type => 'git',
|
||||
name => 'git-rev',
|
||||
expr => 'git-rev-input.nix',
|
||||
uri => 'git-repo 7f60df502b96fd54bbfa64dd94b56d936a407701',
|
||||
update => 'jobs/git-rev-update.sh',
|
||||
|
||||
# directories
|
||||
datadir => $ctx{tmpdir},
|
||||
testdir => $ctx{testdir},
|
||||
jobsdir => $ctx{jobsdir},
|
||||
);
|
||||
|
||||
done_testing;
|
28
t/input-types/git.t
Normal file
28
t/input-types/git.t
Normal file
@ -0,0 +1,28 @@
|
||||
use strict;
|
||||
use Setup;
|
||||
use TestScmInput;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
# Tests the creation of a Hydra jobset using a git repo as input.
|
||||
testScmInput(
|
||||
type => 'git',
|
||||
expr => 'git-input.nix',
|
||||
uri => 'git-repo',
|
||||
update => 'jobs/git-update.sh',
|
||||
|
||||
# directories
|
||||
datadir => $ctx{tmpdir},
|
||||
testdir => $ctx{testdir},
|
||||
jobsdir => $ctx{jobsdir},
|
||||
);
|
||||
|
||||
done_testing;
|
28
t/input-types/hg.t
Normal file
28
t/input-types/hg.t
Normal file
@ -0,0 +1,28 @@
|
||||
use strict;
|
||||
use Setup;
|
||||
use TestScmInput;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
# Tests the creation of a Hydra jobset using a hg repo as input.
|
||||
testScmInput(
|
||||
type => 'hg',
|
||||
expr => 'hg-input.nix',
|
||||
uri => 'hg-repo',
|
||||
update => 'jobs/hg-update.sh',
|
||||
|
||||
# directories
|
||||
datadir => $ctx{tmpdir},
|
||||
testdir => $ctx{testdir},
|
||||
jobsdir => $ctx{jobsdir},
|
||||
);
|
||||
|
||||
done_testing;
|
28
t/input-types/svn-checkout.t
Normal file
28
t/input-types/svn-checkout.t
Normal file
@ -0,0 +1,28 @@
|
||||
use strict;
|
||||
use Setup;
|
||||
use TestScmInput;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
# Tests the creation of a Hydra jobset using a svn checkout as input.
|
||||
testScmInput(
|
||||
type => 'svn-checkout',
|
||||
expr => 'svn-checkout-input.nix',
|
||||
uri => 'svn-checkout-repo',
|
||||
update => 'jobs/svn-checkout-update.sh',
|
||||
|
||||
# directories
|
||||
datadir => $ctx{tmpdir},
|
||||
testdir => $ctx{testdir},
|
||||
jobsdir => $ctx{jobsdir},
|
||||
);
|
||||
|
||||
done_testing;
|
28
t/input-types/svn.t
Normal file
28
t/input-types/svn.t
Normal file
@ -0,0 +1,28 @@
|
||||
use strict;
|
||||
use Setup;
|
||||
use TestScmInput;
|
||||
|
||||
my %ctx = test_init();
|
||||
|
||||
require Hydra::Schema;
|
||||
require Hydra::Model::DB;
|
||||
|
||||
use Test2::V0;
|
||||
|
||||
my $db = Hydra::Model::DB->new;
|
||||
hydra_setup($db);
|
||||
|
||||
# Tests the creation of a Hydra jobset using a svn repo as input.
|
||||
testScmInput(
|
||||
type => 'svn',
|
||||
expr => 'svn-input.nix',
|
||||
uri => 'svn-repo',
|
||||
update => 'jobs/svn-update.sh',
|
||||
|
||||
# directories
|
||||
datadir => $ctx{tmpdir},
|
||||
testdir => $ctx{testdir},
|
||||
jobsdir => $ctx{jobsdir},
|
||||
);
|
||||
|
||||
done_testing;
|
20
t/jobs/basic.nix
Normal file
20
t/jobs/basic.nix
Normal file
@ -0,0 +1,20 @@
|
||||
with import ./config.nix;
|
||||
{
|
||||
empty_dir =
|
||||
mkDerivation {
|
||||
name = "empty-dir";
|
||||
builder = ./empty-dir-builder.sh;
|
||||
};
|
||||
|
||||
fails =
|
||||
mkDerivation {
|
||||
name = "fails";
|
||||
builder = ./fail.sh;
|
||||
};
|
||||
|
||||
succeed_with_failed =
|
||||
mkDerivation {
|
||||
name = "succeed-with-failed";
|
||||
builder = ./succeed-with-failed.sh;
|
||||
};
|
||||
}
|
18
t/jobs/build-output-as-input.nix
Normal file
18
t/jobs/build-output-as-input.nix
Normal file
@ -0,0 +1,18 @@
|
||||
with import ./config.nix;
|
||||
let
|
||||
jobs = {
|
||||
build1 =
|
||||
mkDerivation {
|
||||
name = "build1";
|
||||
builder = ./empty-dir-builder.sh;
|
||||
};
|
||||
|
||||
build2 =
|
||||
{ build1 }:
|
||||
mkDerivation {
|
||||
name = "build2";
|
||||
builder = ./empty-dir-builder.sh;
|
||||
inherit build1;
|
||||
};
|
||||
};
|
||||
in jobs
|
5
t/jobs/build-product-simple.sh
Executable file
5
t/jobs/build-product-simple.sh
Executable file
@ -0,0 +1,5 @@
|
||||
#! /bin/sh
|
||||
|
||||
mkdir -p $out/nix-support
|
||||
echo "Hello" > $out/text.txt
|
||||
echo "doc none $out/text.txt" > $out/nix-support/hydra-build-products
|
5
t/jobs/build-product-with-spaces.sh
Executable file
5
t/jobs/build-product-with-spaces.sh
Executable file
@ -0,0 +1,5 @@
|
||||
#! /bin/sh
|
||||
|
||||
mkdir -p $out/nix-support
|
||||
echo "Hello" > "$out/some text.txt"
|
||||
echo "doc none \"$out/some text.txt\"" > $out/nix-support/hydra-build-products
|
14
t/jobs/build-products.nix
Normal file
14
t/jobs/build-products.nix
Normal file
@ -0,0 +1,14 @@
|
||||
with import ./config.nix;
|
||||
{
|
||||
simple =
|
||||
mkDerivation {
|
||||
name = "build-product-simple";
|
||||
builder = ./build-product-simple.sh;
|
||||
};
|
||||
|
||||
with_spaces =
|
||||
mkDerivation {
|
||||
name = "build-product-with-spaces";
|
||||
builder = ./build-product-with-spaces.sh;
|
||||
};
|
||||
}
|
10
t/jobs/bzr-checkout-input.nix
Normal file
10
t/jobs/bzr-checkout-input.nix
Normal file
@ -0,0 +1,10 @@
|
||||
with import ./config.nix;
|
||||
{ src }:
|
||||
{
|
||||
copy =
|
||||
mkDerivation {
|
||||
name = "bzr-checkout-input";
|
||||
builder = ./scm-builder.sh;
|
||||
inherit src;
|
||||
};
|
||||
}
|
25
t/jobs/bzr-checkout-update.sh
Executable file
25
t/jobs/bzr-checkout-update.sh
Executable file
@ -0,0 +1,25 @@
|
||||
#! /bin/sh
|
||||
|
||||
repo="$1"
|
||||
STATE_FILE=$(pwd)/.bzr-checkout-state
|
||||
if test -e $STATE_FILE; then
|
||||
state=$(cat $STATE_FILE)
|
||||
test $state -gt 1 && state=0
|
||||
else
|
||||
state=0;
|
||||
fi
|
||||
|
||||
export BZR_HOME; # Set by the Makefile
|
||||
case $state in
|
||||
(0) echo "::Create repo. -- continue -- updated::"
|
||||
bzr init bzr-repo
|
||||
bzr whoami "build <build@invalid.org>" -d bzr-repo
|
||||
touch bzr-repo/bzr-file
|
||||
bzr add bzr-repo/bzr-file
|
||||
bzr commit -m "add bzr-file" bzr-repo/bzr-file
|
||||
ln -s bzr-repo bzr-checkout-repo
|
||||
;;
|
||||
(*) echo "::End. -- stop -- nothing::" ;;
|
||||
esac
|
||||
|
||||
echo $(($state + 1)) > $STATE_FILE
|
10
t/jobs/bzr-input.nix
Normal file
10
t/jobs/bzr-input.nix
Normal file
@ -0,0 +1,10 @@
|
||||
with import ./config.nix;
|
||||
{ src }:
|
||||
{
|
||||
copy =
|
||||
mkDerivation {
|
||||
name = "bzr-input";
|
||||
builder = ./scm-builder.sh;
|
||||
inherit src;
|
||||
};
|
||||
}
|
25
t/jobs/bzr-update.sh
Executable file
25
t/jobs/bzr-update.sh
Executable file
@ -0,0 +1,25 @@
|
||||
#! /bin/sh
|
||||
set -e
|
||||
|
||||
repo="$1"
|
||||
STATE_FILE=$(pwd)/.bzr-state
|
||||
if test -e $STATE_FILE; then
|
||||
state=$(cat $STATE_FILE)
|
||||
test $state -gt 1 && state=0
|
||||
else
|
||||
state=0;
|
||||
fi
|
||||
|
||||
export BZR_HOME; # Set by the Makefile
|
||||
case $state in
|
||||
(0) echo "::Create repo. -- continue -- updated::"
|
||||
bzr init bzr-repo
|
||||
bzr whoami "build <build@invalid.org>" -d bzr-repo
|
||||
touch bzr-repo/bzr-file
|
||||
bzr add bzr-repo/bzr-file
|
||||
bzr commit -m "add bzr-file" bzr-repo/bzr-file
|
||||
;;
|
||||
(*) echo "::End. -- stop -- nothing::";;
|
||||
esac
|
||||
|
||||
echo $(($state + 1)) > $STATE_FILE
|
9
t/jobs/config.nix.in
Normal file
9
t/jobs/config.nix.in
Normal file
@ -0,0 +1,9 @@
|
||||
rec {
|
||||
path = "@testPath@";
|
||||
|
||||
mkDerivation = args:
|
||||
derivation ({
|
||||
system = builtins.currentSystem;
|
||||
PATH = path;
|
||||
} // args);
|
||||
}
|
10
t/jobs/darcs-input.nix
Normal file
10
t/jobs/darcs-input.nix
Normal file
@ -0,0 +1,10 @@
|
||||
with import ./config.nix;
|
||||
{ src }:
|
||||
{
|
||||
copy =
|
||||
mkDerivation {
|
||||
name = "git-input";
|
||||
builder = ./scm-builder.sh;
|
||||
inherit src;
|
||||
};
|
||||
}
|
24
t/jobs/darcs-update.sh
Executable file
24
t/jobs/darcs-update.sh
Executable file
@ -0,0 +1,24 @@
|
||||
#! /bin/sh
|
||||
set -e
|
||||
|
||||
repo="$1"
|
||||
STATE_FILE=$(pwd)/.hg-state
|
||||
if test -e $STATE_FILE; then
|
||||
state=$(cat $STATE_FILE)
|
||||
test $state -gt 1 && state=0
|
||||
else
|
||||
state=0;
|
||||
fi
|
||||
|
||||
case $state in
|
||||
(0) echo "::Create repo. -- continue -- updated::"
|
||||
mkdir darcs-repo
|
||||
darcs init --repodir darcs-repo
|
||||
touch darcs-repo/file
|
||||
darcs add --repodir darcs-repo file
|
||||
darcs record --repodir darcs-repo -a -l -m "add a file" file -A foobar@bar.bar
|
||||
;;
|
||||
(*) echo "::End. -- stop -- nothing::" ;;
|
||||
esac
|
||||
|
||||
echo $(($state + 1)) > $STATE_FILE
|
6
t/jobs/deepgit-builder.sh
Normal file
6
t/jobs/deepgit-builder.sh
Normal file
@ -0,0 +1,6 @@
|
||||
#! /bin/sh
|
||||
|
||||
set -e
|
||||
mkdir $out
|
||||
cp -v $src/* $out/
|
||||
git describe --long > $out/Version
|
10
t/jobs/deepgit-input.nix
Normal file
10
t/jobs/deepgit-input.nix
Normal file
@ -0,0 +1,10 @@
|
||||
with import ./config.nix;
|
||||
{ src }:
|
||||
{
|
||||
copy =
|
||||
mkDerivation {
|
||||
name = "git-input";
|
||||
builder = ./scm-builder.sh;
|
||||
inherit src;
|
||||
};
|
||||
}
|
3
t/jobs/empty-dir-builder.sh
Executable file
3
t/jobs/empty-dir-builder.sh
Executable file
@ -0,0 +1,3 @@
|
||||
#! /bin/sh
|
||||
|
||||
mkdir $out
|
2
t/jobs/fail.sh
Executable file
2
t/jobs/fail.sh
Executable file
@ -0,0 +1,2 @@
|
||||
#! /bin/sh
|
||||
exit 1
|
10
t/jobs/git-input.nix
Normal file
10
t/jobs/git-input.nix
Normal file
@ -0,0 +1,10 @@
|
||||
with import ./config.nix;
|
||||
{ src }:
|
||||
{
|
||||
copy =
|
||||
mkDerivation {
|
||||
name = "git-input";
|
||||
builder = ./scm-builder.sh;
|
||||
inherit src;
|
||||
};
|
||||
}
|
10
t/jobs/git-rev-input.nix
Normal file
10
t/jobs/git-rev-input.nix
Normal file
@ -0,0 +1,10 @@
|
||||
with import ./config.nix;
|
||||
{ src }:
|
||||
{
|
||||
copy =
|
||||
mkDerivation {
|
||||
name = "git-rev-input";
|
||||
builder = ./scm-builder.sh;
|
||||
inherit src;
|
||||
};
|
||||
}
|
31
t/jobs/git-rev-update.sh
Executable file
31
t/jobs/git-rev-update.sh
Executable file
@ -0,0 +1,31 @@
|
||||
#! /bin/sh
|
||||
set -e
|
||||
|
||||
repo=git-repo
|
||||
export HOME=$(pwd)
|
||||
export XDG_CONFIG_HOME=$(pwd)/.config
|
||||
STATE_FILE=$(pwd)/.git-rev-state
|
||||
if test -e $STATE_FILE; then
|
||||
state=1
|
||||
rm $STATE_FILE
|
||||
else
|
||||
state=0
|
||||
touch $STATE_FILE
|
||||
fi
|
||||
|
||||
echo "STATE: $state"
|
||||
case $state in
|
||||
(0) echo "::Create repo. -- continue -- updated::"
|
||||
git init $repo
|
||||
cd $repo
|
||||
git config --global user.email "you@example.com"
|
||||
git config --global user.name "Your Name"
|
||||
|
||||
touch foo
|
||||
git add foo
|
||||
GIT_AUTHOR_DATE="1970-01-01T00:00:00 +0000" GIT_COMMITTER_DATE="1970-01-01T00:00:00 +0000" git commit -m "Add foo"
|
||||
;;
|
||||
(*) echo "::End. -- stop -- nothing::"
|
||||
rm -rf $repo
|
||||
;;
|
||||
esac
|
56
t/jobs/git-update.sh
Executable file
56
t/jobs/git-update.sh
Executable file
@ -0,0 +1,56 @@
|
||||
#! /bin/sh
|
||||
# This script is used both by git & deepgit checks.
|
||||
set -e
|
||||
|
||||
repo=git-repo
|
||||
export HOME=$(pwd)
|
||||
export XDG_CONFIG_HOME=$(pwd)/.config
|
||||
STATE_FILE=$(pwd)/.git-state
|
||||
if test -e $STATE_FILE; then
|
||||
state=$(cat $STATE_FILE)
|
||||
test $state -gt 3 && state=0
|
||||
else
|
||||
state=0;
|
||||
fi
|
||||
|
||||
echo "STATE: $state"
|
||||
case $state in
|
||||
(0) echo "::Create repo. -- continue -- updated::"
|
||||
git init $repo
|
||||
cd $repo
|
||||
git config --global user.email "you@example.com"
|
||||
git config --global user.name "Your Name"
|
||||
|
||||
touch foo
|
||||
git add foo
|
||||
git commit -m "Add foo"
|
||||
git tag -a -m "First Tag." tag0
|
||||
;;
|
||||
(1) echo "::Create new commit. -- continue -- updated::"
|
||||
cd $repo
|
||||
# Increase depth to make sure the tag is not fetched by default.
|
||||
echo 0 > foo
|
||||
git add foo
|
||||
git commit -m "Increase depth 0"
|
||||
echo 1 > foo
|
||||
git add foo
|
||||
git commit -m "Increase depth 1"
|
||||
echo 2 > foo
|
||||
git add foo
|
||||
git commit -m "Increase depth 2"
|
||||
echo 0 > bar
|
||||
git add bar
|
||||
git commit -m "Add bar with 0"
|
||||
;;
|
||||
(2) echo "::Amend commit. (push -f) -- continue -- updated::"
|
||||
cd $repo
|
||||
echo 1 > bar
|
||||
git add bar
|
||||
git commit --amend -m "Add bar with 1"
|
||||
;;
|
||||
(*) echo "::End. -- stop -- nothing::"
|
||||
rm -rf $repo
|
||||
;;
|
||||
esac
|
||||
|
||||
echo $(($state + 1)) > $STATE_FILE
|
10
t/jobs/hg-input.nix
Normal file
10
t/jobs/hg-input.nix
Normal file
@ -0,0 +1,10 @@
|
||||
with import ./config.nix;
|
||||
{ src }:
|
||||
{
|
||||
copy =
|
||||
mkDerivation {
|
||||
name = "hg-input";
|
||||
builder = ./scm-builder.sh;
|
||||
inherit src;
|
||||
};
|
||||
}
|
24
t/jobs/hg-update.sh
Executable file
24
t/jobs/hg-update.sh
Executable file
@ -0,0 +1,24 @@
|
||||
#! /bin/sh
|
||||
set -e
|
||||
|
||||
repo="$1"
|
||||
STATE_FILE=$(pwd)/.hg-state
|
||||
if test -e $STATE_FILE; then
|
||||
state=$(cat $STATE_FILE)
|
||||
test $state -gt 1 && state=0
|
||||
else
|
||||
state=0;
|
||||
fi
|
||||
|
||||
case $state in
|
||||
(0) echo "::Create repo. -- continue -- updated::"
|
||||
hg init hg-repo
|
||||
touch hg-repo/hg-file
|
||||
cd hg-repo
|
||||
hg add hg-file
|
||||
hg commit -m "add hg file" hg-file -u foobar
|
||||
;;
|
||||
(*) echo "::End. -- stop -- nothing::" ;;
|
||||
esac
|
||||
|
||||
echo $(($state + 1)) > $STATE_FILE
|
33
t/jobs/runcommand.nix
Normal file
33
t/jobs/runcommand.nix
Normal file
@ -0,0 +1,33 @@
|
||||
with import ./config.nix;
|
||||
{
|
||||
metrics = (
|
||||
mkDerivation {
|
||||
name = "my-build-product";
|
||||
builder = "/bin/sh";
|
||||
outputs = [ "out" "bin" ];
|
||||
args = [
|
||||
(
|
||||
builtins.toFile "builder.sh" ''
|
||||
#! /bin/sh
|
||||
|
||||
echo "$PATH"
|
||||
|
||||
mkdir $bin
|
||||
echo "foo" > $bin/bar
|
||||
|
||||
metrics=$out/nix-support/hydra-metrics
|
||||
mkdir -p "$(dirname "$metrics")"
|
||||
echo "lineCoverage 18 %" >> "$metrics"
|
||||
echo "maxResident 27 KiB" >> "$metrics"
|
||||
''
|
||||
)
|
||||
];
|
||||
}
|
||||
) // {
|
||||
meta = {
|
||||
license = "GPL";
|
||||
description = "An example meta property.";
|
||||
homepage = "https://github.com/NixOS/hydra";
|
||||
};
|
||||
};
|
||||
}
|
3
t/jobs/scm-builder.sh
Executable file
3
t/jobs/scm-builder.sh
Executable file
@ -0,0 +1,3 @@
|
||||
#! /bin/sh
|
||||
mkdir $out
|
||||
cp -v $src/* $out/
|
3
t/jobs/succeed-with-failed.sh
Executable file
3
t/jobs/succeed-with-failed.sh
Executable file
@ -0,0 +1,3 @@
|
||||
#! /bin/sh
|
||||
mkdir -p $out/nix-support
|
||||
touch $out/nix-support/failed
|
10
t/jobs/svn-checkout-input.nix
Normal file
10
t/jobs/svn-checkout-input.nix
Normal file
@ -0,0 +1,10 @@
|
||||
with import ./config.nix;
|
||||
{ src }:
|
||||
{
|
||||
copy =
|
||||
mkDerivation {
|
||||
name = "bzr-checkout-input";
|
||||
builder = ./scm-builder.sh;
|
||||
inherit src;
|
||||
};
|
||||
}
|
24
t/jobs/svn-checkout-update.sh
Executable file
24
t/jobs/svn-checkout-update.sh
Executable file
@ -0,0 +1,24 @@
|
||||
#! /bin/sh
|
||||
|
||||
repo="$1"
|
||||
STATE_FILE=$(pwd)/.svn-checkout-state
|
||||
if test -e $STATE_FILE; then
|
||||
state=$(cat $STATE_FILE)
|
||||
test $state -gt 1 && state=0
|
||||
else
|
||||
state=0;
|
||||
fi
|
||||
|
||||
case $state in
|
||||
(0) echo "::Create repo. -- continue -- updated::"
|
||||
svnadmin create svn-repo
|
||||
svn co file://$PWD/$repo svn-checkout
|
||||
touch svn-checkout/svn-file
|
||||
svn add svn-checkout/svn-file
|
||||
svn commit -m "add svn file" svn-checkout/svn-file
|
||||
ln -s svn-repo svn-checkout-repo
|
||||
;;
|
||||
(*) echo "::End. -- stop -- nothing::" ;;
|
||||
esac
|
||||
|
||||
echo $(($state + 1)) > $STATE_FILE
|
10
t/jobs/svn-input.nix
Normal file
10
t/jobs/svn-input.nix
Normal file
@ -0,0 +1,10 @@
|
||||
with import ./config.nix;
|
||||
{ src }:
|
||||
{
|
||||
copy =
|
||||
mkDerivation {
|
||||
name = "svn-input";
|
||||
builder = ./scm-builder.sh;
|
||||
inherit src;
|
||||
};
|
||||
}
|
24
t/jobs/svn-update.sh
Executable file
24
t/jobs/svn-update.sh
Executable file
@ -0,0 +1,24 @@
|
||||
#! /bin/sh
set -e

# Scripted SVN repository mutator for the svn input test.  Each run
# advances a state machine persisted in .svn-state and prints a
#   ::message -- continue|stop -- updated|nothing::
# marker parsed by Setup::updateRepository.
# Expansions quoted so paths containing spaces don't word-split.

repo=svn-repo
STATE_FILE="$(pwd)/.svn-state"
if test -e "$STATE_FILE"; then
    state=$(cat "$STATE_FILE")
    # Wrap around after the last scripted mutation.
    test "$state" -gt 1 && state=0
else
    state=0
fi

case $state in
    (0) echo "::Create repo. -- continue -- updated::"
        svnadmin create svn-repo
        svn co "file://$PWD/$repo" svn-checkout
        touch svn-checkout/svn-file
        svn add svn-checkout/svn-file
        svn commit -m "add svn file" svn-checkout/svn-file
        ;;
    (*) echo "::End. -- stop -- nothing::";;
esac

echo $(($state + 1)) > "$STATE_FILE"
|
173
t/lib/Setup.pm
Normal file
173
t/lib/Setup.pm
Normal file
@ -0,0 +1,173 @@
|
||||
package Setup;
|
||||
|
||||
use strict;
|
||||
use Exporter;
|
||||
use Test::PostgreSQL;
|
||||
use File::Temp;
|
||||
use File::Path qw(make_path);
|
||||
use File::Basename;
|
||||
use Cwd qw(abs_path getcwd);
|
||||
|
||||
our @ISA = qw(Exporter);
|
||||
our @EXPORT = qw(test_init hydra_setup nrBuildsForJobset queuedBuildsForJobset nrQueuedBuildsForJobset createBaseJobset createJobsetWithOneInput evalSucceeds runBuild sendNotifications updateRepository);
|
||||
|
||||
# Set up the environment for running tests.
|
||||
#
|
||||
# Hash Parameters:
|
||||
#
|
||||
# * hydra_config: configuration for the Hydra processes for your test.
|
||||
#
|
||||
# This clears several environment variables and sets them to ephemeral
|
||||
# values: a temporary database, temporary Nix store, temporary Hydra
|
||||
# data directory, etc.
|
||||
#
|
||||
# Note: This function must run _very_ early, before nearly any Hydra
|
||||
# libraries are loaded. To use this, you very likely need to `use Setup`
|
||||
# and then run `test_init`, and then `require` the Hydra libraries you
|
||||
# need.
|
||||
#
|
||||
# It returns a tuple: a handle to a temporary directory and a handle to
|
||||
# the postgres service. If either of these variables go out of scope,
|
||||
# those resources are released and the test environment becomes invalid.
|
||||
#
|
||||
# Look at the top of an existing `.t` file to see how this should be used
|
||||
# in practice.
|
||||
sub test_init {
    my %opts = @_;

    # Everything lives under one temp dir; when $dir goes out of scope
    # the whole test environment is removed.
    my $dir = File::Temp->newdir();

    $ENV{'HYDRA_DATA'} = "$dir/hydra-data";
    mkdir $ENV{'HYDRA_DATA'};

    # Minimal Nix configuration: the tests run without sandboxing.
    $ENV{'NIX_CONF_DIR'} = "$dir/nix/etc/nix";
    make_path($ENV{'NIX_CONF_DIR'});
    my $nixconf = "$ENV{'NIX_CONF_DIR'}/nix.conf";
    # Fixed: a second "my $fh" below used to mask this lexical, which
    # warns under "perl -w"; the handles now have distinct names.
    open(my $nix_fh, '>', $nixconf) or die "Could not open file '$nixconf' $!";
    print $nix_fh "sandbox = false\n";
    close $nix_fh;

    # Write the caller-supplied Hydra configuration (may be empty).
    $ENV{'HYDRA_CONFIG'} = "$dir/hydra.conf";
    open(my $conf_fh, '>', $ENV{'HYDRA_CONFIG'})
        or die "Could not open file '" . $ENV{'HYDRA_CONFIG'} . "' $!";
    print $conf_fh $opts{'hydra_config'} || "";
    close $conf_fh;

    $ENV{'NIX_STATE_DIR'} = "$dir/nix/var/nix";

    $ENV{'NIX_MANIFESTS_DIR'} = "$dir/nix/var/nix/manifests";
    $ENV{'NIX_STORE_DIR'} = "$dir/nix/store";
    $ENV{'NIX_LOG_DIR'} = "$dir/nix/var/log/nix";

    # Ephemeral PostgreSQL instance; hydra-init then creates the schema.
    my $pgsql = Test::PostgreSQL->new(
        extra_initdb_args => "--locale C.UTF-8"
    );
    $ENV{'HYDRA_DBI'} = $pgsql->dsn;
    system("hydra-init") == 0 or die;
    return (
        db => $pgsql,
        tmpdir => $dir,
        testdir => abs_path(dirname(__FILE__) . "/.."),
        jobsdir => abs_path(dirname(__FILE__) . "/../jobs")
    );
}
|
||||
|
||||
# Forward to Hydra::Helper::Nix::captureStdoutStderr(timeout, @cmd),
# which runs a command and returns (exit-flag, stdout, stderr).
sub captureStdoutStderr {
    # "Lazy"-load Hydra::Helper::Nix to avoid the compile-time
    # import of Hydra::Model::DB. Early loading of the DB class
    # causes fixation of the DSN, and we need to fixate it after
    # the temporary DB is setup.
    require Hydra::Helper::Nix;
    return Hydra::Helper::Nix::captureStdoutStderr(@_)
}
|
||||
|
||||
# Create the minimal database fixture the tests expect: a passwordless
# "root" user.  $db is a connected Hydra::Model::DB schema handle.
sub hydra_setup {
    my ($db) = @_;
    $db->resultset('Users')->create({ username => "root", emailaddress => 'root@invalid.org', password => '' });
}
|
||||
|
||||
# Total number of builds (finished or not) recorded for the jobset.
sub nrBuildsForJobset {
    my ($js) = @_;
    return $js->builds->count;
}
|
||||
|
||||
# Return the ResultSet of builds in $jobset that have not finished yet
# (i.e. the ones still sitting in the queue).
sub queuedBuildsForJobset {
    my ($jobset) = @_;
    return $jobset->builds->search({finished => 0});
}
|
||||
|
||||
# Number of not-yet-finished builds in $jobset.
sub nrQueuedBuildsForJobset {
    my ($jobset) = @_;
    return queuedBuildsForJobset($jobset)->count ;
}
|
||||
|
||||
# Create (or reuse) the "tests" project and attach a fresh jobset named
# $jobsetName that evaluates $nixexprpath via a "jobs" path input
# pointing at $jobspath.  Returns the new jobset row.
sub createBaseJobset {
    my ($jobsetName, $nixexprpath, $jobspath) = @_;

    my $db = Hydra::Model::DB->new;
    my $project = $db->resultset('Projects')->update_or_create({name => "tests", displayname => "", owner => "root"});
    my $jobset = $project->jobsets->create({name => $jobsetName, nixexprinput => "jobs", nixexprpath => $nixexprpath, emailoverride => ""});

    my $input = $jobset->jobsetinputs->create({name => "jobs", type => "path"});
    $input->jobsetinputalts->create({altnr => 0, value => $jobspath});

    return $jobset;
}
|
||||
|
||||
# Like createBaseJobset, but additionally wires up one extra input:
# $name of $type pointing at $uri.  Used by the SCM input tests.
sub createJobsetWithOneInput {
    my ($jobsetName, $nixexprpath, $name, $type, $uri, $jobspath) = @_;
    my $jobset = createBaseJobset($jobsetName, $nixexprpath, $jobspath);

    my $input = $jobset->jobsetinputs->create({name => $name, type => $type});
    $input->jobsetinputalts->create({altnr => 0, value => $uri});

    return $jobset;
}
|
||||
|
||||
# Evaluate the jobset with hydra-eval-jobset, echoing any diagnostics
# to STDERR.  Returns true when the evaluation exited successfully.
sub evalSucceeds {
    my ($jobset) = @_;
    my ($res, $stdout, $stderr) =
        captureStdoutStderr(60, ("hydra-eval-jobset", $jobset->project->name, $jobset->name));
    chomp($stdout, $stderr);
    if ($jobset->errormsg) {
        print STDERR "Evaluation errors for jobset ".$jobset->project->name.":".$jobset->name.": \n".$jobset->errormsg."\n";
    }
    print STDERR "STDOUT: $stdout\n" unless $stdout eq "";
    print STDERR "STDERR: $stderr\n" unless $stderr eq "";
    return !$res;
}
|
||||
|
||||
# Run a single build with hydra-queue-runner --build-one, dumping the
# runner's output on failure.  Returns true on success.
sub runBuild {
    my ($build) = @_;
    my ($res, $stdout, $stderr) =
        captureStdoutStderr(60, ("hydra-queue-runner", "-vvvv", "--build-one", $build->id));
    if ($res) {
        print STDERR "Queue runner stdout: $stdout\n" unless $stdout eq "";
        print STDERR "Queue runner stderr: $stderr\n" unless $stderr eq "";
    }
    return !$res;
}
|
||||
|
||||
# Drain the queued notifications with hydra-notify, dumping its output
# on failure.  Returns true when hydra-notify exited successfully.
#
# The empty prototype "()" was dropped: Perl prototypes do not validate
# arguments and interfere with normal sub-call parsing; callers that
# write sendNotifications() are unaffected.
sub sendNotifications {
    my ($res, $stdout, $stderr) = captureStdoutStderr(60, ("hydra-notify", "--queued-only"));
    if ($res) {
        print STDERR "hydra notify stdout: $stdout\n" if $stdout ne "";
        print STDERR "hydra notify stderr: $stderr\n" if $stderr ne "";
    }
    return !$res;
}
|
||||
|
||||
# Run an SCM update script inside $scratchdir and parse its marker line.
#
# The update scripts print a marker of the form
#   ::<message> -- <continue|stop> -- <updated|nothing>::
# Returns (should-keep-looping, did-the-repo-change) as booleans.
# Dies if the update script itself fails.  Note the cwd is changed for
# the duration of the script and restored afterwards.
sub updateRepository {
    my ($scm, $update, $scratchdir) = @_;
    my $curdir = getcwd;
    chdir "$scratchdir";
    my ($res, $stdout, $stderr) = captureStdoutStderr(60, ($update, $scm));
    chdir "$curdir";
    die "unexpected update error with $scm: $stderr\n" if $res;
    my ($message, $loop, $status) = $stdout =~ m/::(.*) -- (.*) -- (.*)::/;
    print STDOUT "Update $scm repository: $message\n";
    return ($loop eq "continue", $status eq "updated");
}
|
||||
|
||||
1;
|
75
t/lib/TestScmInput.pm
Normal file
75
t/lib/TestScmInput.pm
Normal file
@ -0,0 +1,75 @@
|
||||
package TestScmInput;
|
||||
use warnings;
|
||||
use strict;
|
||||
|
||||
use Exporter;
|
||||
use Test2::V0;
|
||||
|
||||
use Setup;
|
||||
|
||||
our @ISA = qw(Exporter);
|
||||
our @EXPORT = qw(testScmInput);
|
||||
|
||||
# Generic test for the various SCM types Hydra supports.
|
||||
#
|
||||
# Takes input in the form of:
|
||||
#
|
||||
# (
|
||||
# type => "input type",
|
||||
# name => "jobset name", # defaults to the input's type
|
||||
# uri => "uri",
|
||||
# update => "script for updating the input",
|
||||
# datadir => "data dir", # returned from `test_init()` subroutine
|
||||
# testdir => "the hydra tests directory", # usually just `getcwd`
|
||||
# )
|
||||
#
|
||||
# and runs a test that constructs a jobset from the specified input.
|
||||
sub testScmInput {
    # Collect named args, dying if a required arg is missing
    my %args = @_;
    my $type = $args{type} // die "required arg 'type' missing";
    my $expr = $args{expr} // die "required arg 'expr' missing";

    # $name is optional and defaults to $type
    my $name = $args{name} // $type;

    # Get directories
    my $testdir = $args{testdir} // die "required arg 'testdir' missing";
    my $datadir = $args{datadir} // die "required arg 'datadir' missing";
    my $jobsdir = $args{jobsdir} // die "required arg 'jobsdir' missing";

    # The update script path is relative to the test directory.
    my $update = $args{update} // die "required arg 'update' missing";
    $update = "$testdir/$update";

    # Create scratch locations
    my $scratchdir = "$datadir/scratch";
    mkdir $scratchdir or die "mkdir($scratchdir): $!\n";

    # $uri and $update are constructed from the directories
    my $uri = $args{uri} // die "required arg 'uri' missing";
    $uri = "file://$scratchdir/$uri";

    subtest "With the SCM input named $name" => sub {
        my $jobset = createJobsetWithOneInput($name, $expr, 'src', $type, $uri, $jobsdir);

        my ($mutations, $queueSize) = (0, 0);

        # Drive the repository through its scripted states; each call
        # to updateRepository reports whether to keep looping and
        # whether the repo actually changed this round.
        my ($loop, $updated) = updateRepository($name, $update, $scratchdir);
        while ($loop) {
            subtest "Mutation number $mutations" => sub {
                ok(evalSucceeds($jobset), "Evaluating nix-expression.");

                if ($updated) {
                    $queueSize++;
                    is(nrQueuedBuildsForJobset($jobset), $queueSize, "Expect $queueSize jobs in the queue.");
                    # Re-evaluating an unchanged input must not queue
                    # additional builds (checked just below).
                    ok(evalSucceeds($jobset), "Evaluating nix-expression again.");
                }

                is(nrQueuedBuildsForJobset($jobset), $queueSize, "Expect deterministic evaluation.");

                $mutations++;
                ($loop, $updated) = updateRepository($name, $update, $scratchdir);
            };
        }
    };
}
|
140
t/plugins/runcommand.t
Normal file
140
t/plugins/runcommand.t
Normal file
@ -0,0 +1,140 @@
|
||||
use feature 'unicode_strings';
use strict;
use warnings;
use JSON;
use Setup;

# Configure Hydra so every finished build dumps its RunCommand JSON
# payload into $HYDRA_DATA/joboutput.json for inspection below.
my %ctx = test_init(
    hydra_config => q|
    <runcommand>
      command = cp "$HYDRA_JSON" "$HYDRA_DATA/joboutput.json"
    </runcommand>
|);

# Loaded after test_init so the DSN is fixated on the temporary DB.
require Hydra::Schema;
require Hydra::Model::DB;

use Test2::V0;

my $db = Hydra::Model::DB->new;
hydra_setup($db);

my $project = $db->resultset('Projects')->create({name => "tests", displayname => "", owner => "root"});

# Most basic test case, no parameters
my $jobset = createBaseJobset("basic", "runcommand.nix", $ctx{jobsdir});

ok(evalSucceeds($jobset), "Evaluating jobs/runcommand.nix should exit with return code 0");
# (description typo fixed: previously read "1 build1")
is(nrQueuedBuildsForJobset($jobset), 1, "Evaluating jobs/runcommand.nix should result in 1 build");

(my $build) = queuedBuildsForJobset($jobset);

is($build->job, "metrics", "The only job should be metrics");
ok(runBuild($build), "Build should exit with code 0");
my $newbuild = $db->resultset('Builds')->find($build->id);
is($newbuild->finished, 1, "Build should be finished.");
is($newbuild->buildstatus, 0, "Build should have buildstatus 0.");

ok(sendNotifications(), "Notifications execute successfully.");

# Slurp and decode the JSON payload written by the runcommand hook.
my $dat = do {
    my $filename = $ENV{'HYDRA_DATA'} . "/joboutput.json";
    # Fixed die message: the old '\$filename\"' escaping printed the
    # literal text "$filename\"" instead of the actual path.
    open(my $json_fh, "<", $filename)
        or die("Can't open \"$filename\": $!\n");
    local $/;
    my $json = JSON->new;
    $json->decode(<$json_fh>)
};

use Data::Dumper;
print Dumper($dat);
|
||||
|
||||
# Cross-check every field of the RunCommand payload against the build
# row in the database and the static meta attributes of runcommand.nix.
subtest "Validate the top level fields match" => sub {
    is($dat->{build}, $newbuild->id, "The build event matches our expected ID.");
    is($dat->{buildStatus}, 0, "The build status matches.");
    is($dat->{event}, "buildFinished", "The build event matches.");
    is($dat->{finished}, 1, "The build finished.");
    is($dat->{project}, "tests", "The project matches.");
    is($dat->{jobset}, "basic", "The jobset matches.");
    is($dat->{job}, "metrics", "The job matches.");
    is($dat->{nixName}, "my-build-product", "The nixName matches.");
    is($dat->{system}, $newbuild->system, "The system matches.");
    is($dat->{drvPath}, $newbuild->drvpath, "The derivation path matches.");
    is($dat->{timestamp}, $newbuild->timestamp, "The result has a timestamp field.");
    is($dat->{startTime}, $newbuild->starttime, "The result has a startTime field.");
    is($dat->{stopTime}, $newbuild->stoptime, "The result has a stopTime field.");
    is($dat->{homepage}, "https://github.com/NixOS/hydra", "The homepage is passed.");
    is($dat->{description}, "An example meta property.", "The description is passed.");
    is($dat->{license}, "GPL", "The license is passed.");
};

subtest "Validate the outputs match" => sub {
    is(scalar(@{$dat->{outputs}}), 2, "There are exactly two outputs");

    subtest "output: out" => sub {
        my ($output) = grep { $_->{name} eq "out" } @{$dat->{outputs}};
        my $expectedoutput = $newbuild->buildoutputs->find({name => "out"});

        # (typo fixed: "corrrectly")
        is($output->{name}, "out", "Output is named correctly");
        is($output->{path}, $expectedoutput->path, "The output path matches the database's path.");
    };

    subtest "output: bin" => sub {
        my ($output) = grep { $_->{name} eq "bin" } @{$dat->{outputs}};
        my $expectedoutput = $newbuild->buildoutputs->find({name => "bin"});

        is($output->{name}, "bin", "Output is named correctly");
        is($output->{path}, $expectedoutput->path, "The output path matches the database's path.");
    };
};

subtest "Validate the metrics match" => sub {
    is(scalar(@{$dat->{metrics}}), 2, "There are exactly two metrics");

    my ($lineCoverage) = grep { $_->{name} eq "lineCoverage" } @{$dat->{metrics}};
    my ($maxResident) = grep { $_->{name} eq "maxResident" } @{$dat->{metrics}};

    subtest "verifying the lineCoverage metric" => sub {
        is($lineCoverage->{name}, "lineCoverage", "The name matches.");
        is($lineCoverage->{value}, 18, "The value matches.");
        is($lineCoverage->{unit}, "%", "The unit matches.");
    };

    subtest "verifying the maxResident metric" => sub {
        is($maxResident->{name}, "maxResident", "The name matches.");
        is($maxResident->{value}, 27, "The value matches.");
        is($maxResident->{unit}, "KiB", "The unit matches.");
    };
};

subtest "Validate the products match" => sub {
    is(scalar(@{$dat->{outputs}}), 2, "There are exactly two outputs");

    subtest "product: out" => sub {
        my ($product) = grep { $_->{name} eq "my-build-product" } @{$dat->{products}};
        my $expectedproduct = $newbuild->buildproducts->find({name => "my-build-product"});

        is($product->{name}, "my-build-product", "The build product is named correctly.");
        is($product->{subtype}, "", "The subtype is empty.");
        is($product->{productNr}, $expectedproduct->productnr, "The product number matches.");
        is($product->{defaultPath}, "", "The default path matches.");
        is($product->{path}, $expectedproduct->path, "The path matches the output.");
        is($product->{fileSize}, undef, "The fileSize is undefined for the nix-build output type.");
        is($product->{sha256hash}, undef, "The sha256hash is undefined for the nix-build output type.");
    };

    # (label fixed: this group checks a product, not an output,
    # matching the sibling "product: out" group)
    subtest "product: bin" => sub {
        my ($product) = grep { $_->{name} eq "my-build-product-bin" } @{$dat->{products}};
        my $expectedproduct = $newbuild->buildproducts->find({name => "my-build-product-bin"});

        is($product->{name}, "my-build-product-bin", "The build product is named correctly.");
        is($product->{subtype}, "bin", "The subtype matches the output name");
        is($product->{productNr}, $expectedproduct->productnr, "The product number matches.");
        is($product->{defaultPath}, "", "The default path matches.");
        is($product->{path}, $expectedproduct->path, "The path matches the output.");
        is($product->{fileSize}, undef, "The fileSize is undefined for the nix-build output type.");
        is($product->{sha256hash}, undef, "The sha256hash is undefined for the nix-build output type.");
    };
};

done_testing;
|
4
t/s3-backup-test.config
Normal file
4
t/s3-backup-test.config
Normal file
@ -0,0 +1,4 @@
|
||||
# Hydra configuration for the S3 backup test: back up the build
# products of job tests:basic:job into the "hydra" bucket (the test
# fakes S3 under /tmp/s3/hydra).
<s3backup>
jobs = tests:basic:job
name = hydra
</s3backup>
|
49
t/s3-backup-test.pl
Normal file
49
t/s3-backup-test.pl
Normal file
@ -0,0 +1,49 @@
|
||||
use strict;
use File::Basename;
use Hydra::Model::DB;
use Hydra::Helper::Nix;
use Nix::Store;
use Cwd;

my $db = Hydra::Model::DB->new;

use Test::Simple tests => 6;

# Minimal fixture: a passwordless root user plus a "tests" project with
# one jobset evaluating ./jobs/default.nix.
$db->resultset('Users')->create({ username => "root", emailaddress => 'root@invalid.org', password => '' });

$db->resultset('Projects')->create({name => "tests", displayname => "", owner => "root"});
my $project = $db->resultset('Projects')->update_or_create({name => "tests", displayname => "", owner => "root"});
my $jobset = $project->jobsets->create({name => "basic", nixexprinput => "jobs", nixexprpath => "default.nix", emailoverride => ""});

my $jobsetinput;

$jobsetinput = $jobset->jobsetinputs->create({name => "jobs", type => "path"});
$jobsetinput->jobsetinputalts->create({altnr => 0, value => getcwd . "/jobs"});
system("hydra-eval-jobset " . $jobset->project->name . " " . $jobset->name);

# Build every queued build; remember the store hash of the build that
# matches the s3backup "jobs" pattern (tests:basic:job) so the GC
# checks below can look for its .nar/.narinfo in the fake bucket.
my $successful_hash;
foreach my $build ($jobset->builds->search({finished => 0})) {
    system("hydra-build " . $build->id);
    my @outputs = $build->buildoutputs->all;
    my $hash = substr basename($outputs[0]->path), 0, 32;
    if ($build->job->name eq "job") {
        ok(-e "/tmp/s3/hydra/$hash.nar", "The nar of a successful matched build is uploaded");
        ok(-e "/tmp/s3/hydra/$hash.narinfo", "The narinfo of a successful matched build is uploaded");
        $successful_hash = $hash;
    }
}
|
||||
|
||||
# While the build is still a GC root its backup must survive collection.
system("hydra-s3-backup-collect-garbage");
ok(-e "/tmp/s3/hydra/$successful_hash.nar", "The nar of a build that's a root is not removed by gc");
ok(-e "/tmp/s3/hydra/$successful_hash.narinfo", "The narinfo of a build that's a root is not removed by gc");

# Drop every GC root, collect again: now the backups must disappear.
my $gcRootsDir = getGCRootsDir;
opendir DIR, $gcRootsDir or die;
while(readdir DIR) {
    next if $_ eq "." or $_ eq "..";
    unlink "$gcRootsDir/$_";
}
closedir DIR;
system("hydra-s3-backup-collect-garbage");
# "!" instead of "not": low-precedence "not" parsed as
# ok(not(EXPR, "desc")), swallowing the description into its operand,
# so the old assertions never tested the file's existence.
ok(!-e "/tmp/s3/hydra/$successful_hash.nar", "The nar of a build that's not a root is removed by gc");
ok(!-e "/tmp/s3/hydra/$successful_hash.narinfo", "The narinfo of a build that's not a root is removed by gc");
|
56
t/setup-notifications-jobset.pl
Normal file
56
t/setup-notifications-jobset.pl
Normal file
@ -0,0 +1,56 @@
|
||||
use LWP::UserAgent;
|
||||
use JSON;
|
||||
|
||||
my $ua = LWP::UserAgent->new;
|
||||
$ua->cookie_jar({});
|
||||
|
||||
# Send one JSON request to the local Hydra at http://localhost:3000.
#
# $opts is a hashref: uri (required), method (defaults to GET),
# data (encoded as the JSON request body when present).
# Prints the full response and returns the HTTP::Response object.
sub request_json {
    my ($opts) = @_;
    my $req = HTTP::Request->new;
    # Capture the defaulted method once ("//" keeps a hypothetical "0"
    # method, unlike "or"), and reuse it below so the POST check does
    # not warn when {method} is absent.
    my $method = $opts->{method} // "GET";
    $req->method($method);
    $req->uri("http://localhost:3000$opts->{uri}");
    $req->header(Accept => "application/json");
    # Hydra's CSRF protection requires a local Referer on POSTs.
    $req->header(Referer => "http://localhost:3000/") if $method eq "POST";
    $req->content(encode_json($opts->{data})) if defined $opts->{data};
    my $res = $ua->request($req);
    print $res->as_string();
    return $res;
}
|
||||
|
||||
# Log in as root (the session cookie is kept in $ua's cookie jar).
my $result = request_json({
    uri => "/login",
    method => "POST",
    data => {
        username => "root",
        password => "foobar"
    }
});

# Create the "sample" project...
$result = request_json({
    uri => '/project/sample',
    method => 'PUT',
    data => {
        displayname => "Sample",
        enabled => "1",
        visible => "1",
    }
});

# ...and its "default" jobset, polling /run/jobset every 5 seconds.
$result = request_json({
    uri => '/jobset/sample/default',
    method => 'PUT',
    data => {
        nixexprpath => "default.nix",
        nixexprinput => "my-src",
        inputs => {
            "my-src" => {
                type => "path",
                value => "/run/jobset"
            }
        },
        enabled => "1",
        visible => "1",
        checkinterval => "5",
        keepnr => 1
    }
});
|
24
t/test.pl
Normal file
24
t/test.pl
Normal file
@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env perl
# HARNESS-NO-PRELOAD
# HARNESS-CAT-LONG
# THIS IS A GENERATED YATH RUNNER TEST
use strict;
use warnings;

use lib 'lib';
BEGIN {
    # Point yath's script variable at the installed binary so it can
    # re-exec itself correctly.
    use File::Which qw(which);
    $App::Yath::Script::SCRIPT = which 'yath';
}
use App::Yath::Util qw/find_yath/;

# Delegate the actual test run to yath, searching the current
# directory for .t files; pass any extra arguments straight through.
system($^X, find_yath(), '-D', 'test', '--default-search' => './', @ARGV);
my $exit = $?;

# This makes sure it works with prove: emit a one-test TAP plan that
# reflects yath's overall exit status.
print "1..1\n";
print "not " if $exit;
print "ok 1 - Passed tests when run by yath\n";
print STDERR "yath exited with $exit" if $exit;

exit($exit ? 255 : 0);
|
Reference in New Issue
Block a user