Merge branch 'build-ng'
This commit is contained in:
@ -45,7 +45,7 @@ sub clear_queue_non_current : Chained('admin') PathPart('clear-queue-non-current
|
||||
|
||||
sub clearfailedcache : Chained('admin') PathPart('clear-failed-cache') Args(0) {
|
||||
my ($self, $c) = @_;
|
||||
my $r = `nix-store --clear-failed-paths '*'`;
|
||||
$c->model('DB::FailedPaths')->delete;
|
||||
$c->res->redirect($c->request->referer // "/");
|
||||
}
|
||||
|
||||
|
@ -67,10 +67,6 @@ sub build_GET {
|
||||
$c->stash->{available} = all { isValidPath($_->path) } $build->buildoutputs->all;
|
||||
$c->stash->{drvAvailable} = isValidPath $build->drvpath;
|
||||
|
||||
if (!$build->finished && $build->busy) {
|
||||
$c->stash->{logtext} = decode("utf-8", read_file($build->logfile, err_mode => 'quiet') // "");
|
||||
}
|
||||
|
||||
if ($build->finished && $build->iscachedbuild) {
|
||||
my $path = ($build->buildoutputs)[0]->path or die;
|
||||
my $cachedBuildStep = findBuildStepByOutPath($self, $c, $path);
|
||||
@ -123,26 +119,32 @@ sub view_nixlog : Chained('buildChain') PathPart('nixlog') {
|
||||
|
||||
$c->stash->{step} = $step;
|
||||
|
||||
showLog($c, $mode, $step->drvpath, map { $_->path } $step->buildstepoutputs->all);
|
||||
showLog($c, $mode, $step->busy == 0, $step->drvpath,
|
||||
map { $_->path } $step->buildstepoutputs->all);
|
||||
}
|
||||
|
||||
|
||||
sub view_log : Chained('buildChain') PathPart('log') {
|
||||
my ($self, $c, $mode) = @_;
|
||||
showLog($c, $mode, $c->stash->{build}->drvpath, map { $_->path } $c->stash->{build}->buildoutputs->all);
|
||||
showLog($c, $mode, $c->stash->{build}->finished,
|
||||
$c->stash->{build}->drvpath,
|
||||
map { $_->path } $c->stash->{build}->buildoutputs->all);
|
||||
}
|
||||
|
||||
|
||||
sub showLog {
|
||||
my ($c, $mode, $drvPath, @outPaths) = @_;
|
||||
my ($c, $mode, $finished, $drvPath, @outPaths) = @_;
|
||||
|
||||
my $logPath = findLog($c, $drvPath, @outPaths);
|
||||
|
||||
notFound($c, "The build log of derivation ‘$drvPath’ is not available.") unless defined $logPath;
|
||||
|
||||
my $size = stat($logPath)->size;
|
||||
error($c, "This build log is too big to display ($size bytes).")
|
||||
if $size >= 64 * 1024 * 1024;
|
||||
# Don't send logs that we can't stream.
|
||||
my $size = stat($logPath)->size; # FIXME: not so meaningful for compressed logs
|
||||
error($c, "This build log is too big to display ($size bytes).") unless
|
||||
$mode eq "raw"
|
||||
|| (($mode eq "tail" || $mode eq "tail-reload") && $logPath !~ /\.bz2$/)
|
||||
|| $size < 64 * 1024 * 1024;
|
||||
|
||||
if (!$mode) {
|
||||
# !!! quick hack
|
||||
@ -154,12 +156,10 @@ sub showLog {
|
||||
}
|
||||
|
||||
elsif ($mode eq "raw") {
|
||||
if ($logPath !~ /.bz2$/) {
|
||||
$c->serve_static_file($logPath);
|
||||
} else {
|
||||
$c->stash->{'plain'} = { data => (scalar logContents($logPath)) || " " };
|
||||
$c->forward('Hydra::View::Plain');
|
||||
}
|
||||
$c->stash->{logPath} = $logPath;
|
||||
$c->stash->{finished} = $finished;
|
||||
$c->forward('Hydra::View::NixLog');
|
||||
return;
|
||||
}
|
||||
|
||||
elsif ($mode eq "tail-reload") {
|
||||
@ -201,12 +201,18 @@ sub checkPath {
|
||||
|
||||
|
||||
sub download : Chained('buildChain') PathPart {
|
||||
my ($self, $c, $productnr, @path) = @_;
|
||||
my ($self, $c, $productRef, @path) = @_;
|
||||
|
||||
$productnr = 1 if !defined $productnr;
|
||||
$productRef = 1 if !defined $productRef;
|
||||
|
||||
my $product = $c->stash->{build}->buildproducts->find({productnr => $productnr});
|
||||
notFound($c, "Build doesn't have a product #$productnr.") if !defined $product;
|
||||
my $product;
|
||||
if ($productRef =~ /^[0-9]+$/) {
|
||||
$product = $c->stash->{build}->buildproducts->find({productnr => $productRef});
|
||||
} else {
|
||||
$product = $c->stash->{build}->buildproducts->find({name => $productRef});
|
||||
@path = ($productRef, @path);
|
||||
}
|
||||
notFound($c, "Build doesn't have a product $productRef.") if !defined $product;
|
||||
|
||||
notFound($c, "Build product " . $product->path . " has disappeared.") unless -e $product->path;
|
||||
|
||||
@ -473,6 +479,23 @@ sub keep : Chained('buildChain') PathPart Args(1) {
|
||||
}
|
||||
|
||||
|
||||
sub bump : Chained('buildChain') PathPart('bump') {
|
||||
my ($self, $c, $x) = @_;
|
||||
|
||||
my $build = $c->stash->{build};
|
||||
|
||||
requireProjectOwner($c, $build->project); # FIXME: require admin?
|
||||
|
||||
$c->model('DB')->schema->txn_do(sub {
|
||||
$build->update({globalpriority => time()});
|
||||
});
|
||||
|
||||
$c->flash->{successMsg} = "Build has been bumped to the front of the queue.";
|
||||
|
||||
$c->res->redirect($c->uri_for($self->action_for("build"), $c->req->captures));
|
||||
}
|
||||
|
||||
|
||||
sub add_to_release : Chained('buildChain') PathPart('add-to-release') Args(0) {
|
||||
my ($self, $c) = @_;
|
||||
|
||||
|
@ -77,6 +77,9 @@ sub overview : Chained('job') PathPart('') Args(0) {
|
||||
, jobset => $c->stash->{jobset}->name
|
||||
, job => $c->stash->{job}->name
|
||||
})->count == 1 if $c->user_exists;
|
||||
|
||||
$c->stash->{metrics} = [ $job->buildmetrics->search(
|
||||
{ }, { select => ["name"], distinct => 1, order_by => "timestamp desc", }) ];
|
||||
}
|
||||
|
||||
|
||||
@ -110,6 +113,20 @@ sub output_sizes : Chained('job') PathPart('output-sizes') Args(0) {
|
||||
}
|
||||
|
||||
|
||||
sub metric : Chained('job') PathPart('metric') Args(1) {
|
||||
my ($self, $c, $metricName) = @_;
|
||||
|
||||
$c->stash->{template} = 'metric.tt';
|
||||
$c->stash->{metricName} = $metricName;
|
||||
|
||||
my @res = $c->stash->{job}->buildmetrics->search(
|
||||
{ name => $metricName },
|
||||
{ order_by => "timestamp", columns => [ "build", "name", "timestamp", "value", "unit" ] });
|
||||
|
||||
$self->status_ok($c, entity => [ map { { id => $_->get_column("build"), timestamp => $_ ->timestamp, value => $_->value, unit => $_->unit } } @res ]);
|
||||
}
|
||||
|
||||
|
||||
# Hydra::Base::Controller::ListBuilds needs this.
|
||||
sub get_builds : Chained('job') PathPart('') CaptureArgs(0) {
|
||||
my ($self, $c) = @_;
|
||||
|
@ -162,7 +162,7 @@ sub edit : Chained('jobsetChain') PathPart Args(0) {
|
||||
requireProjectOwner($c, $c->stash->{project});
|
||||
|
||||
$c->stash->{template} = 'edit-jobset.tt';
|
||||
$c->stash->{edit} = 1;
|
||||
$c->stash->{edit} = !defined $c->stash->{params}->{cloneJobset};
|
||||
$c->stash->{cloneJobset} = defined $c->stash->{params}->{cloneJobset};
|
||||
$c->stash->{totalShares} = getTotalShares($c->model('DB')->schema);
|
||||
}
|
||||
@ -220,6 +220,9 @@ sub updateJobset {
|
||||
my $enabled = int($c->stash->{params}->{enabled});
|
||||
die if $enabled < 0 || $enabled > 2;
|
||||
|
||||
my $shares = int($c->stash->{params}->{schedulingshares} // 1);
|
||||
error($c, "The number of scheduling shares must be positive.") if $shares <= 0;
|
||||
|
||||
$jobset->update(
|
||||
{ name => $jobsetName
|
||||
, description => trim($c->stash->{params}->{"description"})
|
||||
@ -232,7 +235,7 @@ sub updateJobset {
|
||||
, keepnr => int(trim($c->stash->{params}->{keepnr}))
|
||||
, checkinterval => int(trim($c->stash->{params}->{checkinterval}))
|
||||
, triggertime => $enabled ? $jobset->triggertime // time() : undef
|
||||
, schedulingshares => int($c->stash->{params}->{schedulingshares})
|
||||
, schedulingshares => $shares
|
||||
});
|
||||
|
||||
$jobset->project->jobsetrenames->search({ from_ => $jobsetName })->delete;
|
||||
|
@ -180,13 +180,26 @@ sub cancel : Chained('eval') PathPart('cancel') Args(0) {
|
||||
sub restart_aborted : Chained('eval') PathPart('restart-aborted') Args(0) {
|
||||
my ($self, $c) = @_;
|
||||
requireProjectOwner($c, $c->stash->{eval}->project);
|
||||
my $builds = $c->stash->{eval}->builds->search({ finished => 1, buildstatus => { -in => [3, 4] } });
|
||||
my $builds = $c->stash->{eval}->builds->search({ finished => 1, buildstatus => { -in => [3, 4, 9] } });
|
||||
my $n = restartBuilds($c->model('DB')->schema, $builds);
|
||||
$c->flash->{successMsg} = "$n builds have been restarted.";
|
||||
$c->res->redirect($c->uri_for($c->controller('JobsetEval')->action_for('view'), $c->req->captures));
|
||||
}
|
||||
|
||||
|
||||
sub bump : Chained('eval') PathPart('bump') Args(0) {
|
||||
my ($self, $c) = @_;
|
||||
requireProjectOwner($c, $c->stash->{eval}->project); # FIXME: require admin?
|
||||
my $builds = $c->stash->{eval}->builds->search({ finished => 0 });
|
||||
my $n = $builds->count();
|
||||
$c->model('DB')->schema->txn_do(sub {
|
||||
$builds->update({globalpriority => time()});
|
||||
});
|
||||
$c->flash->{successMsg} = "$n builds have been bumped to the front of the queue.";
|
||||
$c->res->redirect($c->uri_for($c->controller('JobsetEval')->action_for('view'), $c->req->captures));
|
||||
}
|
||||
|
||||
|
||||
# Hydra::Base::Controller::NixChannel needs this.
|
||||
sub nix : Chained('eval') PathPart('channel') CaptureArgs(0) {
|
||||
my ($self, $c) = @_;
|
||||
|
@ -30,7 +30,7 @@ sub begin :Private {
|
||||
$c->stash->{version} = $ENV{"HYDRA_RELEASE"} || "<devel>";
|
||||
$c->stash->{nixVersion} = $ENV{"NIX_RELEASE"} || "<devel>";
|
||||
$c->stash->{curTime} = time;
|
||||
$c->stash->{logo} = ($c->config->{hydra_logo} // $ENV{"HYDRA_LOGO"}) ? "/logo" : "";
|
||||
$c->stash->{logo} = defined $c->config->{hydra_logo} ? "/logo" : "";
|
||||
$c->stash->{tracker} = $ENV{"HYDRA_TRACKER"};
|
||||
$c->stash->{flashMsg} = $c->flash->{flashMsg};
|
||||
$c->stash->{successMsg} = $c->flash->{successMsg};
|
||||
@ -88,7 +88,7 @@ sub queue_GET {
|
||||
$c->stash->{flashMsg} //= $c->flash->{buildMsg};
|
||||
$self->status_ok(
|
||||
$c,
|
||||
entity => [$c->model('DB::Builds')->search({finished => 0}, { order_by => ["id"]})]
|
||||
entity => [$c->model('DB::Builds')->search({finished => 0}, { order_by => ["globalpriority desc", "id"]})]
|
||||
);
|
||||
}
|
||||
|
||||
@ -111,14 +111,6 @@ sub machines :Local Args(0) {
|
||||
# Add entry for localhost.
|
||||
${$machines}{''} //= {};
|
||||
|
||||
# Get the last finished build step for each machine.
|
||||
foreach my $m (keys %{$machines}) {
|
||||
my $idle = $c->model('DB::BuildSteps')->find(
|
||||
{ machine => "$m", stoptime => { '!=', undef } },
|
||||
{ order_by => 'stoptime desc', rows => 1 });
|
||||
${$machines}{$m}{'idle'} = $idle ? $idle->stoptime : 0;
|
||||
}
|
||||
|
||||
$c->stash->{machines} = $machines;
|
||||
$c->stash->{steps} = [ $c->model('DB::BuildSteps')->search(
|
||||
{ finished => 0, 'me.busy' => 1, 'build.busy' => 1, },
|
||||
@ -270,7 +262,7 @@ sub narinfo :LocalRegex('^([a-z0-9]+).narinfo$') :Args(0) {
|
||||
|
||||
sub logo :Local {
|
||||
my ($self, $c) = @_;
|
||||
my $path = $c->config->{hydra_logo} // $ENV{"HYDRA_LOGO"} // die("Logo not set!");
|
||||
my $path = $c->config->{hydra_logo} // die("Logo not set!");
|
||||
$c->serve_static_file($path);
|
||||
}
|
||||
|
||||
@ -293,6 +285,30 @@ sub evals :Local Args(0) {
|
||||
}
|
||||
|
||||
|
||||
sub steps :Local Args(0) {
|
||||
my ($self, $c) = @_;
|
||||
|
||||
$c->stash->{template} = 'steps.tt';
|
||||
|
||||
my $page = int($c->req->param('page') || "1") || 1;
|
||||
|
||||
my $resultsPerPage = 20;
|
||||
|
||||
$c->stash->{page} = $page;
|
||||
$c->stash->{resultsPerPage} = $resultsPerPage;
|
||||
$c->stash->{steps} = [ $c->model('DB::BuildSteps')->search(
|
||||
{ starttime => { '!=', undef },
|
||||
stoptime => { '!=', undef }
|
||||
},
|
||||
{ order_by => [ "stoptime desc" ],
|
||||
rows => $resultsPerPage,
|
||||
offset => ($page - 1) * $resultsPerPage
|
||||
}) ];
|
||||
|
||||
$c->stash->{total} = approxTableSize($c, "IndexBuildStepsOnStopTime");
|
||||
}
|
||||
|
||||
|
||||
sub search :Local Args(0) {
|
||||
my ($self, $c) = @_;
|
||||
$c->stash->{template} = 'search.tt';
|
||||
@ -340,9 +356,9 @@ sub search :Local Args(0) {
|
||||
$c->stash->{buildsdrv} = [ $c->model('DB::Builds')->search(
|
||||
{ "drvpath" => trim($query) },
|
||||
{ order_by => ["id desc"] } ) ];
|
||||
|
||||
}
|
||||
|
||||
|
||||
sub log :Local :Args(1) {
|
||||
my ($self, $c, $path) = @_;
|
||||
|
||||
@ -352,8 +368,8 @@ sub log :Local :Args(1) {
|
||||
my $logPath = findLog($c, $path, @outpaths);
|
||||
notFound($c, "The build log of $path is not available.") unless defined $logPath;
|
||||
|
||||
$c->stash->{'plain'} = { data => (scalar logContents($logPath)) || " " };
|
||||
$c->forward('Hydra::View::Plain');
|
||||
$c->stash->{logPath} = $logPath;
|
||||
$c->forward('Hydra::View::NixLog');
|
||||
}
|
||||
|
||||
|
||||
|
@ -22,20 +22,10 @@ use Hydra::Helper::CatalystUtils;
|
||||
our @ISA = qw(Exporter);
|
||||
our @EXPORT = qw(
|
||||
fetchInput evalJobs checkBuild inputsToArgs
|
||||
getReleaseName addBuildProducts restartBuild
|
||||
getPrevJobsetEval
|
||||
restartBuild getPrevJobsetEval
|
||||
);
|
||||
|
||||
|
||||
sub getReleaseName {
|
||||
my ($outPath) = @_;
|
||||
return undef unless -f "$outPath/nix-support/hydra-release-name";
|
||||
my $releaseName = read_file("$outPath/nix-support/hydra-release-name");
|
||||
chomp $releaseName;
|
||||
return $releaseName;
|
||||
}
|
||||
|
||||
|
||||
sub parseJobName {
|
||||
# Parse a job specification of the form `<project>:<jobset>:<job>
|
||||
# [attrs]'. The project, jobset and attrs may be omitted. The
|
||||
@ -299,7 +289,7 @@ sub inputsToArgs {
|
||||
my ($inputInfo, $exprType) = @_;
|
||||
my @res = ();
|
||||
|
||||
foreach my $input (keys %{$inputInfo}) {
|
||||
foreach my $input (sort keys %{$inputInfo}) {
|
||||
push @res, "-I", "$input=$inputInfo->{$input}->[0]->{storePath}"
|
||||
if scalar @{$inputInfo->{$input}} == 1
|
||||
&& defined $inputInfo->{$input}->[0]->{storePath};
|
||||
@ -367,80 +357,6 @@ sub evalJobs {
|
||||
}
|
||||
|
||||
|
||||
sub addBuildProducts {
|
||||
my ($db, $build) = @_;
|
||||
|
||||
my $productnr = 1;
|
||||
my $explicitProducts = 0;
|
||||
my $storeDir = $Nix::Config::storeDir . "/";
|
||||
|
||||
foreach my $output ($build->buildoutputs->all) {
|
||||
my $outPath = $output->path;
|
||||
if (-e "$outPath/nix-support/hydra-build-products") {
|
||||
$explicitProducts = 1;
|
||||
|
||||
open LIST, "$outPath/nix-support/hydra-build-products" or die;
|
||||
while (<LIST>) {
|
||||
/^([\w\-]+)\s+([\w\-]+)\s+("[^"]*"|\S+)(\s+(\S+))?$/ or next;
|
||||
my $type = $1;
|
||||
my $subtype = $2 eq "none" ? "" : $2;
|
||||
my $path = substr($3, 0, 1) eq "\"" ? substr($3, 1, -1) : $3;
|
||||
my $defaultPath = $5;
|
||||
|
||||
# Ensure that the path exists and points into the Nix store.
|
||||
next unless File::Spec->file_name_is_absolute($path);
|
||||
$path = pathIsInsidePrefix($path, $Nix::Config::storeDir);
|
||||
next unless defined $path;
|
||||
next unless -e $path;
|
||||
|
||||
# FIXME: check that the path is in the input closure
|
||||
# of the build?
|
||||
|
||||
my $fileSize, my $sha1, my $sha256;
|
||||
|
||||
if (-f $path) {
|
||||
my $st = stat($path) or die "cannot stat $path: $!";
|
||||
$fileSize = $st->size;
|
||||
$sha1 = hashFile("sha1", 0, $path);
|
||||
$sha256 = hashFile("sha256", 0, $path);
|
||||
}
|
||||
|
||||
my $name = $path eq $outPath ? "" : basename $path;
|
||||
|
||||
$db->resultset('BuildProducts')->create(
|
||||
{ build => $build->id
|
||||
, productnr => $productnr++
|
||||
, type => $type
|
||||
, subtype => $subtype
|
||||
, path => $path
|
||||
, filesize => $fileSize
|
||||
, sha1hash => $sha1
|
||||
, sha256hash => $sha256
|
||||
, name => $name
|
||||
, defaultpath => $defaultPath
|
||||
});
|
||||
}
|
||||
close LIST;
|
||||
}
|
||||
}
|
||||
|
||||
return if $explicitProducts;
|
||||
|
||||
foreach my $output ($build->buildoutputs->all) {
|
||||
my $outPath = $output->path;
|
||||
next unless -d $outPath;
|
||||
$db->resultset('BuildProducts')->create(
|
||||
{ build => $build->id
|
||||
, productnr => $productnr++
|
||||
, type => "nix-build"
|
||||
, subtype => $output->name eq "out" ? "" : $output->name
|
||||
, path => $outPath
|
||||
, name => $build->nixname
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# Return the most recent evaluation of the given jobset (that
|
||||
# optionally had new builds), or undefined if no such evaluation
|
||||
# exists.
|
||||
@ -513,40 +429,6 @@ sub checkBuild {
|
||||
|
||||
my $time = time();
|
||||
|
||||
# Are the outputs already in the Nix store? Then add a cached
|
||||
# build.
|
||||
my %extraFlags;
|
||||
my $allValid = 1;
|
||||
my $buildStatus;
|
||||
my $releaseName;
|
||||
foreach my $name (@outputNames) {
|
||||
my $path = $buildInfo->{outputs}->{$name};
|
||||
if (isValidPath($path)) {
|
||||
if (-f "$path/nix-support/failed") {
|
||||
$buildStatus = 6;
|
||||
} else {
|
||||
$buildStatus //= 0;
|
||||
}
|
||||
$releaseName //= getReleaseName($path);
|
||||
} else {
|
||||
$allValid = 0;
|
||||
last;
|
||||
}
|
||||
}
|
||||
|
||||
if ($allValid) {
|
||||
%extraFlags =
|
||||
( finished => 1
|
||||
, iscachedbuild => 1
|
||||
, buildstatus => $buildStatus
|
||||
, starttime => $time
|
||||
, stoptime => $time
|
||||
, releasename => $releaseName
|
||||
);
|
||||
} else {
|
||||
%extraFlags = ( finished => 0 );
|
||||
}
|
||||
|
||||
# Add the build to the database.
|
||||
$build = $job->builds->create(
|
||||
{ timestamp => $time
|
||||
@ -562,10 +444,10 @@ sub checkBuild {
|
||||
, nixexprinput => $jobset->nixexprinput
|
||||
, nixexprpath => $jobset->nixexprpath
|
||||
, priority => $buildInfo->{schedulingPriority}
|
||||
, finished => 0
|
||||
, busy => 0
|
||||
, locker => ""
|
||||
, iscurrent => 1
|
||||
, %extraFlags
|
||||
});
|
||||
|
||||
$build->buildoutputs->create({ name => $_, path => $buildInfo->{outputs}->{$_} })
|
||||
@ -574,13 +456,7 @@ sub checkBuild {
|
||||
$buildMap->{$build->id} = { id => $build->id, jobName => $jobName, new => 1, drvPath => $drvPath };
|
||||
$$jobOutPathMap{$jobName . "\t" . $firstOutputPath} = $build->id;
|
||||
|
||||
if ($build->iscachedbuild) {
|
||||
#print STDERR " marked as cached build ", $build->id, "\n";
|
||||
addBuildProducts($db, $build);
|
||||
notifyBuildFinished($plugins, $build, []);
|
||||
} else {
|
||||
print STDERR "added build ${\$build->id} (${\$jobset->project->name}:${\$jobset->name}:$jobName)\n";
|
||||
}
|
||||
print STDERR "added build ${\$build->id} (${\$jobset->project->name}:${\$jobset->name}:$jobName)\n";
|
||||
});
|
||||
|
||||
return $build;
|
||||
|
@ -23,6 +23,7 @@ our @EXPORT = qw(
|
||||
showStatus
|
||||
getResponsibleAuthors
|
||||
setCacheHeaders
|
||||
approxTableSize
|
||||
);
|
||||
|
||||
|
||||
@ -296,4 +297,11 @@ sub setCacheHeaders {
|
||||
}
|
||||
|
||||
|
||||
sub approxTableSize {
|
||||
my ($c, $name) = @_;
|
||||
return $c->model('DB')->schema->storage->dbh->selectrow_hashref(
|
||||
"select reltuples::int from pg_class where relname = lower(?)", { }, $name)->{"reltuples"};
|
||||
}
|
||||
|
||||
|
||||
1;
|
||||
|
@ -13,7 +13,7 @@ sub sendEmail {
|
||||
my ($config, $to, $subject, $body, $extraHeaders) = @_;
|
||||
|
||||
my $url = getBaseUrl($config);
|
||||
my $sender = $config->{'notification_sender'} // (($ENV{'USER'} // "hydra") . "@" . $url);
|
||||
my $sender = $config->{'notification_sender'} // (($ENV{'USER'} // "hydra") . "@" . hostname_long);
|
||||
|
||||
my @headers = (
|
||||
To => $to,
|
||||
|
@ -133,8 +133,9 @@ sub getDrvLogPath {
|
||||
my $base = basename $drvPath;
|
||||
my $bucketed = substr($base, 0, 2) . "/" . substr($base, 2);
|
||||
my $fn = ($ENV{NIX_LOG_DIR} || "/nix/var/log/nix") . "/drvs/";
|
||||
for ($fn . $bucketed . ".bz2", $fn . $bucketed, $fn . $base . ".bz2", $fn . $base) {
|
||||
return $_ if (-f $_);
|
||||
my $fn2 = Hydra::Model::DB::getHydraPath . "/build-logs/";
|
||||
for ($fn2 . $bucketed, $fn2 . $bucketed . ".bz2", $fn . $bucketed . ".bz2", $fn . $bucketed, $fn . $base . ".bz2", $fn . $base) {
|
||||
return $_ if -f $_;
|
||||
}
|
||||
return undef;
|
||||
}
|
||||
@ -423,7 +424,7 @@ sub getTotalShares {
|
||||
sub cancelBuilds($$) {
|
||||
my ($db, $builds) = @_;
|
||||
return txn_do($db, sub {
|
||||
$builds = $builds->search({ finished => 0, busy => 0 });
|
||||
$builds = $builds->search({ finished => 0 });
|
||||
my $n = $builds->count;
|
||||
my $time = time();
|
||||
$builds->update(
|
||||
@ -448,7 +449,7 @@ sub restartBuilds($$) {
|
||||
|
||||
foreach my $build ($builds->all) {
|
||||
next if !isValidPath($build->drvpath);
|
||||
push @paths, $build->drvpath;
|
||||
push @paths, $_->path foreach $build->buildoutputs->all;
|
||||
push @buildIds, $build->id;
|
||||
registerRoot $build->drvpath;
|
||||
}
|
||||
@ -464,9 +465,10 @@ sub restartBuilds($$) {
|
||||
# !!! Should do this in a trigger.
|
||||
$db->resultset('JobsetEvals')->search({ build => \@buildIds }, { join => 'buildIds' })->update({ nrsucceeded => undef });
|
||||
|
||||
# Clear Nix's negative failure cache.
|
||||
# Clear the failed paths cache.
|
||||
# FIXME: Add this to the API.
|
||||
system("nix-store", "--clear-failed-paths", @paths);
|
||||
# FIXME: clear the dependencies?
|
||||
$db->resultset('FailedPaths')->search({ path => [ @paths ]})->delete;
|
||||
});
|
||||
|
||||
return scalar(@buildIds);
|
||||
|
@ -163,7 +163,7 @@ sub getCommits {
|
||||
|
||||
my ($uri, $branch, $deepClone) = _parseValue($value);
|
||||
|
||||
my $clonePath = $self->_cloneRepo($uri, $branch, $deepClone);
|
||||
my $clonePath = getSCMCacheDir . "/git/" . sha256_hex($uri);
|
||||
|
||||
my $out = grab(cmd => ["git", "log", "--pretty=format:%H%x09%an%x09%ae%x09%at", "$rev1..$rev2"], dir => $clonePath);
|
||||
|
||||
|
187
src/lib/Hydra/Schema/BuildMetrics.pm
Normal file
187
src/lib/Hydra/Schema/BuildMetrics.pm
Normal file
@ -0,0 +1,187 @@
|
||||
use utf8;
|
||||
package Hydra::Schema::BuildMetrics;
|
||||
|
||||
# Created by DBIx::Class::Schema::Loader
|
||||
# DO NOT MODIFY THE FIRST PART OF THIS FILE
|
||||
|
||||
=head1 NAME
|
||||
|
||||
Hydra::Schema::BuildMetrics
|
||||
|
||||
=cut
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
use base 'DBIx::Class::Core';
|
||||
|
||||
=head1 COMPONENTS LOADED
|
||||
|
||||
=over 4
|
||||
|
||||
=item * L<Hydra::Component::ToJSON>
|
||||
|
||||
=back
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||
|
||||
=head1 TABLE: C<BuildMetrics>
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->table("BuildMetrics");
|
||||
|
||||
=head1 ACCESSORS
|
||||
|
||||
=head2 build
|
||||
|
||||
data_type: 'integer'
|
||||
is_foreign_key: 1
|
||||
is_nullable: 0
|
||||
|
||||
=head2 name
|
||||
|
||||
data_type: 'text'
|
||||
is_nullable: 0
|
||||
|
||||
=head2 unit
|
||||
|
||||
data_type: 'text'
|
||||
is_nullable: 1
|
||||
|
||||
=head2 value
|
||||
|
||||
data_type: 'double precision'
|
||||
is_nullable: 0
|
||||
|
||||
=head2 project
|
||||
|
||||
data_type: 'text'
|
||||
is_foreign_key: 1
|
||||
is_nullable: 0
|
||||
|
||||
=head2 jobset
|
||||
|
||||
data_type: 'text'
|
||||
is_foreign_key: 1
|
||||
is_nullable: 0
|
||||
|
||||
=head2 job
|
||||
|
||||
data_type: 'text'
|
||||
is_foreign_key: 1
|
||||
is_nullable: 0
|
||||
|
||||
=head2 timestamp
|
||||
|
||||
data_type: 'integer'
|
||||
is_nullable: 0
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->add_columns(
|
||||
"build",
|
||||
{ data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
|
||||
"name",
|
||||
{ data_type => "text", is_nullable => 0 },
|
||||
"unit",
|
||||
{ data_type => "text", is_nullable => 1 },
|
||||
"value",
|
||||
{ data_type => "double precision", is_nullable => 0 },
|
||||
"project",
|
||||
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
||||
"jobset",
|
||||
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
||||
"job",
|
||||
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
|
||||
"timestamp",
|
||||
{ data_type => "integer", is_nullable => 0 },
|
||||
);
|
||||
|
||||
=head1 PRIMARY KEY
|
||||
|
||||
=over 4
|
||||
|
||||
=item * L</build>
|
||||
|
||||
=item * L</name>
|
||||
|
||||
=back
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->set_primary_key("build", "name");
|
||||
|
||||
=head1 RELATIONS
|
||||
|
||||
=head2 build
|
||||
|
||||
Type: belongs_to
|
||||
|
||||
Related object: L<Hydra::Schema::Builds>
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->belongs_to(
|
||||
"build",
|
||||
"Hydra::Schema::Builds",
|
||||
{ id => "build" },
|
||||
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" },
|
||||
);
|
||||
|
||||
=head2 job
|
||||
|
||||
Type: belongs_to
|
||||
|
||||
Related object: L<Hydra::Schema::Jobs>
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->belongs_to(
|
||||
"job",
|
||||
"Hydra::Schema::Jobs",
|
||||
{ jobset => "jobset", name => "job", project => "project" },
|
||||
{ is_deferrable => 0, on_delete => "NO ACTION", on_update => "CASCADE" },
|
||||
);
|
||||
|
||||
=head2 jobset
|
||||
|
||||
Type: belongs_to
|
||||
|
||||
Related object: L<Hydra::Schema::Jobsets>
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->belongs_to(
|
||||
"jobset",
|
||||
"Hydra::Schema::Jobsets",
|
||||
{ name => "jobset", project => "project" },
|
||||
{ is_deferrable => 0, on_delete => "NO ACTION", on_update => "CASCADE" },
|
||||
);
|
||||
|
||||
=head2 project
|
||||
|
||||
Type: belongs_to
|
||||
|
||||
Related object: L<Hydra::Schema::Projects>
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->belongs_to(
|
||||
"project",
|
||||
"Hydra::Schema::Projects",
|
||||
{ name => "project" },
|
||||
{ is_deferrable => 0, on_delete => "NO ACTION", on_update => "CASCADE" },
|
||||
);
|
||||
|
||||
|
||||
# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:52:20
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:qoPm5/le+sVHigW4Dmum2Q
|
||||
|
||||
sub json_hint {
|
||||
return { columns => ['value', 'unit'] };
|
||||
}
|
||||
|
||||
1;
|
@ -138,6 +138,12 @@ __PACKAGE__->table("Builds");
|
||||
default_value: 0
|
||||
is_nullable: 0
|
||||
|
||||
=head2 globalpriority
|
||||
|
||||
data_type: 'integer'
|
||||
default_value: 0
|
||||
is_nullable: 0
|
||||
|
||||
=head2 busy
|
||||
|
||||
data_type: 'integer'
|
||||
@ -241,6 +247,8 @@ __PACKAGE__->add_columns(
|
||||
{ data_type => "text", is_nullable => 1 },
|
||||
"priority",
|
||||
{ data_type => "integer", default_value => 0, is_nullable => 0 },
|
||||
"globalpriority",
|
||||
{ data_type => "integer", default_value => 0, is_nullable => 0 },
|
||||
"busy",
|
||||
{ data_type => "integer", default_value => 0, is_nullable => 0 },
|
||||
"locker",
|
||||
@ -341,6 +349,21 @@ __PACKAGE__->has_many(
|
||||
undef,
|
||||
);
|
||||
|
||||
=head2 buildmetrics
|
||||
|
||||
Type: has_many
|
||||
|
||||
Related object: L<Hydra::Schema::BuildMetrics>
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->has_many(
|
||||
"buildmetrics",
|
||||
"Hydra::Schema::BuildMetrics",
|
||||
{ "foreign.build" => "self.id" },
|
||||
undef,
|
||||
);
|
||||
|
||||
=head2 buildoutputs
|
||||
|
||||
Type: has_many
|
||||
@ -401,6 +424,21 @@ __PACKAGE__->has_many(
|
||||
undef,
|
||||
);
|
||||
|
||||
=head2 buildsteps_propagatedfroms
|
||||
|
||||
Type: has_many
|
||||
|
||||
Related object: L<Hydra::Schema::BuildSteps>
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->has_many(
|
||||
"buildsteps_propagatedfroms",
|
||||
"Hydra::Schema::BuildSteps",
|
||||
{ "foreign.propagatedfrom" => "self.id" },
|
||||
undef,
|
||||
);
|
||||
|
||||
=head2 job
|
||||
|
||||
Type: belongs_to
|
||||
@ -509,19 +547,19 @@ __PACKAGE__->many_to_many(
|
||||
|
||||
Type: many_to_many
|
||||
|
||||
Composing rels: L</aggregateconstituents_constituents> -> constituent
|
||||
Composing rels: L</aggregateconstituents_aggregates> -> constituent
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->many_to_many(
|
||||
"constituents",
|
||||
"aggregateconstituents_constituents",
|
||||
"aggregateconstituents_aggregates",
|
||||
"constituent",
|
||||
);
|
||||
|
||||
|
||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2014-09-30 15:38:03
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:kMPje7yi/yDqxGRQcC2I/Q
|
||||
# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-08-10 15:10:41
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:rjifgnPtjY96MaQ7eiGzaA
|
||||
|
||||
__PACKAGE__->has_many(
|
||||
"dependents",
|
||||
@ -615,6 +653,7 @@ my %hint = (
|
||||
buildoutputs => 'name',
|
||||
buildinputs_builds => 'name',
|
||||
buildproducts => 'productnr',
|
||||
buildmetrics => 'name',
|
||||
}
|
||||
);
|
||||
|
||||
|
65
src/lib/Hydra/Schema/FailedPaths.pm
Normal file
65
src/lib/Hydra/Schema/FailedPaths.pm
Normal file
@ -0,0 +1,65 @@
|
||||
use utf8;
|
||||
package Hydra::Schema::FailedPaths;
|
||||
|
||||
# Created by DBIx::Class::Schema::Loader
|
||||
# DO NOT MODIFY THE FIRST PART OF THIS FILE
|
||||
|
||||
=head1 NAME
|
||||
|
||||
Hydra::Schema::FailedPaths
|
||||
|
||||
=cut
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
use base 'DBIx::Class::Core';
|
||||
|
||||
=head1 COMPONENTS LOADED
|
||||
|
||||
=over 4
|
||||
|
||||
=item * L<Hydra::Component::ToJSON>
|
||||
|
||||
=back
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
|
||||
|
||||
=head1 TABLE: C<FailedPaths>
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->table("FailedPaths");
|
||||
|
||||
=head1 ACCESSORS
|
||||
|
||||
=head2 path
|
||||
|
||||
data_type: 'text'
|
||||
is_nullable: 0
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->add_columns("path", { data_type => "text", is_nullable => 0 });
|
||||
|
||||
=head1 PRIMARY KEY
|
||||
|
||||
=over 4
|
||||
|
||||
=item * L</path>
|
||||
|
||||
=back
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->set_primary_key("path");
|
||||
|
||||
|
||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2015-06-10 14:48:16
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:WFgjfjH+szE6Ntcicmaflw
|
||||
|
||||
|
||||
# You can replace this text with custom code or comments, and it will be preserved on regeneration
|
||||
1;
|
@ -81,6 +81,25 @@ __PACKAGE__->set_primary_key("project", "jobset", "name");
|
||||
|
||||
=head1 RELATIONS
|
||||
|
||||
=head2 buildmetrics
|
||||
|
||||
Type: has_many
|
||||
|
||||
Related object: L<Hydra::Schema::BuildMetrics>
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->has_many(
|
||||
"buildmetrics",
|
||||
"Hydra::Schema::BuildMetrics",
|
||||
{
|
||||
"foreign.job" => "self.name",
|
||||
"foreign.jobset" => "self.jobset",
|
||||
"foreign.project" => "self.project",
|
||||
},
|
||||
undef,
|
||||
);
|
||||
|
||||
=head2 builds
|
||||
|
||||
Type: has_many
|
||||
@ -150,7 +169,7 @@ __PACKAGE__->has_many(
|
||||
);
|
||||
|
||||
|
||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2014-09-29 19:41:42
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:lnZSd0gDXgLk8WQeAFqByA
|
||||
# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:52:20
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:vDAo9bzLca+QWfhOb9OLMg
|
||||
|
||||
1;
|
||||
|
@ -184,6 +184,24 @@ __PACKAGE__->set_primary_key("project", "name");
|
||||
|
||||
=head1 RELATIONS
|
||||
|
||||
=head2 buildmetrics
|
||||
|
||||
Type: has_many
|
||||
|
||||
Related object: L<Hydra::Schema::BuildMetrics>
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->has_many(
|
||||
"buildmetrics",
|
||||
"Hydra::Schema::BuildMetrics",
|
||||
{
|
||||
"foreign.jobset" => "self.name",
|
||||
"foreign.project" => "self.project",
|
||||
},
|
||||
undef,
|
||||
);
|
||||
|
||||
=head2 builds
|
||||
|
||||
Type: has_many
|
||||
@ -320,8 +338,8 @@ __PACKAGE__->has_many(
|
||||
);
|
||||
|
||||
|
||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2014-04-23 23:13:51
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:CO0aE+jrjB+UrwGRzWZLlw
|
||||
# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:52:20
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Coci9FdBAvUO9T3st2NEqA
|
||||
|
||||
my %hint = (
|
||||
columns => [
|
||||
|
@ -106,6 +106,21 @@ __PACKAGE__->set_primary_key("name");
|
||||
|
||||
=head1 RELATIONS
|
||||
|
||||
=head2 buildmetrics
|
||||
|
||||
Type: has_many
|
||||
|
||||
Related object: L<Hydra::Schema::BuildMetrics>
|
||||
|
||||
=cut
|
||||
|
||||
__PACKAGE__->has_many(
|
||||
"buildmetrics",
|
||||
"Hydra::Schema::BuildMetrics",
|
||||
{ "foreign.project" => "self.name" },
|
||||
undef,
|
||||
);
|
||||
|
||||
=head2 builds
|
||||
|
||||
Type: has_many
|
||||
@ -267,8 +282,8 @@ Composing rels: L</projectmembers> -> username
|
||||
__PACKAGE__->many_to_many("usernames", "projectmembers", "username");
|
||||
|
||||
|
||||
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2014-04-23 23:13:08
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:fkd9ruEoVSBGIktmAj4u4g
|
||||
# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:52:20
|
||||
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:67kWIE0IGmEJTvOIATAKaw
|
||||
|
||||
my %hint = (
|
||||
columns => [
|
||||
|
75
src/lib/Hydra/Schema/SystemStatus.pm
Normal file
75
src/lib/Hydra/Schema/SystemStatus.pm
Normal file
@ -0,0 +1,75 @@
|
||||
use utf8;
package Hydra::Schema::SystemStatus;

# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE

=head1 NAME

Hydra::Schema::SystemStatus

=cut

use strict;
use warnings;

use base 'DBIx::Class::Core';

=head1 COMPONENTS LOADED

=over 4

=item * L<Hydra::Component::ToJSON>

=back

=cut

__PACKAGE__->load_components("+Hydra::Component::ToJSON");

=head1 TABLE: C<SystemStatus>

=cut

__PACKAGE__->table("SystemStatus");

=head1 ACCESSORS

=head2 what

  data_type: 'text'
  is_nullable: 0

=head2 status

  data_type: 'json'
  is_nullable: 0

=cut

__PACKAGE__->add_columns(
  "what",
  { data_type => "text", is_nullable => 0 },
  "status",
  { data_type => "json", is_nullable => 0 },
);

=head1 PRIMARY KEY

=over 4

=item * L</what>

=back

=cut

__PACKAGE__->set_primary_key("what");


# Created by DBIx::Class::Schema::Loader v0.07043 @ 2015-07-30 16:01:22
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:JCYi4+HwM22iucdFkhBjMg


# You can replace this text with custom code or comments, and it will be preserved on regeneration
#
# NOTE(review): auto-generated DBIx::Class result class for the
# C<SystemStatus> table — a simple key/value store mapping a status
# name ("what", the primary key) to a JSON blob ("status").
# Everything above the md5sum marker is overwritten by
# DBIx::Class::Schema::Loader on regeneration; only the region below
# the marker is preserved, so custom code belongs here.
1;
|
30
src/lib/Hydra/View/NixLog.pm
Normal file
30
src/lib/Hydra/View/NixLog.pm
Normal file
@ -0,0 +1,30 @@
|
||||
package Hydra::View::NixLog;

use strict;
use warnings;
use base qw/Catalyst::View/;
use Hydra::Helper::CatalystUtils;

# Stream a Nix build log (plain or bzip2-compressed) to the client as
# text/plain.  Expects the absolute path of the log file in
# $c->stash->{logPath}; when $c->stash->{finished} is true the log can
# no longer change, so it is served with long-lived cache headers.
#
# Fixes over the previous version:
#   * list-form pipe open instead of interpolating $logPath into a
#     shell command line (command-injection hazard);
#   * three-arg read open instead of 2-arg "<$logPath";
#   * lexical filehandle instead of indirect-object "new IO::Handle"
#     (IO::Handle was never loaded);
#   * error messages on die instead of a bare "die".
sub process {
    my ($self, $c) = @_;

    my $logPath = $c->stash->{logPath};

    $c->response->content_type('text/plain');

    my $fh;
    if ($logPath =~ /\.bz2$/) {
        # List form bypasses the shell entirely, so metacharacters or
        # quotes in the path cannot inject commands.  "bzip2 -dc FILE"
        # decompresses to stdout, same as the old "< FILE" redirect.
        open $fh, '-|', 'bzip2', '-dc', $logPath
            or die "cannot run bzip2 on '$logPath': $!";
    } else {
        # Three-arg open: a leading '>' or '|' in the path can no
        # longer be misread as an open mode.
        open $fh, '<', $logPath
            or die "cannot open '$logPath': $!";
    }
    binmode($fh);

    # A finished build's log is immutable, so it may be cached for a year.
    setCacheHeaders($c, 365 * 24 * 60 * 60) if $c->stash->{finished};

    # Catalyst streams a filehandle response body to the client.
    $c->response->body($fh);

    return 1;
}

1;
|
@ -9,7 +9,7 @@ __PACKAGE__->config(
|
||||
ENCODING => 'utf-8',
|
||||
PRE_CHOMP => 1,
|
||||
POST_CHOMP => 1,
|
||||
expose_methods => [qw/buildLogExists buildStepLogExists jobExists/]);
|
||||
expose_methods => [qw/buildLogExists buildStepLogExists jobExists stripSSHUser/]);
|
||||
|
||||
sub buildLogExists {
|
||||
my ($self, $c, $build) = @_;
|
||||
@ -23,6 +23,16 @@ sub buildStepLogExists {
|
||||
return defined findLog($c, $step->drvpath, @outPaths);
|
||||
}
|
||||
|
||||
|
||||
# Template helper: strip a leading "user@" prefix from a build-machine
# name, returning the name unchanged when it contains no '@'.
sub stripSSHUser {
    my ($self, $c, $name) = @_;
    # Greedy ".*" means everything after the *last* '@' is returned.
    return $name =~ /^.*@(.*)$/ ? $1 : $name;
}
|
||||
|
||||
# Check whether the given job is a member of the most recent jobset
|
||||
# evaluation.
|
||||
sub jobExists {
|
||||
|
Reference in New Issue
Block a user