Merge branch 'master' into persona

Conflicts:
	src/lib/Hydra/Helper/CatalystUtils.pm
	src/root/layout.tt
	src/root/topbar.tt
	src/root/user.tt
Eelco Dolstra committed on 2013-11-05 11:11:48 +01:00
114 changed files with 3593 additions and 1883 deletions


@ -7,46 +7,6 @@ use Hydra::Helper::Nix;
use Hydra::Helper::CatalystUtils;
sub getJobStatus {
my ($self, $c) = @_;
my $maintainer = $c->request->params->{"maintainer"};
my $latest = $c->stash->{jobStatus}->search(
defined $maintainer ? { maintainers => { like => "%$maintainer%" } } : {},
{ '+select' => ["me.statusChangeId", "me.statusChangeTime"]
, '+as' => ["statusChangeId", "statusChangeTime"]
, order_by => "coalesce(statusChangeTime, 0) desc"
});
return $latest;
}
sub jobstatus : Chained('get_builds') PathPart Args(0) {
my ($self, $c) = @_;
$c->stash->{template} = 'jobstatus.tt';
$c->stash->{latestBuilds} = [getJobStatus($self, $c)->all];
}
# A convenient way to see all the errors - i.e. things demanding
# attention - at a glance.
sub errors : Chained('get_builds') PathPart Args(0) {
my ($self, $c) = @_;
$c->stash->{template} = 'errors.tt';
$c->stash->{brokenJobsets} =
[$c->stash->{allJobsets}->search({errormsg => {'!=' => ''}})]
if defined $c->stash->{allJobsets};
$c->stash->{brokenJobs} =
[$c->stash->{allJobs}->search({errormsg => {'!=' => ''}})]
if defined $c->stash->{allJobs};
$c->stash->{brokenBuilds} =
[getJobStatus($self, $c)->search({buildStatus => {'!=' => 0}})];
}
sub all : Chained('get_builds') PathPart {
my ($self, $c) = @_;
@ -56,13 +16,12 @@ sub all : Chained('get_builds') PathPart {
my $resultsPerPage = 20;
my $nrBuilds = $c->stash->{allBuilds}->search({finished => 1})->count;
$c->stash->{baseUri} = $c->uri_for($self->action_for("all"), $c->req->captures);
$c->stash->{page} = $page;
$c->stash->{resultsPerPage} = $resultsPerPage;
$c->stash->{total} = $nrBuilds;
$c->stash->{total} = $c->stash->{allBuilds}->search({finished => 1})->count
unless defined $c->stash->{total};
$c->stash->{builds} = [ $c->stash->{allBuilds}->search(
{ finished => 1 },
@ -82,6 +41,7 @@ sub nix : Chained('get_builds') PathPart('channel') CaptureArgs(1) {
->search_literal("exists (select 1 from buildproducts where build = me.id and type = 'nix-build')")
->search({}, { columns => [@buildListColumns, 'drvpath', 'description', 'homepage']
, join => ["buildoutputs"]
, order_by => ["me.id", "buildoutputs.name"]
, '+select' => ['buildoutputs.path', 'buildoutputs.name'], '+as' => ['outpath', 'outname'] });
}
else {
@ -120,4 +80,22 @@ sub latest_for : Chained('get_builds') PathPart('latest-for') {
}
# Redirect to the latest successful build in a finished evaluation
# (i.e. an evaluation that has no unfinished builds).
sub latest_finished : Chained('get_builds') PathPart('latest-finished') {
my ($self, $c, @rest) = @_;
my $latest = $c->stash->{allBuilds}->find(
{ finished => 1, buildstatus => 0 },
{ order_by => ["id DESC"], rows => 1, join => ["jobsetevalmembers"]
, where => \
"not exists (select 1 from jobsetevalmembers m2 join builds b2 on jobsetevalmembers.eval = m2.eval and m2.build = b2.id and b2.finished = 0)"
});
notFound($c, "There is no successful build to redirect to.") unless defined $latest;
$c->res->redirect($c->uri_for($c->controller('Build')->action_for("build"), [$latest->id], @rest));
}
1;
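
A pattern that recurs throughout this commit: one-row lookups move from list-context search() calls to the DBIx::Class rows => 1 attribute followed by ->single. A minimal sketch of the new idiom, assuming $builds is a Builds resultset:

    # Push LIMIT 1 into the SQL and unwrap the lone row (undef if none).
    my $latest = $builds->search(
        { finished => 1 },
        { order_by => "id DESC", rows => 1 })->single;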


@ -14,20 +14,36 @@ sub getChannelData {
my @storePaths = ();
$c->stash->{nixPkgs} = [];
foreach my $build ($c->stash->{channelBuilds}->all) {
my $outPath = $build->get_column("outpath");
my $outName = $build->get_column("outname");
next if $checkValidity && !isValidPath($outPath);
push @storePaths, $outPath;
my $pkgName = $build->nixname . "-" . $build->system . "-" . $build->id . ($outName ne "out" ? "-" . $outName : "");
push @{$c->stash->{nixPkgs}}, { build => $build, name => $pkgName, outPath => $outPath, outName => $outName };
# Put the system type in the manifest (for top-level paths) as
# a hint to the binary patch generator. (It shouldn't try to
# generate patches between builds for different systems.) It
# would be nice if Nix stored this info for every path but it
# doesn't.
$c->stash->{systemForPath}->{$outPath} = $build->system;
};
my @builds = $c->stash->{channelBuilds}->all;
for (my $n = 0; $n < scalar @builds; ) {
# Since channelData is a join of Builds and BuildOutputs, we
# need to gather the rows that belong to a single build.
my $build = $builds[$n++];
my @outputs = ($build);
push @outputs, $builds[$n++] while $n < scalar @builds && $builds[$n]->id == $build->id;
@outputs = grep { $_->get_column("outpath") } @outputs;
my $outputs = {};
foreach my $output (@outputs) {
my $outPath = $output->get_column("outpath");
next if $checkValidity && !isValidPath($outPath);
$outputs->{$output->get_column("outname")} = $outPath;
push @storePaths, $outPath;
# Put the system type in the manifest (for top-level
# paths) as a hint to the binary patch generator. (It
# shouldn't try to generate patches between builds for
# different systems.) It would be nice if Nix stored this
# info for every path but it doesn't.
$c->stash->{systemForPath}->{$outPath} = $build->system;
}
next if !%$outputs;
my $pkgName = $build->nixname . "-" . $build->system . "-" . $build->id;
push @{$c->stash->{nixPkgs}}, { build => $build, name => $pkgName, outputs => $outputs };
}
$c->stash->{storePaths} = [@storePaths];
}
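
With this change each nixPkgs entry groups all valid outputs of a build under one name, instead of emitting one entry per output. A sketch of the resulting structure (store paths hypothetical):

    my $entry =
        { build   => $build,                     # the Builds row
          name    => "hello-x86_64-linux-12345", # nixname-system-id
          outputs => { out => "/nix/store/...-hello"
                     , doc => "/nix/store/...-hello-doc" } };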


@ -4,8 +4,12 @@ use strict;
use warnings;
use base 'Catalyst::Controller::REST';
# Hack: Erase the map set by C::C::REST
__PACKAGE__->config( map => undef );
__PACKAGE__->config(
map => {
'application/json' => 'JSON',
'text/x-json' => 'JSON',
'text/html' => [ 'View', 'TT' ]
},
default => 'text/html',


@ -8,36 +8,26 @@ use base 'DBIx::Class';
sub TO_JSON {
my $self = shift;
my $json = { $self->get_columns };
my $rs = $self->result_source;
my @relnames = $rs->relationships;
RELLOOP: foreach my $relname (@relnames) {
my $relinfo = $rs->relationship_info($relname);
next unless defined $relinfo->{attrs}->{accessor};
my $accessor = $relinfo->{attrs}->{accessor};
if ($accessor eq "single" and exists $self->{_relationship_data}{$relname}) {
$json->{$relname} = $self->$relname->TO_JSON;
} else {
unless (defined $self->{related_resultsets}{$relname}) {
my $cond = $relinfo->{cond};
if (ref $cond eq 'HASH') {
foreach my $k (keys %{$cond}) {
my $v = $cond->{$k};
$v =~ s/^self\.//;
next RELLOOP unless $self->has_column_loaded($v);
}
} #!!! TODO: Handle ARRAY conditions
}
if (defined $self->related_resultset($relname)->get_cache) {
if ($accessor eq "multi") {
$json->{$relname} = [ map { $_->TO_JSON } $self->$relname ];
} else {
$json->{$relname} = $self->$relname->TO_JSON;
}
}
}
my $hint = $self->json_hint;
my %json = ();
foreach my $column (@{$hint->{columns}}) {
$json{$column} = $self->get_column($column);
}
return $json;
foreach my $relname (keys %{$hint->{relations}}) {
my $key = $hint->{relations}->{$relname};
$json{$relname} = [ map { $_->$key } $self->$relname ];
}
foreach my $relname (keys %{$hint->{eager_relations}}) {
my $key = $hint->{eager_relations}->{$relname};
$json{$relname} = { map { $_->$key => $_ } $self->$relname };
}
return \%json;
}
1;
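
The rewritten TO_JSON no longer walks relationship metadata; it serializes exactly what each result class declares in a json_hint. A minimal sketch of such a hint, matching the three keys read above (column and relation names hypothetical):

    sub json_hint {
        return
            { # plain columns copied into the JSON object
              columns => [ "id", "finished", "buildstatus" ]
              # relations emitted as a list of one field per related row
            , relations => { jobsetevalmembers => "eval" }
              # relations emitted as a hash from that field to the row itself
            , eager_relations => { buildoutputs => "name" }
            };
    }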


@ -15,8 +15,6 @@ use Digest::SHA qw(sha256_hex);
use Text::Diff;
use File::Slurp;
# !!! Rewrite this to use View::JSON.
sub api : Chained('/') PathPart('api') CaptureArgs(0) {
my ($self, $c) = @_;
@ -24,32 +22,6 @@ sub api : Chained('/') PathPart('api') CaptureArgs(0) {
}
sub projectToHash {
my ($project) = @_;
return {
name => $project->name,
description => $project->description
};
}
sub projects : Chained('api') PathPart('projects') Args(0) {
my ($self, $c) = @_;
my @projects = $c->model('DB::Projects')->search({hidden => 0}, {order_by => 'name'});
my @list;
foreach my $p (@projects) {
push @list, projectToHash($p);
}
$c->stash->{'plain'} = {
data => scalar (JSON::Any->objToJson(\@list))
};
$c->forward('Hydra::View::Plain');
}
sub buildToHash {
my ($build) = @_;
my $result = {


@ -34,8 +34,12 @@ sub machines : Chained('admin') PathPart('machines') Args(0) {
sub clear_queue_non_current : Chained('admin') PathPart('clear-queue-non-current') Args(0) {
my ($self, $c) = @_;
my $time = time();
$c->model('DB::Builds')->search({finished => 0, iscurrent => 0, busy => 0})->update({ finished => 1, buildstatus => 4, starttime => $time, stoptime => $time });
my $builds = $c->model('DB::Builds')->search(
{ finished => 0, busy => 0
, id => { -not_in => \ "select build from JobsetEvalMembers where eval in (select max(id) from JobsetEvals where hasNewBuilds = 1 group by project, jobset)" }
});
my $n = cancelBuilds($c->model('DB')->schema, $builds);
$c->flash->{successMsg} = "$n builds have been cancelled.";
$c->res->redirect($c->request->referer // "/admin");
}
@ -49,19 +53,11 @@ sub clearfailedcache : Chained('admin') PathPart('clear-failed-cache') Args(0) {
sub clearvcscache : Chained('admin') PathPart('clear-vcs-cache') Args(0) {
my ($self, $c) = @_;
print STDERR "Clearing path cache\n";
$c->model('DB::CachedPathInputs')->delete_all;
print STDERR "Clearing git cache\n";
$c->model('DB::CachedGitInputs')->delete_all;
print STDERR "Clearing subversion cache\n";
$c->model('DB::CachedSubversionInputs')->delete_all;
print STDERR "Clearing bazaar cache\n";
$c->model('DB::CachedBazaarInputs')->delete_all;
$c->model('DB::CachedPathInputs')->delete;
$c->model('DB::CachedGitInputs')->delete;
$c->model('DB::CachedSubversionInputs')->delete;
$c->model('DB::CachedBazaarInputs')->delete;
$c->flash->{successMsg} = "VCS caches have been cleared.";
$c->res->redirect($c->request->referer // "/admin");
}
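
The move from delete_all to delete on the VCS caches is behavioural as well as cosmetic: in DBIx::Class, delete on a resultset issues a single DELETE statement, whereas delete_all first fetches every row and deletes them one at a time, running any per-row delete logic. For plain cache tables the single statement is enough and far cheaper:

    $c->model('DB::CachedGitInputs')->delete;      # one DELETE FROM CachedGitInputs
    $c->model('DB::CachedGitInputs')->delete_all;  # SELECT all rows, then DELETE each one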


@ -35,18 +35,18 @@ sub buildChain :Chained('/') :PathPart('build') :CaptureArgs(1) {
sub findBuildStepByOutPath {
my ($self, $c, $path, $status) = @_;
my ($self, $c, $path) = @_;
return $c->model('DB::BuildSteps')->search(
{ path => $path, busy => 0, status => $status },
{ join => ["buildstepoutputs"], order_by => ["stopTime"], limit => 1 })->single;
{ path => $path, busy => 0 },
{ join => ["buildstepoutputs"], order_by => ["status", "stopTime"], rows => 1 })->single;
}
sub findBuildStepByDrvPath {
my ($self, $c, $drvPath, $status) = @_;
my ($self, $c, $drvPath) = @_;
return $c->model('DB::BuildSteps')->search(
{ drvpath => $drvPath, busy => 0, status => $status },
{ order_by => ["stopTime"], limit => 1 })->single;
{ drvpath => $drvPath, busy => 0 },
{ order_by => ["status", "stopTime"], rows => 1 })->single;
}
@ -60,7 +60,6 @@ sub build_GET {
$c->stash->{template} = 'build.tt';
$c->stash->{available} = all { isValidPath($_->path) } $build->buildoutputs->all;
$c->stash->{drvAvailable} = isValidPath $build->drvpath;
$c->stash->{flashMsg} = $c->flash->{buildMsg};
if (!$build->finished && $build->busy) {
$c->stash->{logtext} = read_file($build->logfile, err_mode => 'quiet') // "";
@ -68,8 +67,7 @@ sub build_GET {
if ($build->finished && $build->iscachedbuild) {
my $path = ($build->buildoutputs)[0]->path or die;
my $cachedBuildStep = findBuildStepByOutPath($self, $c, $path,
$build->buildstatus == 0 || $build->buildstatus == 6 ? 0 : 1);
my $cachedBuildStep = findBuildStepByOutPath($self, $c, $path);
$c->stash->{cachedBuild} = $cachedBuildStep->build if defined $cachedBuildStep;
}
@ -95,25 +93,16 @@ sub build_GET {
# Get the first eval of which this build was a part.
($c->stash->{nrEvals}) = $c->stash->{build}->jobsetevals->search({ hasnewbuilds => 1 })->count;
($c->stash->{eval}) = $c->stash->{build}->jobsetevals->search(
$c->stash->{eval} = $c->stash->{build}->jobsetevals->search(
{ hasnewbuilds => 1},
{ limit => 1, order_by => ["id"] });
{ rows => 1, order_by => ["id"] })->single;
$self->status_ok(
$c,
entity => $c->model('DB::Builds')->find($build->id,{
columns => [
'id',
'finished',
'timestamp',
'buildstatus',
'job',
'project',
'jobset',
'starttime',
'stoptime',
]
})
entity => $build
);
# If this is an aggregate build, get its constituents.
$c->stash->{constituents} = [$c->stash->{build}->constituents_->search({}, {order_by => ["job"]})];
}
@ -125,35 +114,43 @@ sub view_nixlog : Chained('buildChain') PathPart('nixlog') {
$c->stash->{step} = $step;
showLog($c, $step->drvpath, $mode);
showLog($c, $mode, $step->drvpath, map { $_->path } $step->buildstepoutputs->all);
}
sub view_log : Chained('buildChain') PathPart('log') {
my ($self, $c, $mode) = @_;
showLog($c, $c->stash->{build}->drvpath, $mode);
showLog($c, $mode, $c->stash->{build}->drvpath, map { $_->path } $c->stash->{build}->buildoutputs->all);
}
sub showLog {
my ($c, $drvPath, $mode) = @_;
my ($c, $mode, $drvPath, @outPaths) = @_;
my $logPath = getDrvLogPath($drvPath);
my $logPath = findLog($c, $drvPath, @outPaths);
notFound($c, "The build log of derivation $drvPath is not available.") unless defined $logPath;
my $size = stat($logPath)->size;
error($c, "This build log is too big to display ($size bytes).")
if $size >= 64 * 1024 * 1024;
if (!$mode) {
# !!! quick hack
my $pipeline = "nix-store -l $drvPath"
my $pipeline = ($logPath =~ /.bz2$/ ? "bzip2 -d < $logPath" : "cat $logPath")
. " | nix-log2xml | xsltproc " . $c->path_to("xsl/mark-errors.xsl") . " -"
. " | xsltproc " . $c->path_to("xsl/log2html.xsl") . " - | tail -n +2";
$c->stash->{template} = 'log.tt';
$c->stash->{logtext} = `$pipeline`;
$c->stash->{logtext} = `ulimit -t 5 ; $pipeline`;
}
elsif ($mode eq "raw") {
$c->stash->{'plain'} = { data => (scalar logContents($drvPath)) || " " };
$c->forward('Hydra::View::Plain');
if ($logPath !~ /.bz2$/) {
$c->serve_static_file($logPath);
} else {
$c->stash->{'plain'} = { data => (scalar logContents($logPath)) || " " };
$c->forward('Hydra::View::Plain');
}
}
elsif ($mode eq "tail-reload") {
@ -162,12 +159,12 @@ sub showLog {
$c->stash->{url} = $url;
$c->stash->{reload} = !$c->stash->{build}->finished && $c->stash->{build}->busy;
$c->stash->{title} = "";
$c->stash->{contents} = (scalar logContents($drvPath, 50)) || " ";
$c->stash->{contents} = (scalar logContents($logPath, 50)) || " ";
$c->stash->{template} = 'plain-reload.tt';
}
elsif ($mode eq "tail") {
$c->stash->{'plain'} = { data => (scalar logContents($drvPath, 50)) || " " };
$c->stash->{'plain'} = { data => (scalar logContents($logPath, 50)) || " " };
$c->forward('Hydra::View::Plain');
}
@ -238,6 +235,21 @@ sub download : Chained('buildChain') PathPart {
}
sub output : Chained('buildChain') PathPart Args(1) {
my ($self, $c, $outputName) = @_;
my $build = $c->stash->{build};
error($c, "This build is not finished yet.") unless $build->finished;
my $output = $build->buildoutputs->find({name => $outputName});
notFound($c, "This build has no output named $outputName") unless defined $output;
error($c, "Output is not available.") unless isValidPath $output->path;
$c->response->header('Content-Disposition', "attachment; filename=\"build-${\$build->id}-${\$outputName}.nar.bz2\"");
$c->stash->{current_view} = 'NixNAR';
$c->stash->{storePath} = $output->path;
}
# Redirect to a download with the given type. Useful when you want to
# link to some build product of the latest build (i.e. in conjunction
# with the .../latest redirect).
@ -269,7 +281,7 @@ sub contents : Chained('buildChain') PathPart Args(1) {
notFound($c, "Product $path has disappeared.") unless -e $path;
# Sanitize $path to prevent shell injection attacks.
$path =~ /^\/[\/[A-Za-z0-9_\-\.=]+$/ or die "Filename contains illegal characters.\n";
$path =~ /^\/[\/[A-Za-z0-9_\-\.=+:]+$/ or die "Filename contains illegal characters.\n";
# FIXME: don't use shell invocations below.
@ -339,8 +351,8 @@ sub getDependencyGraph {
{ path => $path
, name => $name
, buildStep => $runtime
? findBuildStepByOutPath($self, $c, $path, 0)
: findBuildStepByDrvPath($self, $c, $path, 0)
? findBuildStepByOutPath($self, $c, $path)
: findBuildStepByDrvPath($self, $c, $path)
};
$$done{$path} = $node;
my @refs;
@ -409,49 +421,22 @@ sub nix : Chained('buildChain') PathPart('nix') CaptureArgs(0) {
sub restart : Chained('buildChain') PathPart Args(0) {
my ($self, $c) = @_;
my $build = $c->stash->{build};
requireProjectOwner($c, $build->project);
my $drvpath = $build->drvpath;
error($c, "This build cannot be restarted.")
unless $build->finished && -f $drvpath;
restartBuild($c->model('DB')->schema, $build);
$c->flash->{buildMsg} = "Build has been restarted.";
my $n = restartBuilds($c->model('DB')->schema, $c->model('DB::Builds')->search({ id => $build->id }));
error($c, "This build cannot be restarted.") if $n != 1;
$c->flash->{successMsg} = "Build has been restarted.";
$c->res->redirect($c->uri_for($self->action_for("build"), $c->req->captures));
}
sub cancel : Chained('buildChain') PathPart Args(0) {
my ($self, $c) = @_;
my $build = $c->stash->{build};
requireProjectOwner($c, $build->project);
txn_do($c->model('DB')->schema, sub {
error($c, "This build cannot be cancelled.")
if $build->finished || $build->busy;
# !!! Actually, it would be nice to be able to cancel busy
# builds as well, but we would have to send a signal or
# something to the build process.
my $time = time();
$build->update(
{ finished => 1, busy => 0
, iscachedbuild => 0, buildstatus => 4 # = cancelled
, starttime => $time
, stoptime => $time
});
});
$c->flash->{buildMsg} = "Build has been cancelled.";
my $n = cancelBuilds($c->model('DB')->schema, $c->model('DB::Builds')->search({ id => $build->id }));
error($c, "This build cannot be cancelled.") if $n != 1;
$c->flash->{successMsg} = "Build has been cancelled.";
$c->res->redirect($c->uri_for($self->action_for("build"), $c->req->captures));
}
@ -472,7 +457,7 @@ sub keep : Chained('buildChain') PathPart Args(1) {
$build->update({keep => $keep});
});
$c->flash->{buildMsg} =
$c->flash->{successMsg} =
$keep ? "Build will be kept." : "Build will not be kept.";
$c->res->redirect($c->uri_for($self->action_for("build"), $c->req->captures));
@ -502,89 +487,12 @@ sub add_to_release : Chained('buildChain') PathPart('add-to-release') Args(0) {
$release->releasemembers->create({build => $build->id, description => $build->description});
$c->flash->{buildMsg} = "Build added to project <tt>$releaseName</tt>.";
$c->flash->{successMsg} = "Build added to project <tt>$releaseName</tt>.";
$c->res->redirect($c->uri_for($self->action_for("build"), $c->req->captures));
}
sub clone : Chained('buildChain') PathPart('clone') Args(0) {
my ($self, $c) = @_;
my $build = $c->stash->{build};
requireProjectOwner($c, $build->project);
$c->stash->{template} = 'clone-build.tt';
}
sub clone_submit : Chained('buildChain') PathPart('clone/submit') Args(0) {
my ($self, $c) = @_;
my $build = $c->stash->{build};
requireProjectOwner($c, $build->project);
my ($nixExprPath, $nixExprInputName) = Hydra::Controller::Jobset::nixExprPathFromParams $c;
# When the expression is in a .scm file, assume it's a Guile + Guix
# build expression.
my $exprType =
$c->request->params->{"nixexprpath"} =~ /.scm$/ ? "guile" : "nix";
my $jobName = trim $c->request->params->{"jobname"};
error($c, "Invalid job name: $jobName") if $jobName !~ /^$jobNameRE$/;
my $inputInfo = {};
foreach my $param (keys %{$c->request->params}) {
next unless $param =~ /^input-(\w+)-name$/;
my $baseName = $1;
my ($inputName, $inputType) =
Hydra::Controller::Jobset::checkInput($c, $baseName);
my $inputValue = Hydra::Controller::Jobset::checkInputValue(
$c, $inputType, $c->request->params->{"input-$baseName-value"});
eval {
# !!! fetchInput can take a long time, which might cause
# the current HTTP request to time out. So maybe this
# should be done asynchronously. But then error reporting
# becomes harder.
my $info = fetchInput(
$c->hydra_plugins, $c->model('DB'), $build->project, $build->jobset,
$inputName, $inputType, $inputValue);
push @{$$inputInfo{$inputName}}, $info if defined $info;
};
error($c, $@) if $@;
}
my ($jobs, $nixExprInput) = evalJobs($inputInfo, $exprType, $nixExprInputName, $nixExprPath);
my $job;
foreach my $j (@{$jobs->{job}}) {
print STDERR $j->{jobName}, "\n";
if ($j->{jobName} eq $jobName) {
error($c, "Nix expression returned multiple builds for job $jobName.")
if $job;
$job = $j;
}
}
error($c, "Nix expression did not return a job named $jobName.") unless $job;
my %currentBuilds;
my $newBuild = checkBuild(
$c->model('DB'), $build->project, $build->jobset,
$inputInfo, $nixExprInput, $job, \%currentBuilds, undef, {});
error($c, "This build has already been performed.") unless $newBuild;
$c->flash->{buildMsg} = "Build " . $newBuild->id . " added to the queue.";
$c->res->redirect($c->uri_for($c->controller('Root')->action_for('queue')));
}
sub get_info : Chained('buildChain') PathPart('api/get-info') Args(0) {
my ($self, $c) = @_;
my $build = $c->stash->{build};
@ -614,6 +522,22 @@ sub evals : Chained('buildChain') PathPart('evals') Args(0) {
}
# Redirect to the latest finished evaluation that contains this build.
sub eval : Chained('buildChain') PathPart('eval') {
my ($self, $c, @rest) = @_;
my $eval = $c->stash->{build}->jobsetevals->find(
{ hasnewbuilds => 1 },
{ order_by => "id DESC", rows => 1
, "not exists (select 1 from jobsetevalmembers m2 join builds b2 on me.eval = m2.eval and m2.build = b2.id and b2.finished = 0)"
});
notFound($c, "There is no finished evaluation containing this build.") unless defined $eval;
$c->res->redirect($c->uri_for($c->controller('JobsetEval')->action_for("view"), [$eval->id], @rest, $c->req->params));
}
sub reproduce : Chained('buildChain') PathPart('reproduce') Args(0) {
my ($self, $c) = @_;
$c->response->content_type('text/x-shellscript');
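
restart and cancel now delegate to shared helpers instead of updating Builds rows inline. From the call sites above, both helpers appear to take the schema and a Builds resultset and return the number of builds affected; a sketch of that usage (the helpers' module of origin is not shown in this diff):

    my $rs = $c->model('DB::Builds')->search({ id => $build->id });
    my $n  = restartBuilds($c->model('DB')->schema, $rs);
    error($c, "This build cannot be restarted.") if $n != 1;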


@ -20,24 +20,52 @@ sub job : Chained('/') PathPart('job') CaptureArgs(3) {
sub overview : Chained('job') PathPart('') Args(0) {
my ($self, $c) = @_;
my $job = $c->stash->{job};
$c->stash->{template} = 'job.tt';
$c->stash->{lastBuilds} =
[ $c->stash->{job}->builds->search({ finished => 1 },
[ $job->builds->search({ finished => 1 },
{ order_by => 'id DESC', rows => 10, columns => [@buildListColumns] }) ];
$c->stash->{queuedBuilds} = [
$c->stash->{job}->builds->search(
$job->builds->search(
{ finished => 0 },
{ join => ['project']
, order_by => ["priority DESC", "id"]
, '+select' => ['project.enabled']
, '+as' => ['enabled']
}
{ order_by => ["priority DESC", "id"] }
) ];
$c->stash->{systems} = [$c->stash->{job}->builds->search({iscurrent => 1}, {select => ["system"], distinct => 1})];
# If this is an aggregate job, then get its constituents.
my @constituents = $c->model('DB::Builds')->search(
{ aggregate => { -in => $job->builds->search({}, { columns => ["id"], order_by => "id desc", rows => 15 })->as_query } },
{ join => 'aggregateconstituents_constituents',
columns => ['id', 'job', 'finished', 'buildstatus'],
'+select' => ['aggregateconstituents_constituents.aggregate'],
'+as' => ['aggregate']
});
my $aggregates = {};
my %constituentJobs;
foreach my $b (@constituents) {
my $jobName = $b->get_column('job');
$aggregates->{$b->get_column('aggregate')}->{constituents}->{$jobName} =
{ id => $b->id, finished => $b->finished, buildstatus => $b->buildstatus };
$constituentJobs{$jobName} = 1;
}
foreach my $agg (keys %$aggregates) {
# FIXME: could be done in one query.
$aggregates->{$agg}->{build} =
$c->model('DB::Builds')->find({id => $agg}, {columns => [@buildListColumns]}) or die;
}
$c->stash->{aggregates} = $aggregates;
$c->stash->{constituentJobs} = [sort (keys %constituentJobs)];
$c->stash->{starred} = $c->user->starredjobs(
{ project => $c->stash->{project}->name
, jobset => $c->stash->{jobset}->name
, job => $c->stash->{job}->name
})->count == 1 if $c->user_exists;
}
@ -45,9 +73,6 @@ sub overview : Chained('job') PathPart('') Args(0) {
sub get_builds : Chained('job') PathPart('') CaptureArgs(0) {
my ($self, $c) = @_;
$c->stash->{allBuilds} = $c->stash->{job}->builds;
$c->stash->{jobStatus} = $c->model('DB')->resultset('JobStatusForJob')
->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name, $c->stash->{job}->name]});
$c->stash->{allJobs} = $c->stash->{job_};
$c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceededForJob')
->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name, $c->stash->{job}->name]});
$c->stash->{channelBaseName} =
@ -55,4 +80,22 @@ sub get_builds : Chained('job') PathPart('') CaptureArgs(0) {
}
sub star : Chained('job') PathPart('star') Args(0) {
my ($self, $c) = @_;
requirePost($c);
requireUser($c);
my $args =
{ project => $c->stash->{project}->name
, jobset => $c->stash->{jobset}->name
, job => $c->stash->{job}->name
};
if ($c->request->params->{star} eq "1") {
$c->user->starredjobs->update_or_create($args);
} else {
$c->user->starredjobs->find($args)->delete;
}
$c->stash->{resource}->{success} = 1;
}
1;
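
The overview action now precomputes, for the last few builds of an aggregate job, which constituents exist and how they fared. A sketch of the structure it leaves in the stash for the template (ids and job names hypothetical):

    $c->stash->{aggregates} = {
        7001 => {                                  # id of an aggregate build
            build        => $aggregateBuild,       # its Builds row
            constituents => {
                "tests.install" => { id => 6990, finished => 1, buildstatus => 0 },
                "tests.upgrade" => { id => 6991, finished => 1, buildstatus => 1 },
            },
        },
    };
    $c->stash->{constituentJobs} = [ "tests.install", "tests.upgrade" ];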


@ -1,5 +1,6 @@
package Hydra::Controller::Jobset;
use utf8;
use strict;
use warnings;
use base 'Hydra::Base::Controller::ListBuilds';
@ -9,35 +10,18 @@ use Hydra::Helper::CatalystUtils;
sub jobsetChain :Chained('/') :PathPart('jobset') :CaptureArgs(2) {
my ($self, $c, $projectName, $jobsetName) = @_;
$c->stash->{params}->{name} //= $jobsetName;
my $project = $c->model('DB::Projects')->find($projectName);
if ($project) {
$c->stash->{project} = $project;
notFound($c, "Project $projectName doesn't exist.") if !$project;
$c->stash->{jobset_} = $project->jobsets->search({'me.name' => $jobsetName});
my $jobset = $c->stash->{jobset_}->single;
$c->stash->{project} = $project;
if ($jobset) {
$c->stash->{jobset} = $jobset;
} else {
if ($c->action->name eq "jobset" and $c->request->method eq "PUT") {
$c->stash->{jobsetName} = $jobsetName;
} else {
$self->status_not_found(
$c,
message => "Jobset $jobsetName doesn't exist."
);
$c->detach;
}
}
} else {
$self->status_not_found(
$c,
message => "Project $projectName doesn't exist."
);
$c->detach;
}
$c->stash->{jobset} = $project->jobsets->find({ name => $jobsetName });
notFound($c, "Jobset $jobsetName doesn't exist.")
if !$c->stash->{jobset} && !($c->action->name eq "jobset" and $c->request->method eq "PUT");
}
@ -50,26 +34,11 @@ sub jobset_GET {
$c->stash->{evals} = getEvals($self, $c, scalar $c->stash->{jobset}->jobsetevals, 0, 10);
($c->stash->{latestEval}) = $c->stash->{jobset}->jobsetevals->search({}, { limit => 1, order_by => ["id desc"] });
$c->stash->{latestEval} = $c->stash->{jobset}->jobsetevals->search({}, { rows => 1, order_by => ["id desc"] })->single;
$self->status_ok(
$c,
entity => $c->stash->{jobset_}->find({}, {
columns => [
'me.name',
'me.project',
'me.errormsg',
'jobsetinputs.name',
{
'jobsetinputs.jobsetinputalts.altnr' => 'jobsetinputalts.altnr',
'jobsetinputs.jobsetinputalts.value' => 'jobsetinputalts.value'
}
],
join => { 'jobsetinputs' => 'jobsetinputalts' },
collapse => 1,
order_by => "me.name"
})
);
$c->stash->{totalShares} = getTotalShares($c->model('DB')->schema);
$self->status_ok($c, entity => $c->stash->{jobset});
}
sub jobset_PUT {
@ -78,133 +47,91 @@ sub jobset_PUT {
requireProjectOwner($c, $c->stash->{project});
if (defined $c->stash->{jobset}) {
error($c, "Cannot rename jobset `$c->stash->{params}->{oldName}' over existing jobset `$c->stash->{jobset}->name") if defined $c->stash->{params}->{oldName} and $c->stash->{params}->{oldName} ne $c->stash->{jobset}->name;
txn_do($c->model('DB')->schema, sub {
updateJobset($c, $c->stash->{jobset});
});
if ($c->req->looks_like_browser) {
$c->res->redirect($c->uri_for($self->action_for("jobset"),
[$c->stash->{project}->name, $c->stash->{jobset}->name]) . "#tabs-configuration");
} else {
$self->status_no_content($c);
}
} elsif (defined $c->stash->{params}->{oldName}) {
my $jobset = $c->stash->{project}->jobsets->find({'me.name' => $c->stash->{params}->{oldName}});
my $uri = $c->uri_for($self->action_for("jobset"), [$c->stash->{project}->name, $c->stash->{jobset}->name]) . "#tabs-configuration";
$self->status_ok($c, entity => { redirect => "$uri" });
if (defined $jobset) {
txn_do($c->model('DB')->schema, sub {
updateJobset($c, $jobset);
});
my $uri = $c->uri_for($self->action_for("jobset"), [$c->stash->{project}->name, $jobset->name]);
if ($c->req->looks_like_browser) {
$c->res->redirect($uri . "#tabs-configuration");
} else {
$self->status_created(
$c,
location => "$uri",
entity => { name => $jobset->name, uri => "$uri", type => "jobset" }
);
}
} else {
$self->status_not_found(
$c,
message => "Jobset $c->stash->{params}->{oldName} doesn't exist."
);
}
} else {
my $exprType =
$c->stash->{params}->{"nixexprpath"} =~ /.scm$/ ? "guile" : "nix";
error($c, "Invalid jobset name: $c->stash->{jobsetName}") if $c->stash->{jobsetName} !~ /^$jobsetNameRE$/;
$c->flash->{successMsg} = "The jobset configuration has been updated.";
}
else {
my $jobset;
txn_do($c->model('DB')->schema, sub {
# Note: $jobsetName is validated in updateProject, which will
# abort the transaction if the name isn't valid.
$jobset = $c->stash->{project}->jobsets->create(
{name => $c->stash->{jobsetName}, nixexprinput => "", nixexprpath => "", emailoverride => ""});
{name => ".tmp", nixexprinput => "", nixexprpath => "", emailoverride => ""});
updateJobset($c, $jobset);
});
my $uri = $c->uri_for($self->action_for("jobset"), [$c->stash->{project}->name, $jobset->name]);
if ($c->req->looks_like_browser) {
$c->res->redirect($uri . "#tabs-configuration");
} else {
$self->status_created(
$c,
location => "$uri",
entity => { name => $jobset->name, uri => "$uri", type => "jobset" }
);
}
$self->status_created($c,
location => "$uri",
entity => { name => $jobset->name, uri => "$uri", redirect => "$uri", type => "jobset" });
}
}
sub jobset_DELETE {
my ($self, $c) = @_;
requireProjectOwner($c, $c->stash->{project});
txn_do($c->model('DB')->schema, sub {
$c->stash->{jobset}->jobsetevals->delete;
$c->stash->{jobset}->builds->delete;
$c->stash->{jobset}->delete;
});
my $uri = $c->uri_for($c->controller('Project')->action_for("project"), [$c->stash->{project}->name]);
$self->status_ok($c, entity => { redirect => "$uri" });
$c->flash->{successMsg} = "The jobset has been deleted.";
}
sub jobs_tab : Chained('jobsetChain') PathPart('jobs-tab') Args(0) {
my ($self, $c) = @_;
$c->stash->{template} = 'jobset-jobs-tab.tt';
$c->stash->{activeJobs} = [];
$c->stash->{inactiveJobs} = [];
$c->stash->{filter} = $c->request->params->{filter} // "";
my $filter = "%" . $c->stash->{filter} . "%";
(my $latestEval) = $c->stash->{jobset}->jobsetevals->search(
{ hasnewbuilds => 1}, { limit => 1, order_by => ["id desc"] });
my @evals = $c->stash->{jobset}->jobsetevals->search({ hasnewbuilds => 1}, { order_by => "id desc", rows => 20 });
my %activeJobs;
if (defined $latestEval) {
foreach my $build ($latestEval->builds->search({}, { order_by => ["job"], select => ["job"] })) {
my $job = $build->get_column("job");
if (!defined $activeJobs{$job}) {
$activeJobs{$job} = 1;
push @{$c->stash->{activeJobs}}, $job;
}
my $evals = {};
my %jobs;
my $nrBuilds = 0;
foreach my $eval (@evals) {
my @builds = $eval->builds->search(
{ job => { ilike => $filter } },
{ columns => ['id', 'job', 'finished', 'buildstatus'] });
foreach my $b (@builds) {
my $jobName = $b->get_column('job');
$evals->{$eval->id}->{$jobName} =
{ id => $b->id, finished => $b->finished, buildstatus => $b->buildstatus };
$jobs{$jobName} = 1;
$nrBuilds++;
}
last if $nrBuilds >= 10000;
}
if ($c->request->params->{showInactive}) {
$c->stash->{showInactive} = 1;
foreach my $job ($c->stash->{jobset}->jobs->search({ name => { ilike => $filter } })) {
next if defined $jobs{$job->name};
$c->stash->{inactiveJobs}->{$job->name} = $jobs{$job->name} = 1;
}
}
foreach my $job ($c->stash->{jobset}->jobs->search({}, { order_by => ["name"] })) {
if (!defined $activeJobs{$job->name}) {
push @{$c->stash->{inactiveJobs}}, $job->name;
}
}
}
sub status_tab : Chained('jobsetChain') PathPart('status-tab') Args(0) {
my ($self, $c) = @_;
$c->stash->{template} = 'jobset-status-tab.tt';
# FIXME: use latest eval instead of iscurrent.
$c->stash->{systems} =
[ $c->stash->{jobset}->builds->search({ iscurrent => 1 }, { select => ["system"], distinct => 1, order_by => "system" }) ];
# status per system
my @systems = ();
foreach my $system (@{$c->stash->{systems}}) {
push(@systems, $system->system);
}
my @select = ();
my @as = ();
push(@select, "job"); push(@as, "job");
foreach my $system (@systems) {
push(@select, "(select buildstatus from Builds b where b.id = (select max(id) from Builds t where t.project = me.project and t.jobset = me.jobset and t.job = me.job and t.system = '$system' and t.iscurrent = 1 ))");
push(@as, $system);
push(@select, "(select b.id from Builds b where b.id = (select max(id) from Builds t where t.project = me.project and t.jobset = me.jobset and t.job = me.job and t.system = '$system' and t.iscurrent = 1 ))");
push(@as, "$system-build");
}
$c->stash->{activeJobsStatus} = [
$c->model('DB')->resultset('ActiveJobsForJobset')->search(
{},
{ bind => [$c->stash->{project}->name, $c->stash->{jobset}->name]
, select => \@select
, as => \@as
, order_by => ["job"]
}) ];
$c->stash->{evals} = $evals;
my @jobs = sort (keys %jobs);
$c->stash->{nrJobs} = scalar @jobs;
splice @jobs, 250 if $c->stash->{filter} eq "";
$c->stash->{jobs} = [@jobs];
}
@ -212,10 +139,6 @@ sub status_tab : Chained('jobsetChain') PathPart('status-tab') Args(0) {
sub get_builds : Chained('jobsetChain') PathPart('') CaptureArgs(0) {
my ($self, $c) = @_;
$c->stash->{allBuilds} = $c->stash->{jobset}->builds;
$c->stash->{jobStatus} = $c->model('DB')->resultset('JobStatusForJobset')
->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name]});
$c->stash->{allJobsets} = $c->stash->{jobset_};
$c->stash->{allJobs} = $c->stash->{jobset}->jobs;
$c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceededForJobset')
->search({}, {bind => [$c->stash->{project}->name, $c->stash->{jobset}->name]});
$c->stash->{channelBaseName} =
@ -230,31 +153,8 @@ sub edit : Chained('jobsetChain') PathPart Args(0) {
$c->stash->{template} = 'edit-jobset.tt';
$c->stash->{edit} = 1;
}
sub submit : Chained('jobsetChain') PathPart Args(0) {
my ($self, $c) = @_;
requirePost($c);
if (($c->request->params->{submit} // "") eq "delete") {
txn_do($c->model('DB')->schema, sub {
$c->stash->{jobset}->jobsetevals->delete_all;
$c->stash->{jobset}->builds->delete_all;
$c->stash->{jobset}->delete;
});
return $c->res->redirect($c->uri_for($c->controller('Project')->action_for("project"), [$c->stash->{project}->name]));
}
my $newName = trim $c->stash->{params}->{name};
my $oldName = trim $c->stash->{jobset}->name;
unless ($oldName eq $newName) {
$c->stash->{params}->{oldName} = $oldName;
$c->stash->{jobsetName} = $newName;
undef $c->stash->{jobset};
}
jobset_PUT($self, $c);
$c->stash->{clone} = defined $c->stash->{params}->{clone};
$c->stash->{totalShares} = getTotalShares($c->model('DB')->schema);
}
@ -263,10 +163,10 @@ sub nixExprPathFromParams {
# The Nix expression path must be relative and can't contain ".." elements.
my $nixExprPath = trim $c->stash->{params}->{"nixexprpath"};
error($c, "Invalid Nix expression path: $nixExprPath") if $nixExprPath !~ /^$relPathRE$/;
error($c, "Invalid Nix expression path $nixExprPath.") if $nixExprPath !~ /^$relPathRE$/;
my $nixExprInput = trim $c->stash->{params}->{"nixexprinput"};
error($c, "Invalid Nix expression input name: $nixExprInput") unless $nixExprInput =~ /^\w+$/;
error($c, "Invalid Nix expression input name $nixExprInput.") unless $nixExprInput =~ /^[[:alpha:]][\w-]*$/;
return ($nixExprPath, $nixExprInput);
}
@ -275,7 +175,7 @@ sub nixExprPathFromParams {
sub checkInputValue {
my ($c, $type, $value) = @_;
$value = trim $value;
error($c, "Invalid Boolean value: $value") if
error($c, "Invalid Boolean value $value.") if
$type eq "boolean" && !($value eq "true" || $value eq "false");
return $value;
}
@ -284,8 +184,11 @@ sub checkInputValue {
sub updateJobset {
my ($c, $jobset) = @_;
my $jobsetName = $c->stash->{jobsetName} // $jobset->name;
error($c, "Invalid jobset name: $jobsetName") if $jobsetName !~ /^$jobsetNameRE$/;
my $jobsetName = $c->stash->{params}->{name};
error($c, "Invalid jobset identifier $jobsetName.") if $jobsetName !~ /^$jobsetNameRE$/;
error($c, "Cannot rename jobset to $jobsetName since that identifier is already taken.")
if $jobsetName ne $jobset->name && defined $c->stash->{project}->jobsets->find({ name => $jobsetName });
# When the expression is in a .scm file, assume it's a Guile + Guix
# build expression.
@ -294,118 +197,61 @@ sub updateJobset {
my ($nixExprPath, $nixExprInput) = nixExprPathFromParams $c;
my $enabled = int($c->stash->{params}->{enabled});
die if $enabled < 0 || $enabled > 2;
$jobset->update(
{ name => $jobsetName
, description => trim($c->stash->{params}->{"description"})
, nixexprpath => $nixExprPath
, nixexprinput => $nixExprInput
, enabled => defined $c->stash->{params}->{enabled} ? 1 : 0
, enabled => $enabled
, enableemail => defined $c->stash->{params}->{enableemail} ? 1 : 0
, emailoverride => trim($c->stash->{params}->{emailoverride}) || ""
, hidden => defined $c->stash->{params}->{visible} ? 0 : 1
, keepnr => int(trim($c->stash->{params}->{keepnr})) || 3
, keepnr => int(trim($c->stash->{params}->{keepnr}))
, checkinterval => int(trim($c->stash->{params}->{checkinterval}))
, triggertime => $jobset->triggertime // time()
, triggertime => $enabled ? $jobset->triggertime // time() : undef
, schedulingshares => int($c->stash->{params}->{schedulingshares})
});
# Process the inputs of this jobset.
unless (defined $c->stash->{params}->{inputs}) {
$c->stash->{params}->{inputs} = {};
foreach my $param (keys %{$c->stash->{params}}) {
next unless $param =~ /^input-(\w+)-name$/;
my $baseName = $1;
next if $baseName eq "template";
$c->stash->{params}->{inputs}->{$c->stash->{params}->{$param}} = { type => $c->stash->{params}->{"input-$baseName-type"}, values => $c->stash->{params}->{"input-$baseName-values"} };
unless ($baseName =~ /^\d+$/) { # non-numeric base name is an existing entry
$c->stash->{params}->{inputs}->{$c->stash->{params}->{$param}}->{oldName} = $baseName;
}
}
}
# Set the inputs of this jobset.
$jobset->jobsetinputs->delete;
foreach my $inputName (keys %{$c->stash->{params}->{inputs}}) {
my $inputData = $c->stash->{params}->{inputs}->{$inputName};
error($c, "Invalid input name: $inputName") unless $inputName =~ /^[[:alpha:]]\w*$/;
my $inputType = $inputData->{type};
error($c, "Invalid input type: $inputType") unless
$inputType eq "svn" || $inputType eq "svn-checkout" || $inputType eq "hg" || $inputType eq "tarball" ||
$inputType eq "string" || $inputType eq "path" || $inputType eq "boolean" || $inputType eq "bzr" || $inputType eq "bzr-checkout" ||
$inputType eq "git" || $inputType eq "build" || $inputType eq "sysbuild" ;
my $input;
unless (defined $inputData->{oldName}) {
$input = $jobset->jobsetinputs->update_or_create(
{ name => $inputName
, type => $inputType
});
} else { # it's an existing input
$input = ($jobset->jobsetinputs->search({name => $inputData->{oldName}}))[0];
die unless defined $input;
$input->update({name => $inputName, type => $inputType});
}
# Update the values for this input. Just delete all the
# current ones, then create the new values.
$input->jobsetinputalts->delete_all;
foreach my $name (keys %{$c->stash->{params}->{inputs}}) {
my $inputData = $c->stash->{params}->{inputs}->{$name};
my $type = $inputData->{type};
my $values = $inputData->{values};
$values = [] unless defined $values;
$values = [$values] unless ref($values) eq 'ARRAY';
my $emailresponsible = defined $inputData->{emailresponsible} ? 1 : 0;
error($c, "Invalid input name $name.") unless $name =~ /^[[:alpha:]][\w-]*$/;
error($c, "Invalid input type $type.") unless defined $c->stash->{inputTypes}->{$type};
my $input = $jobset->jobsetinputs->create({
name => $name,
type => $type,
emailresponsible => $emailresponsible
});
# Set the values for this input.
my @values = ref($values) eq 'ARRAY' ? @{$values} : ($values);
my $altnr = 0;
foreach my $value (@{$values}) {
$value = checkInputValue($c, $inputType, $value);
foreach my $value (@values) {
$value = checkInputValue($c, $type, $value);
$input->jobsetinputalts->create({altnr => $altnr++, value => $value});
}
}
# Get rid of deleted inputs.
my @inputs = $jobset->jobsetinputs->all;
foreach my $input (@inputs) {
$input->delete unless defined $c->stash->{params}->{inputs}->{$input->name};
}
}
sub clone : Chained('jobsetChain') PathPart('clone') Args(0) {
my ($self, $c) = @_;
my $jobset = $c->stash->{jobset};
requireProjectOwner($c, $jobset->project);
requireProjectOwner($c, $c->stash->{project});
$c->stash->{template} = 'clone-jobset.tt';
}
sub clone_submit : Chained('jobsetChain') PathPart('clone/submit') Args(0) {
my ($self, $c) = @_;
my $jobset = $c->stash->{jobset};
requireProjectOwner($c, $jobset->project);
requirePost($c);
my $newJobsetName = trim $c->stash->{params}->{"newjobset"};
error($c, "Invalid jobset name: $newJobsetName") unless $newJobsetName =~ /^[[:alpha:]][\w\-]*$/;
my $newJobset;
txn_do($c->model('DB')->schema, sub {
$newJobset = $jobset->project->jobsets->create(
{ name => $newJobsetName
, description => $jobset->description
, nixexprpath => $jobset->nixexprpath
, nixexprinput => $jobset->nixexprinput
, enabled => 0
, enableemail => $jobset->enableemail
, emailoverride => $jobset->emailoverride || ""
});
foreach my $input ($jobset->jobsetinputs) {
my $newinput = $newJobset->jobsetinputs->create({name => $input->name, type => $input->type});
foreach my $inputalt ($input->jobsetinputalts) {
$newinput->jobsetinputalts->create({altnr => $inputalt->altnr, value => $inputalt->value});
}
}
});
$c->res->redirect($c->uri_for($c->controller('Jobset')->action_for("edit"), [$jobset->project->name, $newJobsetName]));
$c->stash->{template} = 'edit-jobset.tt';
$c->stash->{clone} = 1;
$c->stash->{totalShares} = getTotalShares($c->model('DB')->schema);
}
@ -428,24 +274,7 @@ sub evals_GET {
my $offset = ($page - 1) * $resultsPerPage;
$c->stash->{evals} = getEvals($self, $c, $evals, $offset, $resultsPerPage);
my %entity = (
evals => [ $evals->search({ 'me.hasnewbuilds' => 1 }, {
columns => [
'me.hasnewbuilds',
'me.id',
'jobsetevalinputs.name',
'jobsetevalinputs.altnr',
'jobsetevalinputs.revision',
'jobsetevalinputs.type',
'jobsetevalinputs.uri',
'jobsetevalinputs.dependency',
'jobsetevalmembers.build',
],
join => [ 'jobsetevalinputs', 'jobsetevalmembers' ],
collapse => 1,
rows => $resultsPerPage,
offset => $offset,
order_by => "me.id DESC",
}) ],
evals => [ map { $_->{eval} } @{$c->stash->{evals}} ],
first => "?page=1",
last => "?page=" . POSIX::ceil($c->stash->{total}/$resultsPerPage)
);
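
updateJobset now takes the jobset's inputs either from individual form fields (input-N-name, input-N-type, input-N-values, ...) or from a ready-made inputs hash in the request parameters, which makes a declarative PUT possible. A sketch of that shape, as read by the loop above (all names and values hypothetical):

    $c->stash->{params}->{inputs} =
        { src      => { type => "git",     values => "https://example.org/repo.git" }
        , official => { type => "boolean", values => "false", emailresponsible => 0 }
        };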


@ -26,6 +26,9 @@ sub view : Chained('eval') PathPart('') Args(0) {
my $eval = $c->stash->{eval};
$c->stash->{filter} = $c->request->params->{filter} // "";
my $filter = $c->stash->{filter} eq "" ? {} : { job => { ilike => "%" . $c->stash->{filter} . "%" } };
my $compare = $c->req->params->{compare};
my $eval2;
@ -36,6 +39,11 @@ sub view : Chained('eval') PathPart('') Args(0) {
if ($compare =~ /^\d+$/) {
$eval2 = $c->model('DB::JobsetEvals')->find($compare)
or notFound($c, "Evaluation $compare doesn't exist.");
} elsif ($compare =~ /^-(\d+)$/) {
my $t = int($1);
$eval2 = $c->stash->{jobset}->jobsetevals->find(
{ hasnewbuilds => 1, timestamp => {'<=', $eval->timestamp - $t} },
{ order_by => "timestamp desc", rows => 1});
} elsif (defined $compare && $compare =~ /^($jobsetNameRE)$/) {
my $j = $c->stash->{project}->jobsets->find({name => $compare})
or notFound($c, "Jobset $compare doesn't exist.");
@ -51,10 +59,17 @@ sub view : Chained('eval') PathPart('') Args(0) {
$c->stash->{otherEval} = $eval2 if defined $eval2;
my @builds = $eval->builds->search({}, { order_by => ["job", "system", "id"], columns => [@buildListColumns] });
my @builds2 = defined $eval2
? $eval2->builds->search({}, { order_by => ["job", "system", "id"], columns => [@buildListColumns] })
: ();
sub cmpBuilds {
my ($a, $b) = @_;
return $a->get_column('job') cmp $b->get_column('job')
|| $a->get_column('system') cmp $b->get_column('system')
}
my @builds = $eval->builds->search($filter, { columns => [@buildListColumns] });
my @builds2 = defined $eval2 ? $eval2->builds->search($filter, { columns => [@buildListColumns] }) : ();
@builds = sort { cmpBuilds($a, $b) } @builds;
@builds2 = sort { cmpBuilds($a, $b) } @builds2;
$c->stash->{stillSucceed} = [];
$c->stash->{stillFail} = [];
@ -63,15 +78,19 @@ sub view : Chained('eval') PathPart('') Args(0) {
$c->stash->{new} = [];
$c->stash->{removed} = [];
$c->stash->{unfinished} = [];
$c->stash->{aborted} = [];
my $n = 0;
foreach my $build (@builds) {
if ($build->finished != 0 && ($build->buildstatus == 3 || $build->buildstatus == 4)) {
push @{$c->stash->{aborted}}, $build;
next;
}
my $d;
my $found = 0;
while ($n < scalar(@builds2)) {
my $build2 = $builds2[$n];
my $d = $build->get_column('job') cmp $build2->get_column('job')
|| $build->get_column('system') cmp $build2->get_column('system');
my $d = cmpBuilds($build, $build2);
last if $d == -1;
if ($d == 0) {
$n++;
@ -135,6 +154,25 @@ sub release : Chained('eval') PathPart('release') Args(0) {
}
sub cancel : Chained('eval') PathPart('cancel') Args(0) {
my ($self, $c) = @_;
requireProjectOwner($c, $c->stash->{eval}->project);
my $n = cancelBuilds($c->model('DB')->schema, $c->stash->{eval}->builds);
$c->flash->{successMsg} = "$n builds have been cancelled.";
$c->res->redirect($c->uri_for($c->controller('JobsetEval')->action_for('view'), $c->req->captures));
}
sub restart_aborted : Chained('eval') PathPart('restart-aborted') Args(0) {
my ($self, $c) = @_;
requireProjectOwner($c, $c->stash->{eval}->project);
my $builds = $c->stash->{eval}->builds->search({ finished => 1, buildstatus => { -in => [3, 4] } });
my $n = restartBuilds($c->model('DB')->schema, $builds);
$c->flash->{successMsg} = "$n builds have been restarted.";
$c->res->redirect($c->uri_for($c->controller('JobsetEval')->action_for('view'), $c->req->captures));
}
# Hydra::Base::Controller::NixChannel needs this.
sub nix : Chained('eval') PathPart('channel') CaptureArgs(0) {
my ($self, $c) = @_;
@ -144,8 +182,20 @@ sub nix : Chained('eval') PathPart('channel') CaptureArgs(0) {
->search({ finished => 1, buildstatus => 0 },
{ columns => [@buildListColumns, 'drvpath', 'description', 'homepage']
, join => ["buildoutputs"]
, order_by => ["build.id", "buildoutputs.name"]
, '+select' => ['buildoutputs.path', 'buildoutputs.name'], '+as' => ['outpath', 'outname'] });
}
sub job : Chained('eval') PathPart('job') {
my ($self, $c, $job, @rest) = @_;
my $build = $c->stash->{eval}->builds->find({job => $job});
notFound($c, "This evaluation has no job with the specified name.") unless defined $build;
$c->res->redirect($c->uri_for($c->controller('Build')->action_for("build"), [$build->id], @rest));
}
1;
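
For reference, the three forms of the compare query parameter handled by the branches above (values hypothetical):

    # ?compare=1234    an evaluation id to diff against
    # ?compare=-86400  the newest eval at least 86400 seconds older than this one
    # ?compare=trunk   the jobset named "trunk" in the same project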


@ -1,5 +1,6 @@
package Hydra::Controller::Project;
use utf8;
use strict;
use warnings;
use base 'Hydra::Base::Controller::ListBuilds';
@ -9,35 +10,15 @@ use Hydra::Helper::CatalystUtils;
sub projectChain :Chained('/') :PathPart('project') :CaptureArgs(1) {
my ($self, $c, $projectName) = @_;
$c->stash->{params}->{name} //= $projectName;
my $project = $c->model('DB::Projects')->find($projectName, { columns => [
"me.name",
"me.displayName",
"me.description",
"me.enabled",
"me.hidden",
"me.homepage",
"owner.username",
"owner.fullname",
"views.name",
"releases.name",
"releases.timestamp",
"jobsets.name",
], join => [ 'owner', 'views', 'releases', 'jobsets' ], order_by => { -desc => "releases.timestamp" }, collapse => 1 });
$c->stash->{project} = $c->model('DB::Projects')->find($projectName, {
join => [ 'releases' ],
order_by => { -desc => "releases.timestamp" },
});
if ($project) {
$c->stash->{project} = $project;
} else {
if ($c->action->name eq "project" and $c->request->method eq "PUT") {
$c->stash->{projectName} = $projectName;
} else {
$self->status_not_found(
$c,
message => "Project $projectName doesn't exist."
);
$c->detach;
}
}
notFound($c, "Project $projectName doesn't exist.")
if !$c->stash->{project} && !($c->action->name eq "project" and $c->request->method eq "PUT");
}
@ -53,55 +34,27 @@ sub project_GET {
$c->stash->{releases} = [$c->stash->{project}->releases->search({},
{order_by => ["timestamp DESC"]})];
$self->status_ok(
$c,
entity => $c->stash->{project}
);
$self->status_ok($c, entity => $c->stash->{project});
}
sub project_PUT {
my ($self, $c) = @_;
if (defined $c->stash->{project}) {
error($c, "Cannot rename project `$c->stash->{params}->{oldName}' over existing project `$c->stash->{project}->name") if defined $c->stash->{params}->{oldName};
requireProjectOwner($c, $c->stash->{project});
txn_do($c->model('DB')->schema, sub {
updateProject($c, $c->stash->{project});
});
if ($c->req->looks_like_browser) {
$c->res->redirect($c->uri_for($self->action_for("project"), [$c->stash->{project}->name]) . "#tabs-configuration");
} else {
$self->status_no_content($c);
}
} elsif (defined $c->stash->{params}->{oldName}) {
my $project = $c->model('DB::Projects')->find($c->stash->{params}->{oldName});
if (defined $project) {
requireProjectOwner($c, $project);
txn_do($c->model('DB')->schema, sub {
updateProject($c, $project);
});
my $uri = $c->uri_for($self->action_for("project"), [$c->stash->{project}->name]) . "#tabs-configuration";
$self->status_ok($c, entity => { redirect => "$uri" });
my $uri = $c->uri_for($self->action_for("project"), [$project->name]);
$c->flash->{successMsg} = "The project configuration has been updated.";
}
if ($c->req->looks_like_browser) {
$c->res->redirect($uri . "#tabs-configuration");
} else {
$self->status_created(
$c,
location => "$uri",
entity => { name => $project->name, uri => "$uri", type => "project" }
);
}
} else {
$self->status_not_found(
$c,
message => "Project $c->stash->{params}->{oldName} doesn't exist."
);
}
} else {
else {
requireMayCreateProjects($c);
error($c, "Invalid project name: $c->stash->{projectName}") if $c->stash->{projectName} !~ /^$projectNameRE$/;
my $project;
txn_do($c->model('DB')->schema, sub {
@ -110,23 +63,34 @@ sub project_PUT {
# valid. Idem for the owner.
my $owner = $c->user->username;
$project = $c->model('DB::Projects')->create(
{name => $c->stash->{projectName}, displayname => "", owner => $owner});
{ name => ".tmp", displayname => "", owner => $owner });
updateProject($c, $project);
});
my $uri = $c->uri_for($self->action_for("project"), [$project->name]);
if ($c->req->looks_like_browser) {
$c->res->redirect($uri . "#tabs-configuration");
} else {
$self->status_created(
$c,
location => "$uri",
entity => { name => $project->name, uri => "$uri", type => "project" }
);
}
$self->status_created($c,
location => "$uri",
entity => { name => $project->name, uri => "$uri", redirect => "$uri", type => "project" });
}
}
sub project_DELETE {
my ($self, $c) = @_;
requireProjectOwner($c, $c->stash->{project});
txn_do($c->model('DB')->schema, sub {
$c->stash->{project}->jobsetevals->delete;
$c->stash->{project}->builds->delete;
$c->stash->{project}->delete;
});
my $uri = $c->res->redirect($c->uri_for("/"));
$self->status_ok($c, entity => { redirect => "$uri" });
$c->flash->{successMsg} = "The project has been deleted.";
}
sub edit : Chained('projectChain') PathPart Args(0) {
my ($self, $c) = @_;
@ -138,36 +102,10 @@ sub edit : Chained('projectChain') PathPart Args(0) {
}
sub submit : Chained('projectChain') PathPart Args(0) {
my ($self, $c) = @_;
requirePost($c);
if (($c->request->params->{submit} // "") eq "delete") {
txn_do($c->model('DB')->schema, sub {
$c->stash->{project}->jobsetevals->delete_all;
$c->stash->{project}->builds->delete_all;
$c->stash->{project}->delete;
});
return $c->res->redirect($c->uri_for("/"));
}
my $newName = trim $c->stash->{params}->{name};
my $oldName = trim $c->stash->{project}->name;
unless ($oldName eq $newName) {
$c->stash->{params}->{oldName} = $oldName;
$c->stash->{projectName} = $newName;
undef $c->stash->{project};
}
project_PUT($self, $c);
}
sub requireMayCreateProjects {
my ($c) = @_;
requireLogin($c) if !$c->user_exists;
error($c, "Only administrators or authorised users can perform this operation.")
requireUser($c);
accessDenied($c, "Only administrators or authorised users can perform this operation.")
unless $c->check_user_roles('admin') || $c->check_user_roles('create-projects');
}
@ -183,15 +121,6 @@ sub create : Path('/create-project') {
}
sub create_submit : Path('/create-project/submit') {
my ($self, $c) = @_;
$c->stash->{projectName} = trim $c->stash->{params}->{name};
project_PUT($self, $c);
}
sub create_jobset : Chained('projectChain') PathPart('create-jobset') Args(0) {
my ($self, $c) = @_;
@ -200,15 +129,7 @@ sub create_jobset : Chained('projectChain') PathPart('create-jobset') Args(0) {
$c->stash->{template} = 'edit-jobset.tt';
$c->stash->{create} = 1;
$c->stash->{edit} = 1;
}
sub create_jobset_submit : Chained('projectChain') PathPart('create-jobset/submit') Args(0) {
my ($self, $c) = @_;
$c->stash->{jobsetName} = trim $c->stash->{params}->{name};
Hydra::Controller::Jobset::jobset_PUT($self, $c);
$c->stash->{totalShares} = getTotalShares($c->model('DB')->schema);
}
@ -218,15 +139,18 @@ sub updateProject {
my $owner = $project->owner;
if ($c->check_user_roles('admin') and defined $c->stash->{params}->{owner}) {
$owner = trim $c->stash->{params}->{owner};
error($c, "Invalid owner: $owner")
unless defined $c->model('DB::Users')->find({username => $owner});
error($c, "The user name $owner does not exist.")
unless defined $c->model('DB::Users')->find($owner);
}
my $projectName = $c->stash->{projectName} or $project->name;
error($c, "Invalid project name: $projectName") if $projectName !~ /^$projectNameRE$/;
my $projectName = $c->stash->{params}->{name};
error($c, "Invalid project identifier $projectName.") if $projectName !~ /^$projectNameRE$/;
error($c, "Cannot rename project to $projectName since that identifier is already taken.")
if $projectName ne $project->name && defined $c->model('DB::Projects')->find($projectName);
my $displayName = trim $c->stash->{params}->{displayname};
error($c, "Invalid display name: $displayName") if $displayName eq "";
error($c, "You must specify a display name.") if $displayName eq "";
$project->update(
{ name => $projectName
@ -244,10 +168,6 @@ sub updateProject {
sub get_builds : Chained('projectChain') PathPart('') CaptureArgs(0) {
my ($self, $c) = @_;
$c->stash->{allBuilds} = $c->stash->{project}->builds;
$c->stash->{jobStatus} = $c->model('DB')->resultset('JobStatusForProject')
->search({}, {bind => [$c->stash->{project}->name]});
$c->stash->{allJobsets} = $c->stash->{project}->jobsets;
$c->stash->{allJobs} = $c->stash->{project}->jobs;
$c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceededForProject')
->search({}, {bind => [$c->stash->{project}->name]});
$c->stash->{channelBaseName} = $c->stash->{project}->name;


@ -38,7 +38,7 @@ sub updateRelease {
, description => trim $c->request->params->{description}
});
$release->releasemembers->delete_all;
$release->releasemembers->delete;
foreach my $param (keys %{$c->request->params}) {
next unless $param =~ /^member-(\d+)-description$/;
my $buildId = $1;
@ -72,7 +72,7 @@ sub submit : Chained('release') PathPart('submit') Args(0) {
txn_do($c->model('DB')->schema, sub {
updateRelease($c, $c->stash->{release});
});
$c->res->redirect($c->uri_for($self->action_for("project"),
$c->res->redirect($c->uri_for($self->action_for("view"),
[$c->stash->{project}->name, $c->stash->{release}->name]));
}
}


@ -8,6 +8,7 @@ use Hydra::Helper::CatalystUtils;
use Digest::SHA1 qw(sha1_hex);
use Nix::Store;
use Nix::Config;
use Encode;
# Put this controller at top-level.
__PACKAGE__->config->{namespace} = '';
@ -33,6 +34,7 @@ sub begin :Private {
$c->stash->{inputTypes} = {
'string' => 'String value',
'boolean' => 'Boolean',
'nix' => 'Nix expression',
'build' => 'Build output',
'sysbuild' => 'Build output (same system)'
};
@ -54,12 +56,8 @@ sub index :Path :Args(0) {
$c->stash->{template} = 'overview.tt';
$c->stash->{projects} = [$c->model('DB::Projects')->search(isAdmin($c) ? {} : {hidden => 0}, {order_by => 'name'})];
$c->stash->{newsItems} = [$c->model('DB::NewsItems')->search({}, { order_by => ['createtime DESC'], rows => 5 })];
$self->status_ok(
$c,
entity => [$c->model('DB::Projects')->search(isAdmin($c) ? {} : {hidden => 0}, {
order_by => 'name',
columns => [ 'name', 'displayname' ]
})]
$self->status_ok($c,
entity => $c->stash->{projects}
);
}
@ -72,8 +70,7 @@ sub queue_GET {
$c->stash->{flashMsg} //= $c->flash->{buildMsg};
$self->status_ok(
$c,
entity => [$c->model('DB::Builds')->search(
{finished => 0}, { join => ['project'], order_by => ["priority DESC", "id"], columns => [@buildListColumns], '+select' => ['project.enabled'], '+as' => ['enabled'] })]
entity => [$c->model('DB::Builds')->search({finished => 0}, { order_by => ["priority DESC", "id"]})]
);
}
@ -100,22 +97,7 @@ sub status_GET {
$c,
entity => [ $c->model('DB::BuildSteps')->search(
{ 'me.busy' => 1, 'build.finished' => 0, 'build.busy' => 1 },
{ join => { build => [ 'project', 'job', 'jobset' ] },
columns => [
'me.machine',
'me.system',
'me.stepnr',
'me.drvpath',
'me.starttime',
'build.id',
{
'build.project.name' => 'project.name',
'build.jobset.name' => 'jobset.name',
'build.job.name' => 'job.name'
}
],
order_by => [ 'machine' ]
}
{ order_by => [ 'machine' ], join => [ 'build' ] }
) ]
);
}
@ -150,11 +132,9 @@ sub machines :Local Args(0) {
sub get_builds : Chained('/') PathPart('') CaptureArgs(0) {
my ($self, $c) = @_;
$c->stash->{allBuilds} = $c->model('DB::Builds');
$c->stash->{jobStatus} = $c->model('DB')->resultset('JobStatus');
$c->stash->{allJobsets} = $c->model('DB::Jobsets');
$c->stash->{allJobs} = $c->model('DB::Jobs');
$c->stash->{latestSucceeded} = $c->model('DB')->resultset('LatestSucceeded');
$c->stash->{channelBaseName} = "everything";
$c->stash->{total} = $c->model('DB::NrBuilds')->find('finished')->count;
}
@ -213,35 +193,32 @@ sub default :Path {
sub end : ActionClass('RenderView') {
my ($self, $c) = @_;
my @errors = map { encode_utf8($_); } @{$c->error};
if (defined $c->stash->{json}) {
if (scalar @{$c->error}) {
$c->stash->{json}->{error} = join "\n", @{$c->error};
if (scalar @errors) {
$c->stash->{json}->{error} = join "\n", @errors;
$c->clear_errors;
}
$c->forward('View::JSON');
}
if (scalar @{$c->error}) {
$c->stash->{resource} = { errors => "$c->error" };
elsif (scalar @{$c->error}) {
$c->stash->{resource} = { error => join "\n", @{$c->error} };
$c->stash->{template} = 'error.tt';
$c->stash->{errors} = $c->error;
$c->stash->{errors} = [@errors];
$c->response->status(500) if $c->response->status == 200;
if ($c->response->status >= 300) {
$c->stash->{httpStatus} =
$c->response->status . " " . HTTP::Status::status_message($c->response->status);
}
$c->clear_errors;
} elsif (defined $c->stash->{resource} and
(ref $c->stash->{resource} eq ref {}) and
defined $c->stash->{resource}->{error}) {
$c->stash->{template} = 'error.tt';
$c->stash->{httpStatus} =
$c->response->status . " " . HTTP::Status::status_message($c->response->status);
}
$c->forward('serialize');
$c->forward('serialize') if defined $c->stash->{resource};
}
sub serialize : ActionClass('Serialize') { }
@ -282,6 +259,7 @@ sub narinfo :LocalRegex('^([a-z0-9]+).narinfo$') :Args(0) {
my $path = queryPathFromHashPart($hash);
if (!$path) {
$c->response->status(404);
$c->response->content_type('text/plain');
$c->stash->{plain}->{data} = "does not exist\n";
$c->forward('Hydra::View::Plain');

View File

@ -182,15 +182,11 @@ sub currentUser :Path('/current-user') :ActionClass('REST') { }
sub currentUser_GET {
my ($self, $c) = @_;
requireLogin($c) if !$c->user_exists;
requireUser($c);
$self->status_ok(
$c,
entity => $c->model('DB::Users')->find({ 'me.username' => $c->user->username}, {
columns => [ "me.fullname", "me.emailaddress", "me.username", "userroles.role" ]
, join => [ "userroles" ]
, collapse => 1
})
entity => $c->model("DB::Users")->find($c->user->username)
);
}
@ -198,9 +194,9 @@ sub currentUser_GET {
sub user :Chained('/') PathPart('user') CaptureArgs(1) {
my ($self, $c, $userName) = @_;
requireLogin($c) if !$c->user_exists;
requireUser($c);
error($c, "You do not have permission to edit other users.")
accessDenied($c, "You do not have permission to edit other users.")
if $userName ne $c->user->username && !isAdmin($c);
$c->stash->{user} = $c->model('DB::Users')->find($userName)
@ -287,7 +283,7 @@ sub edit_POST {
}
if (isAdmin($c)) {
$user->userroles->delete_all;
$user->userroles->delete;
$user->userroles->create({ role => $_})
foreach paramToList($c, "roles");
}
@ -303,4 +299,19 @@ sub edit_POST {
}
sub dashboard :Chained('user') :Args(0) {
my ($self, $c) = @_;
$c->stash->{template} = 'dashboard.tt';
# Get the N most recent builds for each starred job.
$c->stash->{starredJobs} = [];
foreach my $j ($c->stash->{user}->starredjobs->search({}, { order_by => ['project', 'jobset', 'job'] })) {
my @builds = $j->job->builds->search(
{ },
{ rows => 20, order_by => "id desc" });
push $c->stash->{starredJobs}, { job => $j->job, builds => [@builds] };
}
}
1;

View File

@ -41,7 +41,7 @@ sub updateView {
{ name => $viewName
, description => trim $c->request->params->{description} });
$view->viewjobs->delete_all;
$view->viewjobs->delete;
foreach my $param (keys %{$c->request->params}) {
next unless $param =~ /^job-(\d+)-name$/;

View File

@ -2,6 +2,7 @@ package Hydra::Helper::AddBuilds;
use strict;
use feature 'switch';
use utf8;
use XML::Simple;
use IPC::Run;
use Nix::Store;
@ -15,6 +16,7 @@ use File::Path;
use File::Temp;
use File::Spec;
use File::Slurp;
use Hydra::Helper::PluginHooks;
our @ISA = qw(Exporter);
our @EXPORT = qw(
@ -86,10 +88,7 @@ sub fetchInputBuild {
{ order_by => "me.id DESC", rows => 1
, where => \ attrsToSQL($attrs, "me.id") });
if (!defined $prevBuild || !isValidPath(getMainOutput($prevBuild)->path)) {
print STDERR "input `", $name, "': no previous build available\n";
return undef;
}
return () if !defined $prevBuild || !isValidPath(getMainOutput($prevBuild)->path);
#print STDERR "input `", $name, "': using build ", $prevBuild->id, "\n";
@ -148,9 +147,8 @@ sub fetchInputSystemBuild {
return @inputs;
}
sub fetchInput {
my ($plugins, $db, $project, $jobset, $name, $type, $value) = @_;
my ($plugins, $db, $project, $jobset, $name, $type, $value, $emailresponsible) = @_;
my @inputs;
if ($type eq "build") {
@ -159,7 +157,7 @@ sub fetchInput {
elsif ($type eq "sysbuild") {
@inputs = fetchInputSystemBuild($db, $project, $jobset, $name, $value);
}
elsif ($type eq "string") {
elsif ($type eq "string" || $type eq "nix") {
die unless defined $value;
@inputs = { value => $value };
}
@ -170,7 +168,7 @@ sub fetchInput {
else {
my $found = 0;
foreach my $plugin (@{$plugins}) {
@inputs = $plugin->fetchInput($type, $name, $value);
@inputs = $plugin->fetchInput($type, $name, $value, $project, $jobset);
if (defined $inputs[0]) {
$found = 1;
last;
@ -179,7 +177,10 @@ sub fetchInput {
die "input `$name' has unknown type `$type'." unless $found;
}
$_->{type} = $type foreach @inputs;
foreach my $input (@inputs) {
$input->{type} = $type;
$input->{emailresponsible} = $emailresponsible;
}
return @inputs;
}
@ -243,6 +244,9 @@ sub inputsToArgs {
when ("boolean") {
push @res, "--arg", $input, booleanToString($exprType, $alt->{value});
}
when ("nix") {
push @res, "--arg", $input, $alt->{value};
}
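# Illustrative example (not from the original source): a hypothetical 'nix'
# input named "config" whose value is "{ allowUnfree = true; }" is passed to
# the evaluator verbatim, roughly as
#   ("--arg", "config", "{ allowUnfree = true; }"),
# i.e. the value is used as a Nix expression instead of being stringified as
# in the default case.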
default {
push @res, "--arg", $input, buildInputToString($exprType, $alt);
}
@ -287,17 +291,25 @@ sub evalJobs {
my $validJob = 1;
foreach my $arg (@{$job->{arg}}) {
my $input = $inputInfo->{$arg->{name}}->[$arg->{altnr}];
if ($input->{type} eq "sysbuild" && $input->{system} ne $job->{system}) {
$validJob = 0;
}
}
if ($validJob) {
push(@filteredJobs, $job);
$validJob = 0 if $input->{type} eq "sysbuild" && $input->{system} ne $job->{system};
}
push(@filteredJobs, $job) if $validJob;
}
$jobs->{job} = \@filteredJobs;
return ($jobs, $nixExprInput);
my %jobNames;
my $errors;
foreach my $job (@{$jobs->{job}}) {
$jobNames{$job->{jobName}}++;
if ($jobNames{$job->{jobName}} == 2) {
$errors .= "warning: there are multiple jobs named $job->{jobName}; support for this will go away soon!\n\n";
}
}
# Handle utf-8 characters in error messages. No idea why this works.
utf8::decode($_->{msg}) foreach @{$jobs->{error}};
return ($jobs, $nixExprInput, $errors);
}
@ -389,7 +401,7 @@ sub getPrevJobsetEval {
# Check whether to add the build described by $buildInfo.
sub checkBuild {
my ($db, $project, $jobset, $inputInfo, $nixExprInput, $buildInfo, $buildIds, $prevEval, $jobOutPathMap) = @_;
my ($db, $jobset, $inputInfo, $nixExprInput, $buildInfo, $buildMap, $prevEval, $jobOutPathMap, $plugins) = @_;
my @outputNames = sort keys %{$buildInfo->{output}};
die unless scalar @outputNames;
@ -410,9 +422,7 @@ sub checkBuild {
my $build;
txn_do($db, sub {
my $job = $jobset->jobs->update_or_create(
{ name => $jobName
});
my $job = $jobset->jobs->update_or_create({ name => $jobName });
# Don't add a build that has already been scheduled for this
# job, or has been built but is still a "current" build for
@ -433,19 +443,19 @@ sub checkBuild {
# semantically unnecessary (because they're implied by
# the eval), but they give a factor 1000 speedup on
# the Nixpkgs jobset with PostgreSQL.
{ project => $project->name, jobset => $jobset->name, job => $job->name,
{ project => $jobset->project->name, jobset => $jobset->name, job => $jobName,
name => $firstOutputName, path => $firstOutputPath },
{ rows => 1, columns => ['id'], join => ['buildoutputs'] });
if (defined $prevBuild) {
print STDERR " already scheduled/built as build ", $prevBuild->id, "\n";
$buildIds->{$prevBuild->id} = 0;
$buildMap->{$prevBuild->id} = { id => $prevBuild->id, jobName => $jobName, new => 0, drvPath => $drvPath };
return;
}
}
# Prevent multiple builds with the same (job, outPath) from
# being added.
my $prev = $$jobOutPathMap{$job->name . "\t" . $firstOutputPath};
my $prev = $$jobOutPathMap{$jobName . "\t" . $firstOutputPath};
if (defined $prev) {
print STDERR " already scheduled as build ", $prev, "\n";
return;
@ -511,12 +521,13 @@ sub checkBuild {
$build->buildoutputs->create({ name => $_, path => $buildInfo->{output}->{$_}->{path} })
foreach @outputNames;
$buildIds->{$build->id} = 1;
$$jobOutPathMap{$job->name . "\t" . $firstOutputPath} = $build->id;
$buildMap->{$build->id} = { id => $build->id, jobName => $jobName, new => 1, drvPath => $drvPath };
$$jobOutPathMap{$jobName . "\t" . $firstOutputPath} = $build->id;
if ($build->iscachedbuild) {
print STDERR " marked as cached build ", $build->id, "\n";
addBuildProducts($db, $build);
notifyBuildFinished($plugins, $build, []);
} else {
print STDERR " added to queue as build ", $build->id, "\n";
}
@ -545,6 +556,7 @@ sub checkBuild {
, uri => $input->{uri}
, revision => $input->{revision}
, value => $input->{value}
, emailresponsible => $input->{emailresponsible}
, dependency => $input->{id}
, path => $input->{storePath} || "" # !!! temporary hack
, sha256hash => $input->{sha256hash}
@ -556,29 +568,4 @@ sub checkBuild {
};
sub restartBuild {
my ($db, $build) = @_;
txn_do($db, sub {
my @paths;
push @paths, $build->drvpath;
push @paths, $_->drvpath foreach $build->buildsteps;
my $r = `nix-store --clear-failed-paths @paths`;
$build->update(
{ finished => 0
, busy => 0
, locker => ""
, iscachedbuild => 0
});
$build->buildproducts->delete_all;
# Reset the stats for the evals to which this build belongs.
# !!! Should do this in a trigger.
foreach my $m ($build->jobsetevalmembers->all) {
$m->eval->update({nrsucceeded => undef});
}
});
}
1;

View File

@ -15,8 +15,8 @@ use feature qw/switch/;
our @ISA = qw(Exporter);
our @EXPORT = qw(
getBuild getPreviousBuild getNextBuild getPreviousSuccessfulBuild
error notFound
requireLogin requireProjectOwner requireAdmin requirePost isAdmin isProjectOwner
error notFound accessDenied
forceLogin requireUser requireProjectOwner requireAdmin requirePost isAdmin isProjectOwner
trim
getLatestFinishedEval
sendEmail
@ -27,6 +27,7 @@ our @EXPORT = qw(
parseJobsetName
showJobName
showStatus
getResponsibleAuthors
);
@ -102,6 +103,12 @@ sub notFound {
}
sub accessDenied {
my ($c, $msg) = @_;
error($c, $msg, 403);
}
sub backToReferer {
my ($c) = @_;
$c->response->redirect($c->session->{referer} || $c->uri_for('/'));
@ -110,26 +117,33 @@ sub backToReferer {
}
sub requireLogin {
sub forceLogin {
my ($c) = @_;
$c->session->{referer} = $c->request->uri;
error($c, "This page requires you to sign in.", 403);
accessDenied($c, "This page requires you to sign in.");
}
sub requireUser {
my ($c) = @_;
forceLogin($c) if !$c->user_exists;
}
sub isProjectOwner {
my ($c, $project) = @_;
return $c->user_exists && ($c->check_user_roles('admin') || $c->user->username eq $project->owner->username || defined $c->model('DB::ProjectMembers')->find({ project => $project, userName => $c->user->username }));
return
$c->user_exists &&
(isAdmin($c) ||
$c->user->username eq $project->owner->username ||
defined $c->model('DB::ProjectMembers')->find({ project => $project, userName => $c->user->username }));
}
sub requireProjectOwner {
my ($c, $project) = @_;
requireLogin($c) if !$c->user_exists;
error($c, "Only the project members or administrators can perform this operation.", 403)
requireUser($c);
accessDenied($c, "Only the project members or administrators can perform this operation.")
unless isProjectOwner($c, $project);
}
@ -142,8 +156,8 @@ sub isAdmin {
sub requireAdmin {
my ($c) = @_;
requireLogin($c) if !$c->user_exists;
error($c, "Only administrators can perform this operation.", 403)
requireUser($c);
accessDenied($c, "Only administrators can perform this operation.")
unless isAdmin($c);
}
@ -206,12 +220,12 @@ sub paramToList {
# Security checking of filenames.
Readonly our $pathCompRE => "(?:[A-Za-z0-9-\+\._\$][A-Za-z0-9-\+\._\$]*)";
Readonly our $pathCompRE => "(?:[A-Za-z0-9-\+\._\$][A-Za-z0-9-\+\._\$:]*)";
Readonly our $relPathRE => "(?:$pathCompRE(?:/$pathCompRE)*)";
Readonly our $relNameRE => "(?:[A-Za-z0-9-_][A-Za-z0-9-\._]*)";
Readonly our $attrNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)";
Readonly our $projectNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)";
Readonly our $jobsetNameRE => "(?:[A-Za-z_][A-Za-z0-9-_]*)";
Readonly our $jobsetNameRE => "(?:[A-Za-z_][A-Za-z0-9-_\.]*)";
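# The added "\." in $jobsetNameRE means jobset names containing dots
# (e.g. a hypothetical "nixos-13.10" jobset) are now accepted.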
Readonly our $jobNameRE => "(?:$attrNameRE(?:\\.$attrNameRE)*)";
Readonly our $systemRE => "(?:[a-z0-9_]+-[a-z0-9_]+)";
Readonly our $userNameRE => "(?:[a-z][a-z0-9_\.]*)";
@ -246,4 +260,42 @@ sub showStatus {
}
# Determine who broke/fixed the build.
sub getResponsibleAuthors {
my ($build, $plugins) = @_;
my $prevBuild = getPreviousBuild($build);
my $nrCommits = 0;
my %authors;
my @emailable_authors;
if ($prevBuild) {
foreach my $curInput ($build->buildinputs_builds) {
next unless ($curInput->type eq "git" || $curInput->type eq "hg");
my $prevInput = $prevBuild->buildinputs_builds->find({ name => $curInput->name });
next unless defined $prevInput;
next if $curInput->type ne $prevInput->type;
next if $curInput->uri ne $prevInput->uri;
next if $curInput->revision eq $prevInput->revision;
my @commits;
foreach my $plugin (@{$plugins}) {
push @commits, @{$plugin->getCommits($curInput->type, $curInput->uri, $prevInput->revision, $curInput->revision)};
}
foreach my $commit (@commits) {
#print STDERR "$commit->{revision} by $commit->{author}\n";
$authors{$commit->{author}} = $commit->{email};
push @emailable_authors, $commit->{email} if $curInput->emailresponsible;
$nrCommits++;
}
}
}
return (\%authors, $nrCommits, \@emailable_authors);
}
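# Usage sketch (mirroring the notification plugins elsewhere in this change):
#   my ($authors, $nrCommits, $emailable) = getResponsibleAuthors($build, $plugins);
# %$authors maps author names to email addresses, $nrCommits counts the
# commits between the previous and the current build inputs, and @$emailable
# holds the addresses of authors on inputs flagged as emailresponsible.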
1;

View File

@ -7,6 +7,7 @@ use File::Basename;
use Config::General;
use Hydra::Helper::CatalystUtils;
use Hydra::Model::DB;
use Nix::Store;
our @ISA = qw(Exporter);
our @EXPORT = qw(
@ -16,11 +17,13 @@ our @EXPORT = qw(
getPrimaryBuildsForView
getPrimaryBuildTotal
getViewResult getLatestSuccessfulViewResult
jobsetOverview removeAsciiEscapes getDrvLogPath logContents
jobsetOverview removeAsciiEscapes getDrvLogPath findLog logContents
getMainOutput
getEvals getMachines
pathIsInsidePrefix
captureStdoutStderr);
captureStdoutStderr run grab
getTotalShares
cancelBuilds restartBuilds);
sub getHydraHome {
@ -42,11 +45,12 @@ sub getHydraConfig {
# doesn't work.
sub txn_do {
my ($db, $coderef) = @_;
my $res;
while (1) {
eval {
$db->txn_do($coderef);
$res = $db->txn_do($coderef);
};
last if !$@;
return $res if !$@;
die $@ unless $@ =~ "database is locked";
}
}
@ -253,21 +257,46 @@ sub getLatestSuccessfulViewResult {
sub getDrvLogPath {
my ($drvPath) = @_;
my $base = basename $drvPath;
my $fn =
($ENV{NIX_LOG_DIR} || "/nix/var/log/nix") . "/drvs/"
. substr($base, 0, 2) . "/"
. substr($base, 2);
return $fn if -f $fn;
$fn .= ".bz2";
return $fn if -f $fn;
my $bucketed = substr($base, 0, 2) . "/" . substr($base, 2);
my $fn = ($ENV{NIX_LOG_DIR} || "/nix/var/log/nix") . "/drvs/";
for ($fn . $bucketed . ".bz2", $fn . $bucketed, $fn . $base . ".bz2", $fn . $base) {
return $_ if (-f $_);
}
return undef;
}
# Find the log of the derivation denoted by $drvPath. If it doesn't

# exist, try other derivations that produced its outputs (@outPaths).
sub findLog {
my ($c, $drvPath, @outPaths) = @_;
if (defined $drvPath) {
my $logPath = getDrvLogPath($drvPath);
return $logPath if defined $logPath;
}
return undef if scalar @outPaths == 0;
my @steps = $c->model('DB::BuildSteps')->search(
{ path => { -in => [@outPaths] } },
{ select => ["drvpath"]
, distinct => 1
, join => "buildstepoutputs"
});
foreach my $step (@steps) {
next unless defined $step->drvpath;
my $logPath = getDrvLogPath($step->drvpath);
return $logPath if defined $logPath;
}
return undef;
}
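# Usage sketch (assumed caller, not part of this change): a controller that
# needs a build's log might call
#   my $logPath = findLog($c, $build->drvpath, map { $_->path } $build->buildoutputs);
# falling back to other derivations that produced the same outputs when the
# build's own log has been garbage-collected.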
sub logContents {
my ($drvPath, $tail) = @_;
my $logPath = getDrvLogPath($drvPath);
die unless defined $logPath;
my ($logPath, $tail) = @_;
my $cmd;
if ($logPath =~ /.bz2$/) {
$cmd = "bzip2 -d < $logPath";
@ -381,7 +410,7 @@ sub getEvals {
}
sub getMachines {
my $machinesConf = $ENV{"NIX_REMOTE_SYSTEMS"} || "/etc/nix.machines";
my $machinesConf = $ENV{"NIX_REMOTE_SYSTEMS"} || "/etc/nix/machines";
# Read the list of machines.
my %machines = ();
@ -472,4 +501,102 @@ sub captureStdoutStderr {
}
sub run {
my (%args) = @_;
my $res = { stdout => "", stderr => "" };
my $stdin = "";
eval {
local $SIG{ALRM} = sub { die "timeout\n" }; # NB: \n required
alarm $args{timeout} if defined $args{timeout};
my @x = ($args{cmd}, \$stdin, \$res->{stdout});
push @x, \$res->{stderr} if $args{grabStderr} // 1;
IPC::Run::run(@x,
init => sub { chdir $args{dir} or die "changing to $args{dir}" if defined $args{dir}; });
alarm 0;
};
if ($@) {
die unless $@ eq "timeout\n"; # propagate unexpected errors
$res->{status} = -1;
$res->{stderr} = "timeout\n";
} else {
$res->{status} = $?;
chomp $res->{stdout} if $args{chomp} // 0;
}
return $res;
}
sub grab {
my (%args) = @_;
my $res = run(%args, grabStderr => 0);
die "command `@{$args{cmd}}' failed with exit status $res->{status}" if $res->{status};
return $res->{stdout};
}
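# Usage sketches (illustrative): run() always returns a hashref with status,
# stdout and stderr; grab() dies if the command fails and returns its stdout.
#   my $res = run(cmd => ["git", "fetch", "origin"], dir => $clonePath, timeout => 600);
#   my $rev = grab(cmd => ["git", "rev-parse", $branch], dir => $clonePath, chomp => 1);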
sub getTotalShares {
my ($db) = @_;
return $db->resultset('Jobsets')->search(
{ 'project.enabled' => 1, 'me.enabled' => { '!=' => 0 } },
{ join => 'project', select => { sum => 'schedulingshares' }, as => 'sum' })->single->get_column('sum');
}
sub cancelBuilds($$) {
my ($db, $builds) = @_;
return txn_do($db, sub {
$builds = $builds->search({ finished => 0, busy => 0 });
my $n = $builds->count;
my $time = time();
$builds->update(
{ finished => 1
, iscachedbuild => 0, buildstatus => 4 # = cancelled
, starttime => $time
, stoptime => $time
});
return $n;
});
}
sub restartBuilds($$) {
my ($db, $builds) = @_;
my $n = 0;
txn_do($db, sub {
my @paths;
$builds = $builds->search({ finished => 1 });
foreach my $build ($builds->all) {
next if !isValidPath($build->drvpath);
push @paths, $build->drvpath;
push @paths, $_->drvpath foreach $build->buildsteps;
registerRoot $build->drvpath;
$build->update(
{ finished => 0
, busy => 0
, locker => ""
, iscachedbuild => 0
});
$n++;
# Reset the stats for the evals to which this build belongs.
# !!! Should do this in a trigger.
$build->jobsetevals->update({nrsucceeded => undef});
}
# Clear Nix's negative failure cache.
# FIXME: Add this to the API.
system("nix-store", "--clear-failed-paths", @paths);
});
return $n;
}
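# Usage sketch (assumed): both helpers take the schema and a Builds resultset
# and return the number of builds affected, e.g.
#   my $n = cancelBuilds($c->model('DB')->schema, $c->stash->{allBuilds}->search({ id => $id }));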
1;

View File

@ -0,0 +1,22 @@
package Hydra::Helper::PluginHooks;
use strict;
use Exporter;
our @ISA = qw(Exporter);
our @EXPORT = qw(
notifyBuildFinished);
sub notifyBuildFinished {
my ($plugins, $build, $dependents) = @_;
foreach my $plugin (@{$plugins}) {
eval {
$plugin->buildFinished($build, $dependents);
};
if ($@) {
print STDERR "$plugin->buildFinished: $@\n";
}
}
}
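# Usage sketch: callers pass the loaded plugin list, the finished build and
# the dependent builds affected by it, as in AddBuilds above:
#   notifyBuildFinished($plugins, $build, []);
# Each plugin's buildFinished hook runs inside an eval, so a failing plugin
# only logs an error and cannot break the others.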
1;

View File

@ -38,7 +38,7 @@ sub supportedInputTypes {
# Called to fetch an input of type $type. $value is the input
# location, typically the repository URL.
sub fetchInput {
my ($self, $type, $name, $value) = @_;
my ($self, $type, $name, $value, $project, $jobset) = @_;
return undef;
}

View File

@ -25,21 +25,8 @@ sub fetchInput {
my $stdout; my $stderr;
my $cacheDir = getSCMCacheDir . "/bzr";
mkpath($cacheDir);
my $clonePath = $cacheDir . "/" . sha256_hex($uri);
if (! -d $clonePath) {
(my $res, $stdout, $stderr) = captureStdoutStderr(600, "bzr", "branch", $uri, $clonePath);
die "error cloning bazaar branch at `$uri':\n$stderr" if $res;
}
chdir $clonePath or die $!;
(my $res, $stdout, $stderr) = captureStdoutStderr(600, "bzr", "pull");
die "error pulling latest change bazaar branch at `$uri':\n$stderr" if $res;
# First figure out the last-modified revision of the URI.
my @cmd = (["bzr", "revno"], "|", ["sed", 's/^ *\([0-9]*\).*/\1/']);
my @cmd = (["bzr", "revno", $uri], "|", ["sed", 's/^ *\([0-9]*\).*/\1/']);
IPC::Run::run(@cmd, \$stdout, \$stderr);
die "cannot get head revision of Bazaar branch at `$uri':\n$stderr" if $?;
@ -61,7 +48,7 @@ sub fetchInput {
$ENV{"NIX_PREFETCH_BZR_LEAVE_DOT_BZR"} = $type eq "bzr-checkout" ? "1" : "0";
(my $res, $stdout, $stderr) = captureStdoutStderr(600,
"nix-prefetch-bzr", $clonePath, $revision);
"nix-prefetch-bzr", $uri, $revision);
die "cannot check out Bazaar branch `$uri':\n$stderr" if $res;
($sha256, $storePath) = split ' ', $stdout;

View File

@ -0,0 +1,104 @@
package Hydra::Plugin::DarcsInput;
use strict;
use parent 'Hydra::Plugin';
use Digest::SHA qw(sha256_hex);
use File::Path;
use Hydra::Helper::Nix;
use Nix::Store;
sub supportedInputTypes {
my ($self, $inputTypes) = @_;
$inputTypes->{'darcs'} = 'Darcs checkout';
}
sub fetchInput {
my ($self, $type, $name, $uri) = @_;
return undef if $type ne "darcs";
my $timestamp = time;
my $sha256;
my $storePath;
my $revCount;
my $cacheDir = getSCMCacheDir . "/git";
mkpath($cacheDir);
my $clonePath = $cacheDir . "/" . sha256_hex($uri);
$uri =~ s|^file://||; # darcs wants paths, not file:// uris
my $stdout = ""; my $stderr = ""; my $res;
if (! -d $clonePath) {
# Clone the repository.
$res = run(timeout => 600,
cmd => ["darcs", "get", "--lazy", $uri, $clonePath],
dir => $ENV{"TMPDIR"});
die "Error getting darcs repo at `$uri':\n$stderr" if $res->{status};
}
# Update the repository to match $uri.
($res, $stdout, $stderr) = captureStdoutStderr(600,
("darcs", "pull", "-a", "--repodir", $clonePath, "$uri"));
die "Error fetching latest change from darcs repo at `$uri':\n$stderr" if $res;
($res, $stdout, $stderr) = captureStdoutStderr(600,
("darcs", "changes", "--last", "1", "--xml", "--repodir", $clonePath));
die "Error getting revision ID of darcs repo at `$uri':\n$stderr" if $res;
$stdout =~ /^<patch.*hash='([0-9a-fA-F-]+)'/sm; # sigh.
my $revision = $1;
die "Error obtaining revision from output: $stdout\nstderr = $stderr)" unless $revision =~ /^[0-9a-fA-F-]+$/;
die "Error getting a revision identifier at `$uri':\n$stderr" if $res;
# Some simple caching: don't check a uri/revision more than once.
my $cachedInput ;
($cachedInput) = $self->{db}->resultset('CachedDarcsInputs')->search(
{uri => $uri, revision => $revision},
{rows => 1});
if (defined $cachedInput && isValidPath($cachedInput->storepath)) {
$storePath = $cachedInput->storepath;
$sha256 = $cachedInput->sha256hash;
$revision = $cachedInput->revision;
$revCount = $cachedInput->revcount;
} else {
# Then download this revision into the store.
print STDERR "checking out darcs repo $uri\n";
my $tmpDir = File::Temp->newdir("hydra-darcs-export.XXXXXX", CLEANUP => 1, TMPDIR => 1) or die;
(system "darcs", "get", "--lazy", $clonePath, "$tmpDir/export", "--quiet",
"--to-match", "hash $revision") == 0
or die "darcs export failed";
$revCount = `darcs changes --count --repodir $tmpDir/export`; chomp $revCount;
die "darcs changes --count failed" if $? != 0;
system "rm", "-rf", "$tmpDir/export/_darcs";
$storePath = addToStore("$tmpDir/export", 1, "sha256");
$sha256 = queryPathHash($storePath);
$sha256 =~ s/sha256://;
txn_do($self->{db}, sub {
$self->{db}->resultset('CachedDarcsInputs')->update_or_create(
{ uri => $uri
, revision => $revision
, revcount => $revCount
, sha256hash => $sha256
, storepath => $storePath
});
});
}
$revision =~ /^([0-9]+)/;
my $shortRev = $1;
return
{ uri => $uri
, storePath => $storePath
, sha256hash => $sha256
, revision => $revision
, revCount => int($revCount)
, shortRev => $shortRev
};
}
1;

View File

@ -28,6 +28,10 @@ The following dependent jobs also failed:
* [% showJobName(b) %] ([% baseurl %]/build/[% b.id %])
[% END -%]
[% END -%]
[% IF nrCommits > 0 -%]
This is likely due to [% IF nrCommits > 1 -%][% nrCommits %] commits by [% END -%][% authorList %].
[% END -%]
[% IF build.buildstatus == 0 -%]
Yay!
@ -66,7 +70,7 @@ sub buildFinished {
my $to = $b->jobset->emailoverride ne "" ? $b->jobset->emailoverride : $b->maintainers;
foreach my $address (split ",", $to) {
foreach my $address (split ",", ($to // "")) {
$address = trim $address;
$addresses{$address} //= { builds => [] };
@ -74,6 +78,14 @@ sub buildFinished {
}
}
my ($authors, $nrCommits, $emailable_authors) = getResponsibleAuthors($build, $self->{plugins});
my $authorList;
if (scalar keys %{$authors} > 0) {
my @x = map { "$_ <$authors->{$_}>" } (sort keys %{$authors});
$authorList = join(" or ", scalar @x > 1 ? join(", ", @x[0..scalar @x - 2]): (), $x[-1]);
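# e.g. three authors yield "Alice <a@example.org>, Bob <b@example.org> or Carol <c@example.org>"
# (illustrative names: all but the last are joined with ", ", the last with " or ").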
$addresses{$_} = { builds => [ $build ] } foreach (@{$emailable_authors});
}
# Send an email to each interested address.
# !!! should use the Template Toolkit here.
@ -89,6 +101,8 @@ sub buildFinished {
, baseurl => $self->{config}->{'base_uri'} || "http://localhost:3000"
, showJobName => \&showJobName, showStatus => \&showStatus
, showSystem => index($build->job->name, $build->system) == -1
, nrCommits => $nrCommits
, authorList => $authorList
};
my $body;

View File

@ -20,39 +20,34 @@ sub _cloneRepo {
mkpath($cacheDir);
my $clonePath = $cacheDir . "/" . sha256_hex($uri);
my $stdout = ""; my $stderr = ""; my $res;
my $res;
if (! -d $clonePath) {
# Clone everything and fetch the branch.
# TODO: Optimize the first clone by using "git init $clonePath" and "git remote add origin $uri".
($res, $stdout, $stderr) = captureStdoutStderr(600, "git", "clone", "--branch", $branch, $uri, $clonePath);
die "error cloning git repo at `$uri':\n$stderr" if $res;
$res = run(cmd => ["git", "clone", "--branch", $branch, $uri, $clonePath], timeout => 600);
die "error cloning git repo at `$uri':\n$res->{stderr}" if $res->{status};
}
chdir $clonePath or die $!; # !!! urgh, shouldn't do a chdir
# This command forces the local branch to match the remote branch,
# whatever the current state of the repository is. It mirrors only one
# branch of the remote repository.
($res, $stdout, $stderr) = captureStdoutStderr(600,
"git", "fetch", "-fu", "origin", "+$branch:$branch");
($res, $stdout, $stderr) = captureStdoutStderr(600,
"git", "fetch", "-fu", "origin") if $res;
die "error fetching latest change from git repo at `$uri':\n$stderr" if $res;
$res = run(cmd => ["git", "fetch", "-fu", "origin", "+$branch:$branch"], dir => $clonePath, timeout => 600);
$res = run(cmd => ["git", "fetch", "-fu", "origin"], dir => $clonePath, timeout => 600) if $res->{status};
die "error fetching latest change from git repo at `$uri':\n$res->{stderr}" if $res->{status};
# If deepClone is defined, then we look at the content of the repository
# to determine if this is a top-git branch.
if (defined $deepClone) {
# Checkout the branch to look at its content.
($res, $stdout, $stderr) = captureStdoutStderr(600, "git", "checkout", "$branch");
die "error checking out Git branch '$branch' at `$uri':\n$stderr" if $res;
$res = run(cmd => ["git", "checkout", "$branch"], dir => $clonePath);
die "error checking out Git branch '$branch' at `$uri':\n$res->{stderr}" if $res->{status};
if (-f ".topdeps") {
# This is a TopGit branch. Fetch all the topic branches so
# that builders can run "tg patch" and similar.
($res, $stdout, $stderr) = captureStdoutStderr(600,
"tg", "remote", "--populate", "origin");
print STDERR "warning: `tg remote --populate origin' failed:\n$stderr" if $res;
$res = run(cmd => ["tg", "remote", "--populate", "origin"], dir => $clonePath, timeout => 600);
print STDERR "warning: `tg remote --populate origin' failed:\n$res->{stderr}" if $res->{status};
}
}
@ -64,7 +59,6 @@ sub _parseValue {
(my $uri, my $branch, my $deepClone) = split ' ', $value;
$branch = defined $branch ? $branch : "master";
return ($uri, $branch, $deepClone);
}
sub fetchInput {
@ -80,19 +74,13 @@ sub fetchInput {
my $sha256;
my $storePath;
my ($res, $stdout, $stderr) = captureStdoutStderr(600,
("git", "rev-parse", "$branch"));
die "error getting revision number of Git branch '$branch' at `$uri':\n$stderr" if $res;
my ($revision) = split /\n/, $stdout;
die "error getting a well-formated revision number of Git branch '$branch' at `$uri':\n$stdout"
my $revision = grab(cmd => ["git", "rev-parse", "$branch"], dir => $clonePath, chomp => 1);
die "did not get a well-formated revision number of Git branch '$branch' at `$uri'"
unless $revision =~ /^[0-9a-fA-F]+$/;
my $ref = "refs/heads/$branch";
# Some simple caching: don't check a uri/branch/revision more than once.
# TODO: Fix case where the branch is reset to a previous commit.
my $cachedInput ;
my $cachedInput;
($cachedInput) = $self->{db}->resultset('CachedGitInputs')->search(
{uri => $uri, branch => $branch, revision => $revision},
{rows => 1});
@ -123,10 +111,7 @@ sub fetchInput {
$ENV{"NIX_PREFETCH_GIT_DEEP_CLONE"} = "1";
}
($res, $stdout, $stderr) = captureStdoutStderr(600, "nix-prefetch-git", $clonePath, $revision);
die "cannot check out Git repository branch '$branch' at `$uri':\n$stderr" if $res;
($sha256, $storePath) = split ' ', $stdout;
($sha256, $storePath) = split ' ', grab(cmd => ["nix-prefetch-git", $clonePath, $revision], chomp => 1);
txn_do($self->{db}, sub {
$self->{db}->resultset('CachedGitInputs')->update_or_create(
@ -143,12 +128,9 @@ sub fetchInput {
# number of commits in the history of this revision (revCount)
# the output of git-describe (gitTag), and the abbreviated
# revision (shortRev).
my $revCount = `git rev-list $revision | wc -l`; chomp $revCount;
die "git rev-list failed" if $? != 0;
my $gitTag = `git describe --always $revision`; chomp $gitTag;
die "git describe failed" if $? != 0;
my $shortRev = `git rev-parse --short $revision`; chomp $shortRev;
die "git rev-parse failed" if $? != 0;
my $revCount = scalar(split '\n', grab(cmd => ["git", "rev-list", "$revision"], dir => $clonePath));
my $gitTag = grab(cmd => ["git", "describe", "--always", "$revision"], dir => $clonePath, chomp => 1);
my $shortRev = grab(cmd => ["git", "rev-parse", "--short", "$revision"], dir => $clonePath, chomp => 1);
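# Illustrative values: $revCount might be 1234, $gitTag something like
# "v1.2-53-g3f1b2c4" (or just the short hash when no tag is reachable), and
# $shortRev the abbreviated commit hash, e.g. "3f1b2c4".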
return
{ uri => $uri
@ -172,9 +154,7 @@ sub getCommits {
my $clonePath = $self->_cloneRepo($uri, $branch, $deepClone);
my $out;
IPC::Run::run(["git", "log", "--pretty=format:%H%x09%an%x09%ae%x09%at", "$rev1..$rev2"], \undef, \$out)
or die "cannot get git logs: $?";
my $out = grab(cmd => ["git", "log", "--pretty=format:%H%x09%an%x09%ae%x09%at", "$rev1..$rev2"], dir => $clonePath);
my $res = [];
foreach my $line (split /\n/, $out) {

View File

@ -9,7 +9,7 @@ sub buildFinished {
my ($self, $build, $dependents) = @_;
my $cfg = $self->{config}->{hipchat};
my @config = ref $cfg eq "ARRAY" ? @$cfg : ($cfg);
my @config = defined $cfg ? ref $cfg eq "ARRAY" ? @$cfg : ($cfg) : ();
my $baseurl = $self->{config}->{'base_uri'} || "http://localhost:3000";
@ -37,33 +37,7 @@ sub buildFinished {
return if scalar keys %rooms == 0;
# Determine who broke/fixed the build.
my $prevBuild = getPreviousBuild($build);
my $nrCommits = 0;
my %authors;
if ($prevBuild) {
foreach my $curInput ($build->buildinputs_builds) {
next unless $curInput->type eq "git";
my $prevInput = $prevBuild->buildinputs_builds->find({ name => $curInput->name });
next unless defined $prevInput;
next if $curInput->type ne $prevInput->type;
next if $curInput->uri ne $prevInput->uri;
my @commits;
foreach my $plugin (@{$self->{plugins}}) {
push @commits, @{$plugin->getCommits($curInput->type, $curInput->uri, $prevInput->revision, $curInput->revision)};
}
foreach my $commit (@commits) {
print STDERR "$commit->{revision} by $commit->{author}\n";
$authors{$commit->{author}} = $commit->{email};
$nrCommits++;
}
}
}
my ($authors, $nrCommits) = getResponsibleAuthors($build, $self->{plugins});
# Send a message to each room.
foreach my $roomId (keys %rooms) {
@ -83,16 +57,15 @@ sub buildFinished {
$msg .= " (and ${\scalar @deps} others)" if scalar @deps > 0;
$msg .= ": <a href='$baseurl/build/${\$build->id}'>" . showStatus($build) . "</a>";
if (scalar keys %authors > 0) {
if (scalar keys %{$authors} > 0) {
# FIXME: HTML escaping
my @x = map { "<a href='mailto:$authors{$_}'>$_</a>" } (sort keys %authors);
my @x = map { "<a href='mailto:$authors->{$_}'>$_</a>" } (sort keys %{$authors});
$msg .= ", likely due to ";
$msg .= "$nrCommits commits by " if $nrCommits > 1;
$msg .= join(" or ", scalar @x > 1 ? join(", ", @x[0..scalar @x - 2]) : (), $x[-1]);
}
print STDERR "sending hipchat notification to room $roomId: $msg\n";
next;
my $ua = LWP::UserAgent->new();
my $resp = $ua->post('https://api.hipchat.com/v1/rooms/message?format=json&auth_token=' . $room->{room}->{token}, {

View File

@ -12,21 +12,33 @@ sub supportedInputTypes {
$inputTypes->{'hg'} = 'Mercurial checkout';
}
sub _parseValue {
my ($value) = @_;
(my $uri, my $id) = split ' ', $value;
$id = defined $id ? $id : "default";
return ($uri, $id);
}
sub _clonePath {
my ($uri) = @_;
my $cacheDir = getSCMCacheDir . "/hg";
mkpath($cacheDir);
return $cacheDir . "/" . sha256_hex($uri);
}
sub fetchInput {
my ($self, $type, $name, $value) = @_;
return undef if $type ne "hg";
(my $uri, my $id) = split ' ', $value;
(my $uri, my $id) = _parseValue($value);
$id = defined $id ? $id : "default";
# init local hg clone
my $stdout = ""; my $stderr = "";
my $cacheDir = getSCMCacheDir . "/hg";
mkpath($cacheDir);
my $clonePath = $cacheDir . "/" . sha256_hex($uri);
my $clonePath = _clonePath($uri);
if (! -d $clonePath) {
(my $res, $stdout, $stderr) = captureStdoutStderr(600,
@ -85,4 +97,32 @@ sub fetchInput {
};
}
sub getCommits {
my ($self, $type, $value, $rev1, $rev2) = @_;
return [] if $type ne "hg";
return [] unless $rev1 =~ /^[0-9a-f]+$/;
return [] unless $rev2 =~ /^[0-9a-f]+$/;
my ($uri, $id) = _parseValue($value);
my $clonePath = _clonePath($uri);
chdir $clonePath or die $!;
my $out;
IPC::Run::run(["hg", "log", "--template", "{node|short}\t{author|person}\t{author|email}\n", "-r", "$rev1:$rev2", $clonePath], \undef, \$out)
or die "cannot get mercurial logs: $?";
my $res = [];
foreach my $line (split /\n/, $out) {
if ($line ne "") {
my ($revision, $author, $email) = split "\t", $line;
push @$res, { revision => $revision, author => $author, email => $email };
}
}
return $res;
}
1;

View File

@ -34,8 +34,13 @@ sub fetchInput {
} else {
print STDERR "copying input ", $name, " from $uri\n";
$storePath = `nix-store --add "$uri"`
or die "cannot copy path $uri to the Nix store.\n";
if ( $uri =~ /^\// ) {
$storePath = `nix-store --add "$uri"`
or die "cannot copy path $uri to the Nix store.\n";
} else {
$storePath = `PRINT_PATH=1 nix-prefetch-url "$uri" | tail -n 1`
or die "cannot fetch $uri to the Nix store.\n";
}
chomp $storePath;
$sha256 = (queryPathInfo($storePath, 0))[1] or die;

View File

@ -0,0 +1,149 @@
package Hydra::Plugin::S3Backup;
use strict;
use parent 'Hydra::Plugin';
use File::Temp;
use File::Basename;
use Fcntl;
use IO::File;
use Net::Amazon::S3;
use Net::Amazon::S3::Client;
use Digest::SHA;
use Nix::Config;
use Nix::Store;
use Hydra::Model::DB;
use Hydra::Helper::CatalystUtils;
my $client;
my %compressors = (
xz => "| $Nix::Config::xz",
bzip2 => "| $Nix::Config::bzip2",
none => ""
);
my $lockfile = Hydra::Model::DB::getHydraPath . "/.hydra-s3backup.lock";
sub buildFinished {
my ($self, $build, $dependents) = @_;
return unless $build->buildstatus == 0 or $build->buildstatus == 6;
my $jobName = showJobName $build;
my $job = $build->job;
my $cfg = $self->{config}->{s3backup};
my @config = defined $cfg ? ref $cfg eq "ARRAY" ? @$cfg : ($cfg) : ();
my @matching_configs = ();
foreach my $bucket_config (@config) {
push @matching_configs, $bucket_config if $jobName =~ /^$bucket_config->{jobs}$/;
}
return unless @matching_configs;
unless (defined $client) {
$client = Net::Amazon::S3::Client->new( s3 => Net::Amazon::S3->new( retry => 1 ) );
}
# !!! Maybe should do per-bucket locking?
my $lockhandle = IO::File->new;
open($lockhandle, "+>", $lockfile) or die "Opening $lockfile: $!";
flock($lockhandle, Fcntl::LOCK_SH) or die "Read-locking $lockfile: $!";
my @needed_paths = ();
foreach my $output ($build->buildoutputs) {
push @needed_paths, $output->path;
}
my %narinfos = ();
my %compression_types = ();
foreach my $bucket_config (@matching_configs) {
my $compression_type =
exists $bucket_config->{compression_type} ? $bucket_config->{compression_type} : "bzip2";
die "Unsupported compression type $compression_type" unless exists $compressors{$compression_type};
if (exists $compression_types{$compression_type}) {
push @{$compression_types{$compression_type}}, $bucket_config;
} else {
$compression_types{$compression_type} = [ $bucket_config ];
$narinfos{$compression_type} = [];
}
}
my $build_id = $build->id;
my $tempdir = File::Temp->newdir("s3-backup-nars-$build_id" . "XXXXX");
my %seen = ();
# Upload nars and build narinfos
while (@needed_paths) {
my $path = shift @needed_paths;
next if exists $seen{$path};
$seen{$path} = undef;
my $hash = substr basename($path), 0, 32;
my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($path, 0);
my $system;
if (defined $deriver and isValidPath($deriver)) {
$system = derivationFromPath($deriver)->{platform};
}
foreach my $reference (@{$refs}) {
push @needed_paths, $reference;
}
while (my ($compression_type, $configs) = each %compression_types) {
my @incomplete_buckets = ();
# Don't do any work if all the buckets have this path
foreach my $bucket_config (@{$configs}) {
my $bucket = $client->bucket( name => $bucket_config->{name} );
my $prefix = exists $bucket_config->{prefix} ? $bucket_config->{prefix} : "";
push @incomplete_buckets, $bucket_config
unless $bucket->object( key => $prefix . "$hash.narinfo" )->exists;
}
next unless @incomplete_buckets;
my $compressor = $compressors{$compression_type};
system("$Nix::Config::binDir/nix-store --export $path $compressor > $tempdir/nar") == 0 or die;
my $digest = Digest::SHA->new(256);
$digest->addfile("$tempdir/nar");
my $file_hash = $digest->hexdigest;
my @stats = stat "$tempdir/nar" or die "Couldn't stat $tempdir/nar";
my $file_size = $stats[7];
my $narinfo = "";
$narinfo .= "StorePath: $path\n";
$narinfo .= "URL: $hash.nar\n";
$narinfo .= "Compression: $compression_type\n";
$narinfo .= "FileHash: sha256:$file_hash\n";
$narinfo .= "FileSize: $file_size\n";
$narinfo .= "NarHash: $narHash\n";
$narinfo .= "NarSize: $narSize\n";
$narinfo .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n";
if (defined $deriver) {
$narinfo .= "Deriver: " . basename $deriver . "\n";
if (defined $system) {
$narinfo .= "System: $system\n";
}
}
push @{$narinfos{$compression_type}}, { hash => $hash, info => $narinfo };
foreach my $bucket_config (@incomplete_buckets) {
my $bucket = $client->bucket( name => $bucket_config->{name} );
my $prefix = exists $bucket_config->{prefix} ? $bucket_config->{prefix} : "";
my $nar_object = $bucket->object(
key => $prefix . "$hash.nar",
content_type => "application/x-nix-archive"
);
$nar_object->put_filename("$tempdir/nar");
}
}
}
# Upload narinfos
while (my ($compression_type, $infos) = each %narinfos) {
foreach my $bucket_config (@{$compression_types{$compression_type}}) {
foreach my $info (@{$infos}) {
my $bucket = $client->bucket( name => $bucket_config->{name} );
my $prefix = exists $bucket_config->{prefix} ? $bucket_config->{prefix} : "";
my $narinfo_object = $bucket->object(
key => $prefix . $info->{hash} . ".narinfo",
content_type => "text/x-nix-narinfo"
);
$narinfo_object->put($info->{info}) unless $narinfo_object->exists;
}
}
}
}
1;

View File

@ -0,0 +1,111 @@
use utf8;
package Hydra::Schema::AggregateConstituents;
# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE
=head1 NAME
Hydra::Schema::AggregateConstituents
=cut
use strict;
use warnings;
use base 'DBIx::Class::Core';
=head1 COMPONENTS LOADED
=over 4
=item * L<Hydra::Component::ToJSON>
=back
=cut
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
=head1 TABLE: C<AggregateConstituents>
=cut
__PACKAGE__->table("AggregateConstituents");
=head1 ACCESSORS
=head2 aggregate
data_type: 'integer'
is_foreign_key: 1
is_nullable: 0
=head2 constituent
data_type: 'integer'
is_foreign_key: 1
is_nullable: 0
=cut
__PACKAGE__->add_columns(
"aggregate",
{ data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
"constituent",
{ data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
);
=head1 PRIMARY KEY
=over 4
=item * L</aggregate>
=item * L</constituent>
=back
=cut
__PACKAGE__->set_primary_key("aggregate", "constituent");
=head1 RELATIONS
=head2 aggregate
Type: belongs_to
Related object: L<Hydra::Schema::Builds>
=cut
__PACKAGE__->belongs_to(
"aggregate",
"Hydra::Schema::Builds",
{ id => "aggregate" },
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" },
);
=head2 constituent
Type: belongs_to
Related object: L<Hydra::Schema::Builds>
=cut
__PACKAGE__->belongs_to(
"constituent",
"Hydra::Schema::Builds",
{ id => "constituent" },
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "NO ACTION" },
);
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-08-15 00:20:01
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:TLNenyPLIWw2gWsOVhplZw
# You can replace this text with custom code or comments, and it will be preserved on regeneration
1;

View File

@ -72,6 +72,12 @@ __PACKAGE__->table("BuildInputs");
data_type: 'text'
is_nullable: 1
=head2 emailresponsible
data_type: 'integer'
default_value: 0
is_nullable: 0
=head2 dependency
data_type: 'integer'
@ -105,6 +111,8 @@ __PACKAGE__->add_columns(
{ data_type => "text", is_nullable => 1 },
"value",
{ data_type => "text", is_nullable => 1 },
"emailresponsible",
{ data_type => "integer", default_value => 0, is_nullable => 0 },
"dependency",
{ data_type => "integer", is_foreign_key => 1, is_nullable => 1 },
"path",
@ -168,7 +176,7 @@ __PACKAGE__->belongs_to(
);
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:tKZAybbNaRIMs9n5tHkqPw
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-08 13:08:15
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:OaJPzRM+8XGsu3eIkqeYEw
1;

View File

@ -97,6 +97,14 @@ __PACKAGE__->belongs_to(
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:V8MbzKvZNEaeHBJV67+ZMQ
my %hint = (
columns => [
'path'
],
);
sub json_hint {
return \%hint;
}
# You can replace this text with custom code or comments, and it will be preserved on regeneration
1;

View File

@ -169,4 +169,21 @@ __PACKAGE__->has_many(
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:OZsXJniZ/7EB2iSz7p5y4A
my %hint = (
columns => [
"machine",
"system",
"stepnr",
"drvpath",
"starttime",
],
eager_relations => {
build => 'id'
}
);
sub json_hint {
return \%hint;
}
1;

View File

@ -288,6 +288,36 @@ __PACKAGE__->set_primary_key("id");
=head1 RELATIONS
=head2 aggregateconstituents_aggregates
Type: has_many
Related object: L<Hydra::Schema::AggregateConstituents>
=cut
__PACKAGE__->has_many(
"aggregateconstituents_aggregates",
"Hydra::Schema::AggregateConstituents",
{ "foreign.aggregate" => "self.id" },
undef,
);
=head2 aggregateconstituents_constituents
Type: has_many
Related object: L<Hydra::Schema::AggregateConstituents>
=cut
__PACKAGE__->has_many(
"aggregateconstituents_constituents",
"Hydra::Schema::AggregateConstituents",
{ "foreign.constituent" => "self.id" },
undef,
);
=head2 buildinputs_builds
Type: has_many
@ -468,9 +498,37 @@ __PACKAGE__->has_many(
undef,
);
=head2 aggregates
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:isCEXACY/PwkvgKHcXvAIg
Type: many_to_many
Composing rels: L</aggregateconstituents_constituents> -> aggregate
=cut
__PACKAGE__->many_to_many(
"aggregates",
"aggregateconstituents_constituents",
"aggregate",
);
=head2 constituents
Type: many_to_many
Composing rels: L</aggregateconstituents_constituents> -> constituent
=cut
__PACKAGE__->many_to_many(
"constituents",
"aggregateconstituents_constituents",
"constituent",
);
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-08-15 00:20:01
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:U1j/qm0vslb6Jvgu5mGMtw
__PACKAGE__->has_many(
"dependents",
@ -502,6 +560,8 @@ __PACKAGE__->has_many(
__PACKAGE__->many_to_many("jobsetevals", "jobsetevalmembers", "eval");
__PACKAGE__->many_to_many("constituents_", "aggregateconstituents_aggregates", "constituent");
sub makeSource {
my ($name, $query) = @_;
my $source = __PACKAGE__->result_source_instance();
@ -516,36 +576,6 @@ sub makeQueries {
my $activeJobs = "(select distinct project, jobset, job, system from Builds where isCurrent = 1 $constraint)";
makeSource(
"JobStatus$name",
# Urgh, can't use "*" in the "select" here because of the status change join.
<<QUERY
select x.*, b.id as statusChangeId, b.timestamp as statusChangeTime
from
(select
(select max(b.id) from Builds b
where
project = activeJobs.project and jobset = activeJobs.jobset
and job = activeJobs.job and system = activeJobs.system
and finished = 1
) as id
from $activeJobs as activeJobs
) as latest
join Builds x using (id)
left join Builds b on
b.id =
(select max(c.id) from Builds c
where
c.finished = 1 and
x.project = c.project and x.jobset = c.jobset and x.job = c.job and x.system = c.system and
x.id > c.id and
((x.buildStatus = 0 and c.buildStatus != 0) or
(x.buildStatus != 0 and c.buildStatus = 0)))
QUERY
);
makeSource("ActiveJobs$name", "select distinct project, jobset, job from Builds where isCurrent = 1 $constraint");
makeSource(
"LatestSucceeded$name",
<<QUERY
@ -571,4 +601,30 @@ makeQueries('ForJobset', "and project = ? and jobset = ?");
makeQueries('ForJob', "and project = ? and jobset = ? and job = ?");
my %hint = (
columns => [
'id',
'finished',
'timestamp',
'starttime',
'stoptime',
'project',
'jobset',
'job',
'nixname',
'system',
'priority',
'busy',
'buildstatus',
'releasename'
],
eager_relations => {
buildoutputs => 'name'
}
);
sub json_hint {
return \%hint;
}
1;

View File

@ -0,0 +1,98 @@
use utf8;
package Hydra::Schema::CachedDarcsInputs;
# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE
=head1 NAME
Hydra::Schema::CachedDarcsInputs
=cut
use strict;
use warnings;
use base 'DBIx::Class::Core';
=head1 COMPONENTS LOADED
=over 4
=item * L<Hydra::Component::ToJSON>
=back
=cut
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
=head1 TABLE: C<CachedDarcsInputs>
=cut
__PACKAGE__->table("CachedDarcsInputs");
=head1 ACCESSORS
=head2 uri
data_type: 'text'
is_nullable: 0
=head2 revision
data_type: 'text'
is_nullable: 0
=head2 sha256hash
data_type: 'text'
is_nullable: 0
=head2 storepath
data_type: 'text'
is_nullable: 0
=head2 revcount
data_type: 'integer'
is_nullable: 0
=cut
__PACKAGE__->add_columns(
"uri",
{ data_type => "text", is_nullable => 0 },
"revision",
{ data_type => "text", is_nullable => 0 },
"sha256hash",
{ data_type => "text", is_nullable => 0 },
"storepath",
{ data_type => "text", is_nullable => 0 },
"revcount",
{ data_type => "integer", is_nullable => 0 },
);
=head1 PRIMARY KEY
=over 4
=item * L</uri>
=item * L</revision>
=back
=cut
__PACKAGE__->set_primary_key("uri", "revision");
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-09-20 11:08:50
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Yl1slt3SAizijgu0KUTn0A
# You can replace this text with custom code or comments, and it will be preserved on regeneration
1;

View File

@ -137,8 +137,27 @@ __PACKAGE__->belongs_to(
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" },
);
=head2 starredjobs
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:t2CCfUjFEz/lO4szROz1AQ
Type: has_many
Related object: L<Hydra::Schema::StarredJobs>
=cut
__PACKAGE__->has_many(
"starredjobs",
"Hydra::Schema::StarredJobs",
{
"foreign.job" => "self.name",
"foreign.jobset" => "self.jobset",
"foreign.project" => "self.project",
},
undef,
);
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-14 15:46:29
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:uYKWjewvKBEAuK53u7vKuw
1;

View File

@ -169,6 +169,16 @@ __PACKAGE__->belongs_to(
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:1Dp8B58leBLh4GK0GPw2zg
my %hint = (
columns => [
"revision",
"type",
"uri"
],
);
sub json_hint {
return \%hint;
}
# You can replace this text with custom code or comments, and it will be preserved on regeneration
1;

View File

@ -199,4 +199,22 @@ __PACKAGE__->has_many(
__PACKAGE__->many_to_many(builds => 'buildIds', 'build');
my %hint = (
columns => [
"hasnewbuilds",
"id"
],
relations => {
"builds" => "id"
},
eager_relations => {
# altnr? Does anyone care?
jobsetevalinputs => "name"
}
);
sub json_hint {
return \%hint;
}
1;

View File

@ -57,6 +57,12 @@ __PACKAGE__->table("JobsetInputs");
data_type: 'text'
is_nullable: 0
=head2 emailresponsible
data_type: 'integer'
default_value: 0
is_nullable: 0
=cut
__PACKAGE__->add_columns(
@ -68,6 +74,8 @@ __PACKAGE__->add_columns(
{ data_type => "text", is_nullable => 0 },
"type",
{ data_type => "text", is_nullable => 0 },
"emailresponsible",
{ data_type => "integer", default_value => 0, is_nullable => 0 },
);
=head1 PRIMARY KEY
@ -142,7 +150,17 @@ __PACKAGE__->has_many(
);
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:UXBzqO0vHPql4LYyXpgEQg
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-08 13:06:15
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:+mZZqLjQNwblb/EWW1alLQ
my %hint = (
relations => {
"jobsetinputalts" => "value"
}
);
sub json_hint {
return \%hint;
}
1;

View File

@ -118,6 +118,17 @@ __PACKAGE__->table("Jobsets");
default_value: 300
is_nullable: 0
=head2 schedulingshares
data_type: 'integer'
default_value: 100
is_nullable: 0
=head2 fetcherrormsg
data_type: 'text'
is_nullable: 1
=cut
__PACKAGE__->add_columns(
@ -151,6 +162,10 @@ __PACKAGE__->add_columns(
{ data_type => "integer", default_value => 3, is_nullable => 0 },
"checkinterval",
{ data_type => "integer", default_value => 300, is_nullable => 0 },
"schedulingshares",
{ data_type => "integer", default_value => 100, is_nullable => 0 },
"fetcherrormsg",
{ data_type => "text", is_nullable => 1 },
);
=head1 PRIMARY KEY
@ -271,8 +286,42 @@ __PACKAGE__->belongs_to(
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" },
);
=head2 starredjobs
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:tsGR8MhZRIUeNwpcVczMUw
Type: has_many
Related object: L<Hydra::Schema::StarredJobs>
=cut
__PACKAGE__->has_many(
"starredjobs",
"Hydra::Schema::StarredJobs",
{
"foreign.jobset" => "self.name",
"foreign.project" => "self.project",
},
undef,
);
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-14 15:46:29
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:DTAGxP5RFvcNxP/ciJGo4Q
my %hint = (
columns => [
"enabled",
"errormsg",
"fetcherrormsg",
"emailoverride"
],
eager_relations => {
jobsetinputs => "name"
}
);
sub json_hint {
return \%hint;
}
1;

View File

@ -0,0 +1,75 @@
use utf8;
package Hydra::Schema::NrBuilds;
# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE
=head1 NAME
Hydra::Schema::NrBuilds
=cut
use strict;
use warnings;
use base 'DBIx::Class::Core';
=head1 COMPONENTS LOADED
=over 4
=item * L<Hydra::Component::ToJSON>
=back
=cut
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
=head1 TABLE: C<NrBuilds>
=cut
__PACKAGE__->table("NrBuilds");
=head1 ACCESSORS
=head2 what
data_type: 'text'
is_nullable: 0
=head2 count
data_type: 'integer'
is_nullable: 0
=cut
__PACKAGE__->add_columns(
"what",
{ data_type => "text", is_nullable => 0 },
"count",
{ data_type => "integer", is_nullable => 0 },
);
=head1 PRIMARY KEY
=over 4
=item * L</what>
=back
=cut
__PACKAGE__->set_primary_key("what");
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-08-12 17:59:18
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:CK8eJGC803nGj0wnete9xg
# You can replace this text with custom code or comments, and it will be preserved on regeneration
1;

View File

@ -226,6 +226,21 @@ __PACKAGE__->has_many(
undef,
);
=head2 starredjobs
Type: has_many
Related object: L<Hydra::Schema::StarredJobs>
=cut
__PACKAGE__->has_many(
"starredjobs",
"Hydra::Schema::StarredJobs",
{ "foreign.project" => "self.name" },
undef,
);
=head2 viewjobs
Type: has_many
@ -267,15 +282,26 @@ Composing rels: L</projectmembers> -> username
__PACKAGE__->many_to_many("usernames", "projectmembers", "username");
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:RffghAo9jAaqYk41y1Sdqw
# These lines were loaded from '/home/rbvermaa/src/hydra/src/lib/Hydra/Schema/Projects.pm' found in @INC.
# They are now part of the custom portion of this file
# for you to hand-edit. If you do not either delete
# this section or remove that file from @INC, this section
# will be repeated redundantly when you re-create this
# file again via Loader! See skip_load_external to disable
# this feature.
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-14 15:46:29
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:PdNQ2mf5azBB6nI+iAm8fQ
my %hint = (
columns => [
"name",
"displayname",
"description",
"enabled",
"hidden",
"owner"
],
relations => {
releases => "name",
jobsets => "name"
}
);
sub json_hint {
return \%hint;
}
# You can replace this text with custom content, and it will be preserved on regeneration
1;

View File

@ -0,0 +1,161 @@
use utf8;
package Hydra::Schema::StarredJobs;
# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE
=head1 NAME
Hydra::Schema::StarredJobs
=cut
use strict;
use warnings;
use base 'DBIx::Class::Core';
=head1 COMPONENTS LOADED
=over 4
=item * L<Hydra::Component::ToJSON>
=back
=cut
__PACKAGE__->load_components("+Hydra::Component::ToJSON");
=head1 TABLE: C<StarredJobs>
=cut
__PACKAGE__->table("StarredJobs");
=head1 ACCESSORS
=head2 username
data_type: 'text'
is_foreign_key: 1
is_nullable: 0
=head2 project
data_type: 'text'
is_foreign_key: 1
is_nullable: 0
=head2 jobset
data_type: 'text'
is_foreign_key: 1
is_nullable: 0
=head2 job
data_type: 'text'
is_foreign_key: 1
is_nullable: 0
=cut
__PACKAGE__->add_columns(
"username",
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
"project",
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
"jobset",
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
"job",
{ data_type => "text", is_foreign_key => 1, is_nullable => 0 },
);
=head1 PRIMARY KEY
=over 4
=item * L</username>
=item * L</project>
=item * L</jobset>
=item * L</job>
=back
=cut
__PACKAGE__->set_primary_key("username", "project", "jobset", "job");
=head1 RELATIONS
=head2 job
Type: belongs_to
Related object: L<Hydra::Schema::Jobs>
=cut
__PACKAGE__->belongs_to(
"job",
"Hydra::Schema::Jobs",
{ jobset => "jobset", name => "job", project => "project" },
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" },
);
=head2 jobset
Type: belongs_to
Related object: L<Hydra::Schema::Jobsets>
=cut
__PACKAGE__->belongs_to(
"jobset",
"Hydra::Schema::Jobsets",
{ name => "jobset", project => "project" },
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" },
);
=head2 project
Type: belongs_to
Related object: L<Hydra::Schema::Projects>
=cut
__PACKAGE__->belongs_to(
"project",
"Hydra::Schema::Projects",
{ name => "project" },
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" },
);
=head2 username
Type: belongs_to
Related object: L<Hydra::Schema::Users>
=cut
__PACKAGE__->belongs_to(
"username",
"Hydra::Schema::Users",
{ username => "username" },
{ is_deferrable => 0, on_delete => "CASCADE", on_update => "CASCADE" },
);
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-14 15:46:29
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:naj5aKWuw8hLE6klmvW9Eg
# You can replace this text with custom code or comments, and it will be preserved on regeneration
1;

View File

@ -135,6 +135,21 @@ __PACKAGE__->has_many(
undef,
);
=head2 starredjobs
Type: has_many
Related object: L<Hydra::Schema::StarredJobs>
=cut
__PACKAGE__->has_many(
"starredjobs",
"Hydra::Schema::StarredJobs",
{ "foreign.username" => "self.username" },
undef,
);
=head2 userroles
Type: has_many
@ -161,14 +176,22 @@ Composing rels: L</projectmembers> -> project
__PACKAGE__->many_to_many("projects", "projectmembers", "project");
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-06-13 01:54:50
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:hy3MKvFxfL+1bTc7Hcb1zA
# These lines were loaded from '/home/rbvermaa/src/hydra/src/lib/Hydra/Schema/Users.pm' found in @INC.
# They are now part of the custom portion of this file
# for you to hand-edit. If you do not either delete
# this section or remove that file from @INC, this section
# will be repeated redundantly when you re-create this
# file again via Loader! See skip_load_external to disable
# this feature.
# Created by DBIx::Class::Schema::Loader v0.07033 @ 2013-10-14 15:46:29
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Hv9Ukqud0d3uIUot0ErKeg
my %hint = (
columns => [
"fullname",
"emailaddress",
"username"
],
relations => {
userroles => "role"
}
);
sub json_hint {
return \%hint;
}
1;
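With this hint a serialised user exposes only the three whitelisted columns plus the role names reached through userroles. A hedged round-trip sketch (the lookup and the resulting JSON are invented, not captured from a running Hydra):

use JSON;

# convert_blessed(1) makes the encoder call the row's TO_JSON, which is
# supplied by Hydra::Component::ToJSON; canonical(1) just sorts the keys.
my $user    = $c->model('DB::Users')->find("alice");   # assumed controller context
my $encoder = JSON->new->convert_blessed(1)->canonical(1);
print $encoder->encode($user), "\n";
# {"emailaddress":"alice@example.org","fullname":"Alice Example",
#  "userroles":["admin"],"username":"alice"}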


@ -19,31 +19,83 @@ sub escape {
sub process {
my ($self, $c) = @_;
my $res = "[\n";
my %perSystem;
foreach my $pkg (@{$c->stash->{nixPkgs}}) {
my $build = $pkg->{build};
$res .= " # $pkg->{name}\n";
$res .= " { type = \"derivation\";\n";
$res .= " name = " . escape ($build->get_column("releasename") or $build->nixname) . ";\n";
$res .= " system = " . (escape $build->system) . ";\n";
$res .= " outPath = " . (escape $pkg->{outPath}) . ";\n";
$res .= " meta = {\n";
$res .= " description = " . (escape $build->description) . ";\n"
if $build->description;
$res .= " longDescription = " . (escape $build->longdescription) . ";\n"
if $build->longdescription;
$res .= " license = " . (escape $build->license) . ";\n"
if $build->license;
$res .= " };\n";
$res .= " }\n";
$perSystem{$build->system}->{$build->get_column('job')} = $pkg;
}
$res .= "]\n";
my $res = <<EOF;
{ system ? builtins.currentSystem }:
let
mkFakeDerivation = attrs: outputs:
let
outputNames = builtins.attrNames outputs;
common = attrs // outputsSet //
{ type = "derivation";
outputs = outputNames;
all = outputsList;
};
outputToAttrListElement = outputName:
{ name = outputName;
value = common // {
inherit outputName;
outPath = builtins.getAttr outputName outputs;
};
};
outputsList = map outputToAttrListElement outputNames;
outputsSet = builtins.listToAttrs outputsList;
in outputsSet;
in
EOF
my $first = 1;
foreach my $system (keys %perSystem) {
$res .= "else " if !$first;
$res .= "if system == ${\escape $system} then {\n\n";
foreach my $job (keys %{$perSystem{$system}}) {
my $pkg = $perSystem{$system}->{$job};
my $build = $pkg->{build};
$res .= " # Hydra build ${\$build->id}\n";
my $attr = $build->get_column('job');
$attr =~ s/\./-/g;
$res .= " ${\escape $attr} = (mkFakeDerivation {\n";
$res .= " type = \"derivation\";\n";
$res .= " name = ${\escape ($build->get_column('releasename') or $build->nixname)};\n";
$res .= " system = ${\escape $build->system};\n";
$res .= " meta = {\n";
$res .= " description = ${\escape $build->description};\n"
if $build->description;
$res .= " longDescription = ${\escape $build->longdescription};\n"
if $build->longdescription;
$res .= " license = ${\escape $build->license};\n"
if $build->license;
$res .= " maintainers = ${\escape $build->maintainers};\n"
if $build->maintainers;
$res .= " };\n";
$res .= " } {\n";
my @outputNames = sort keys %{$pkg->{outputs}};
$res .= " ${\escape $_} = ${\escape $pkg->{outputs}->{$_}};\n" foreach @outputNames;
my $out = defined $pkg->{outputs}->{"out"} ? "out" : $outputNames[0];
$res .= " }).$out;\n\n";
}
$res .= "}\n\n";
$first = 0;
}
$res .= "else " if !$first;
$res .= "{}\n";
my $tar = Archive::Tar->new;
$tar->add_data("channel/channel-name", ($c->stash->{channelName} or "unnamed-channel"), {mtime => 0});
$tar->add_data("channel/default.nix", $res, {mtime => 0});
$tar->add_data("channel/channel-name", ($c->stash->{channelName} or "unnamed-channel"), {mtime => 1});
$tar->add_data("channel/default.nix", $res, {mtime => 1});
my $tardata = $tar->write;
my $bzip2data;
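The trailing ".$out" selection mirrors Nix's notion of a default output: prefer the output named "out" and otherwise fall back to the first output name in sorted order. The same rule in isolation (output names and store paths are made up):

# Illustration of the default-output choice used when emitting ".$out" above.
my %outputs = (
    bin => "/nix/store/xxxxxxxx-example-1.0-bin",
    doc => "/nix/store/yyyyyyyy-example-1.0-doc",
);
my @outputNames = sort keys %outputs;
my $out = defined $outputs{out} ? "out" : $outputNames[0];
print "$out\n";   # prints "bin": there is no "out", so the first sorted name wins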


@ -8,7 +8,7 @@ sub process {
my ($self, $c) = @_;
$c->response->content_encoding("utf-8");
$c->response->content_type('text/plain') unless $c->response->content_type() ne "";
$self->SUPER::process($c);
$c->response->body($c->stash->{plain}->{data});
}
1;
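Since the view now takes its body from $c->stash->{plain}->{data}, a caller only needs to stash the payload and forward to it. A hypothetical controller action (name and content invented) might be:

# Hypothetical sketch: serving plain text through Hydra::View::Plain.
sub robots_txt : Path('robots.txt') {
    my ($self, $c) = @_;
    $c->stash->{plain} = { data => "User-agent: *\nDisallow: /\n" };
    $c->forward('Hydra::View::Plain');
}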


@ -8,17 +8,18 @@ __PACKAGE__->config(
TEMPLATE_EXTENSION => '.tt',
PRE_CHOMP => 1,
POST_CHOMP => 1,
expose_methods => [qw/log_exists ellipsize/]);
expose_methods => [qw/buildLogExists buildStepLogExists/]);
sub log_exists {
my ($self, $c, $drvPath) = @_;
my $x = getDrvLogPath($drvPath);
return defined $x;
sub buildLogExists {
my ($self, $c, $build) = @_;
my @outPaths = map { $_->path } $build->buildoutputs->all;
return defined findLog($c, $build->drvpath, @outPaths);
}
sub ellipsize {
my ($self, $c, $s, $n) = @_;
return length $s <= $n ? $s : substr($s, 0, $n - 3) . "...";
sub buildStepLogExists {
my ($self, $c, $step) = @_;
my @outPaths = map { $_->path } $step->buildstepoutputs->all;
return defined findLog($c, $step->drvpath, @outPaths);
}
1;