Merge branch 'master' into fix/local-store-detection

Janne Heß committed 2025-07-16 18:25:54 +02:00 (via GitHub)
200 changed files with 6519 additions and 3440 deletions

View File

@@ -4,7 +4,6 @@ use strict;
use warnings;
use base 'Hydra::Base::Controller::REST';
use List::SomeUtils qw(any);
use Nix::Store;
use Hydra::Helper::Nix;
use Hydra::Helper::CatalystUtils;
@@ -30,7 +29,7 @@ sub getChannelData {
my $outputs = {};
foreach my $output (@outputs) {
my $outPath = $output->get_column("outpath");
next if $checkValidity && !isValidPath($outPath);
next if $checkValidity && !$MACHINE_LOCAL_STORE->isValidPath($outPath);
$outputs->{$output->get_column("outname")} = $outPath;
push @storePaths, $outPath;
# Put the system type in the manifest (for top-level

View File

@@ -2,7 +2,167 @@ package Hydra::Config;
use strict;
use warnings;
use Config::General;
use List::SomeUtils qw(none);
use YAML qw(LoadFile);
our @ISA = qw(Exporter);
our @EXPORT = qw(
getHydraConfig
getLDAPConfig
getLDAPConfigAmbient
);
our %configGeneralOpts = (-UseApacheInclude => 1, -IncludeAgain => 1, -IncludeRelative => 1);
my $hydraConfigCache;
sub getHydraConfig {
return $hydraConfigCache if defined $hydraConfigCache;
my $conf;
if ($ENV{"HYDRA_CONFIG"}) {
$conf = $ENV{"HYDRA_CONFIG"};
} else {
require Hydra::Model::DB;
$conf = Hydra::Model::DB::getHydraPath() . "/hydra.conf"
};
if (-f $conf) {
$hydraConfigCache = loadConfig($conf);
} else {
$hydraConfigCache = {};
}
return $hydraConfigCache;
}
sub loadConfig {
my ($sourceFile) = @_;
my %opts = (%configGeneralOpts, -ConfigFile => $sourceFile);
return { Config::General->new(%opts)->getall };
}
sub is_ldap_in_legacy_mode {
my ($config, %env) = @_;
my $legacy_defined = defined $env{"HYDRA_LDAP_CONFIG"};
if (defined $config->{"ldap"}) {
if ($legacy_defined) {
die "The legacy environment variable HYDRA_LDAP_CONFIG is set, but config is also specified in hydra.conf. Please unset the environment variable.";
}
return 0;
} elsif ($legacy_defined) {
warn "Hydra is configured to use LDAP via the HYDRA_LDAP_CONFIG, a deprecated method. Please see the docs about configuring LDAP in the hydra.conf.";
return 1;
} else {
return 0;
}
}
sub getLDAPConfigAmbient {
return getLDAPConfig(getHydraConfig(), %ENV);
}
sub getLDAPConfig {
my ($config, %env) = @_;
my $ldap_config;
if (is_ldap_in_legacy_mode($config, %env)) {
$ldap_config = get_legacy_ldap_config($env{"HYDRA_LDAP_CONFIG"});
} else {
$ldap_config = $config->{"ldap"};
}
$ldap_config->{"role_mapping"} = normalize_ldap_role_mappings($ldap_config->{"role_mapping"});
return $ldap_config;
}
sub get_legacy_ldap_config {
my ($ldap_yaml_file) = @_;
return {
config => LoadFile($ldap_yaml_file),
role_mapping => {
"hydra_admin" => [ "admin" ],
"hydra_bump-to-front" => [ "bump-to-front" ],
"hydra_cancel-build" => [ "cancel-build" ],
"hydra_create-projects" => [ "create-projects" ],
"hydra_eval-jobset" => [ "eval-jobset" ],
"hydra_restart-jobs" => [ "restart-jobs" ],
},
};
}
sub normalize_ldap_role_mappings {
my ($input_map) = @_;
my $mapping = {};
my @errors;
for my $group (keys %{$input_map}) {
my $input = $input_map->{$group};
if (ref $input eq "ARRAY") {
$mapping->{$group} = $input;
} elsif (ref $input eq "") {
$mapping->{$group} = [ $input ];
} else {
push @errors, "On group '$group': the value is of type ${\ref $input}. Only strings and lists are acceptable.";
$mapping->{$group} = [ ];
}
eval {
validate_roles($mapping->{$group});
};
if ($@) {
push @errors, "On group '$group': $@";
}
}
if (@errors) {
die "Failed to normalize LDAP role mappings:\n" . (join "\n", @errors);
}
return $mapping;
}
sub validate_roles {
my ($roles) = @_;
my @invalid;
my $valid = valid_roles();
for my $role (@$roles) {
if (none { $_ eq $role } @$valid) {
push @invalid, "'$role'";
}
}
if (@invalid) {
die "Invalid roles: ${\join ', ', @invalid}. Valid roles are: ${\join ', ', @$valid}.";
}
return 1;
}
sub valid_roles {
return [
"admin",
"bump-to-front",
"cancel-build",
"create-projects",
"eval-jobset",
"restart-jobs",
];
}
1;
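
For illustration, a minimal sketch (group names hypothetical) of what normalize_ldap_role_mappings accepts and produces:

my $mapping = normalize_ldap_role_mappings({
    "hydra_admin" => "admin",                             # a bare string is wrapped in a list
    "hydra_devs"  => [ "restart-jobs", "cancel-build" ],  # lists pass through unchanged
});
# => { hydra_admin => [ "admin" ], hydra_devs => [ "restart-jobs", "cancel-build" ] }
# Unknown roles, or values that are neither strings nor lists, are collected and raised in one die().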

View File

@@ -216,8 +216,8 @@ sub scmdiff : Path('/api/scmdiff') Args(0) {
} elsif ($type eq "git") {
my $clonePath = getSCMCacheDir . "/git/" . sha256_hex($uri);
die if ! -d $clonePath;
$diff .= `(cd $clonePath; git log $rev1..$rev2)`;
$diff .= `(cd $clonePath; git diff $rev1..$rev2)`;
$diff .= `(cd $clonePath; git --git-dir .git log $rev1..$rev2)`;
$diff .= `(cd $clonePath; git --git-dir .git diff $rev1..$rev2)`;
}
$c->stash->{'plain'} = { data => (scalar $diff) || " " };
@@ -239,6 +239,8 @@ sub triggerJobset {
sub push : Chained('api') PathPart('push') Args(0) {
my ($self, $c) = @_;
requirePost($c);
$c->{stash}->{json}->{jobsetsTriggered} = [];
my $force = exists $c->request->query_params->{force};
@@ -246,19 +248,24 @@ sub push : Chained('api') PathPart('push') Args(0) {
foreach my $s (@jobsets) {
my ($p, $j) = parseJobsetName($s);
my $jobset = $c->model('DB::Jobsets')->find($p, $j);
requireEvalJobsetPrivileges($c, $jobset->project);
next unless defined $jobset && ($force || ($jobset->project->enabled && $jobset->enabled));
triggerJobset($self, $c, $jobset, $force);
}
my @repos = split /,/, ($c->request->query_params->{repos} // "");
foreach my $r (@repos) {
triggerJobset($self, $c, $_, $force) foreach $c->model('DB::Jobsets')->search(
my @jobsets = $c->model('DB::Jobsets')->search(
{ 'project.enabled' => 1, 'me.enabled' => 1 },
{
join => 'project',
where => \ [ 'exists (select 1 from JobsetInputAlts where project = me.project and jobset = me.name and value = ?)', [ 'value', $r ] ],
order_by => 'me.id DESC'
});
foreach my $jobset (@jobsets) {
requireEvalJobsetPrivileges($c, $jobset->project);
triggerJobset($self, $c, $jobset, $force)
}
}
$self->status_ok(
@@ -285,6 +292,23 @@ sub push_github : Chained('api') PathPart('push-github') Args(0) {
$c->response->body("");
}
sub push_gitea : Chained('api') PathPart('push-gitea') Args(0) {
my ($self, $c) = @_;
$c->{stash}->{json}->{jobsetsTriggered} = [];
my $in = $c->request->{data};
my $url = $in->{repository}->{clone_url} or die;
$url =~ s/\.git$//;
print STDERR "got push from Gitea repository $url\n";
triggerJobset($self, $c, $_, 0) foreach $c->model('DB::Jobsets')->search(
{ 'project.enabled' => 1, 'me.enabled' => 1 },
{ join => 'project'
, where => \ [ 'me.flake like ? or exists (select 1 from JobsetInputAlts where project = me.project and jobset = me.name and value like ?)', [ 'flake', "%$url%"], [ 'value', "%$url%" ] ]
});
$c->response->body("");
}
1;
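
For reference, the slice of the Gitea push payload that push_gitea reads, sketched as the decoded Perl structure (shape inferred from the code above; URL hypothetical):

# $c->request->{data}, after the JSON body has been decoded:
# {
#     repository => {
#         clone_url => "https://gitea.example.com/owner/repo.git",  # trailing ".git" is stripped before matching
#     },
# }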

View File

@@ -7,15 +7,15 @@ use base 'Hydra::Base::Controller::NixChannel';
use Hydra::Helper::Nix;
use Hydra::Helper::CatalystUtils;
use File::Basename;
use File::LibMagic;
use File::stat;
use Data::Dump qw(dump);
use Nix::Store;
use Nix::Config;
use List::SomeUtils qw(all);
use Encode;
use MIME::Types;
use JSON::PP;
use WWW::Form::UrlEncoded::PP qw();
use feature 'state';
sub buildChain :Chained('/') :PathPart('build') :CaptureArgs(1) {
my ($self, $c, $id) = @_;
@@ -38,6 +38,17 @@ sub buildChain :Chained('/') :PathPart('build') :CaptureArgs(1) {
$c->stash->{jobset} = $c->stash->{build}->jobset;
$c->stash->{job} = $c->stash->{build}->job;
$c->stash->{runcommandlogs} = [$c->stash->{build}->runcommandlogs->search({}, {order_by => ["id DESC"]})];
$c->stash->{runcommandlogProblem} = undef;
if ($c->stash->{job} =~ qr/^runCommandHook\..*/) {
if (!$c->config->{dynamicruncommand}->{enable}) {
$c->stash->{runcommandlogProblem} = "disabled-server";
} elsif (!$c->stash->{project}->enable_dynamic_run_command) {
$c->stash->{runcommandlogProblem} = "disabled-project";
} elsif (!$c->stash->{jobset}->enable_dynamic_run_command) {
$c->stash->{runcommandlogProblem} = "disabled-jobset";
}
}
}
@@ -66,14 +77,16 @@ sub build_GET {
$c->stash->{template} = 'build.tt';
$c->stash->{isLocalStore} = isLocalStore();
# XXX: If the derivation is content-addressed then this will always return
# false because `$_->path` will be empty
$c->stash->{available} =
$c->stash->{isLocalStore}
? all { isValidPath($_->path) } $build->buildoutputs->all
? all { $_->path && $MACHINE_LOCAL_STORE->isValidPath($_->path) } $build->buildoutputs->all
: 1;
$c->stash->{drvAvailable} = isValidPath $build->drvpath;
$c->stash->{drvAvailable} = $MACHINE_LOCAL_STORE->isValidPath($build->drvpath);
if ($build->finished && $build->iscachedbuild) {
my $path = ($build->buildoutputs)[0]->path or die;
my $path = ($build->buildoutputs)[0]->path or undef;
my $cachedBuildStep = findBuildStepByOutPath($self, $c, $path);
if (defined $cachedBuildStep) {
$c->stash->{cachedBuild} = $cachedBuildStep->build;
@@ -127,7 +140,7 @@ sub view_nixlog : Chained('buildChain') PathPart('nixlog') {
$c->stash->{step} = $step;
my $drvPath = $step->drvpath;
my $log_uri = $c->uri_for($c->controller('Root')->action_for("log"), [basename($drvPath)]);
my $log_uri = $c->uri_for($c->controller('Root')->action_for("log"), [WWW::Form::UrlEncoded::PP::url_encode(basename($drvPath))]);
showLog($c, $mode, $log_uri);
}
@@ -136,7 +149,7 @@ sub view_log : Chained('buildChain') PathPart('log') {
my ($self, $c, $mode) = @_;
my $drvPath = $c->stash->{build}->drvpath;
my $log_uri = $c->uri_for($c->controller('Root')->action_for("log"), [basename($drvPath)]);
my $log_uri = $c->uri_for($c->controller('Root')->action_for("log"), [WWW::Form::UrlEncoded::PP::url_encode(basename($drvPath))]);
showLog($c, $mode, $log_uri);
}
@@ -221,17 +234,24 @@ sub serveFile {
}
elsif ($ls->{type} eq "regular") {
# Have the hosted data be considered its own origin to avoid being a giant
# XSS hole.
$c->response->header('Content-Security-Policy' => 'sandbox allow-scripts');
$c->stash->{'plain'} = { data => grab(cmd => ["nix", "--experimental-features", "nix-command",
"cat-store", "--store", getStoreUri(), "$path"]) };
$c->stash->{'plain'} = { data => readIntoSocket(cmd => ["nix", "--experimental-features", "nix-command",
"store", "cat", "--store", getStoreUri(), "$path"]) };
# Detect MIME type. Borrowed from Catalyst::Plugin::Static::Simple.
# Detect MIME type.
my $type = "text/plain";
if ($path =~ /.*\.(\S{1,})$/xms) {
my $ext = $1;
my $mimeTypes = MIME::Types->new(only_complete => 1);
my $t = $mimeTypes->mimeTypeOf($ext);
$type = ref $t ? $t->type : $t if $t;
} else {
state $magic = File::LibMagic->new(follow_symlinks => 1);
my $info = $magic->info_from_filename($path);
$type = $info->{mime_with_encoding};
}
$c->response->content_type($type);
$c->forward('Hydra::View::Plain');
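
The detection logic above, condensed into a self-contained sketch (the helper name is hypothetical):

use MIME::Types;
use File::LibMagic;

sub guess_mime_type {    # hypothetical helper mirroring serveFile's logic
    my ($path) = @_;
    my $type = "text/plain";                  # default when nothing better is known
    if ($path =~ /\.(\S+)$/) {                # has an extension: look it up by suffix
        my $t = MIME::Types->new(only_complete => 1)->mimeTypeOf($1);
        $type = ref $t ? $t->type : $t if $t;
    } else {                                  # no extension: sniff the file contents
        my $info = File::LibMagic->new(follow_symlinks => 1)->info_from_filename($path);
        $type = $info->{mime_with_encoding};
    }
    return $type;
}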
@@ -277,29 +297,7 @@ sub download : Chained('buildChain') PathPart {
my $path = $product->path;
$path .= "/" . join("/", @path) if scalar @path > 0;
if (isLocalStore) {
notFound($c, "File '" . $product->path . "' does not exist.") unless -e $product->path;
# Make sure the file is in the Nix store.
$path = checkPath($self, $c, $path);
# If this is a directory but no "/" is attached, then redirect.
if (-d $path && substr($c->request->uri, -1) ne "/") {
return $c->res->redirect($c->request->uri . "/");
}
$path = "$path/index.html" if -d $path && -e "$path/index.html";
notFound($c, "File '$path' does not exist.") if !-e $path;
notFound($c, "Path '$path' is a directory.") if -d $path;
$c->serve_static_file($path);
} else {
serveFile($c, $path);
}
serveFile($c, $path);
$c->response->headers->last_modified($c->stash->{build}->stoptime);
}
@@ -312,7 +310,7 @@ sub output : Chained('buildChain') PathPart Args(1) {
error($c, "This build is not finished yet.") unless $build->finished;
my $output = $build->buildoutputs->find({name => $outputName});
notFound($c, "This build has no output named $outputName") unless defined $output;
gone($c, "Output is no longer available.") unless isValidPath $output->path;
gone($c, "Output is no longer available.") unless $MACHINE_LOCAL_STORE->isValidPath($output->path);
$c->response->header('Content-Disposition', "attachment; filename=\"build-${\$build->id}-${\$outputName}.nar.bz2\"");
$c->stash->{current_view} = 'NixNAR';
@@ -355,7 +353,7 @@ sub contents : Chained('buildChain') PathPart Args(1) {
# FIXME: don't use shell invocations below.
# FIXME: use nix cat-store
# FIXME: use nix store cat
my $res;
@@ -429,7 +427,7 @@ sub getDependencyGraph {
};
$$done{$path} = $node;
my @refs;
foreach my $ref (queryReferences($path)) {
foreach my $ref ($MACHINE_LOCAL_STORE->queryReferences($path)) {
next if $ref eq $path;
next unless $runtime || $ref =~ /\.drv$/;
getDependencyGraph($self, $c, $runtime, $done, $ref);
@@ -437,7 +435,7 @@ sub getDependencyGraph {
}
# Show in reverse topological order to flatten the graph.
# Should probably do a proper BFS.
my @sorted = reverse topoSortPaths(@refs);
my @sorted = reverse $MACHINE_LOCAL_STORE->topoSortPaths(@refs);
$node->{refs} = [map { $$done{$_} } @sorted];
}
@@ -450,7 +448,7 @@ sub build_deps : Chained('buildChain') PathPart('build-deps') {
my $build = $c->stash->{build};
my $drvPath = $build->drvpath;
error($c, "Derivation no longer available.") unless isValidPath $drvPath;
error($c, "Derivation no longer available.") unless $MACHINE_LOCAL_STORE->isValidPath($drvPath);
$c->stash->{buildTimeGraph} = getDependencyGraph($self, $c, 0, {}, $drvPath);
@@ -465,7 +463,7 @@ sub runtime_deps : Chained('buildChain') PathPart('runtime-deps') {
requireLocalStore($c);
error($c, "Build outputs no longer available.") unless all { isValidPath($_) } @outPaths;
error($c, "Build outputs no longer available.") unless all { $MACHINE_LOCAL_STORE->isValidPath($_) } @outPaths;
my $done = {};
$c->stash->{runtimeGraph} = [ map { getDependencyGraph($self, $c, 1, $done, $_) } @outPaths ];
@@ -485,7 +483,7 @@ sub nix : Chained('buildChain') PathPart('nix') CaptureArgs(0) {
if (isLocalStore) {
foreach my $out ($build->buildoutputs) {
notFound($c, "Path " . $out->path . " is no longer available.")
unless isValidPath($out->path);
unless $MACHINE_LOCAL_STORE->isValidPath($out->path);
}
}

View File

@@ -69,7 +69,7 @@ sub prometheus : Chained('job') PathPart('prometheus') Args(0) {
my $lastBuild = $c->stash->{jobset}->builds->find(
{ job => $c->stash->{job}, finished => 1 },
{ order_by => 'id DESC', rows => 1, columns => [@buildListColumns] }
{ order_by => 'id DESC', rows => 1, columns => ["stoptime", "buildstatus", "closuresize", "size"] }
);
$prometheus->new_counter(
@@ -92,6 +92,26 @@ sub prometheus : Chained('job') PathPart('prometheus') Args(0) {
$c->stash->{job},
)->inc($lastBuild->buildstatus > 0);
$prometheus->new_gauge(
name => "hydra_build_closure_size",
help => "Closure size of the last job's build in bytes",
labels => [ "project", "jobset", "job" ]
)->labels(
$c->stash->{project}->name,
$c->stash->{jobset}->name,
$c->stash->{job},
)->inc($lastBuild->closuresize);
$prometheus->new_gauge(
name => "hydra_build_output_size",
help => "Output size of the last job's build in bytes",
labels => [ "project", "jobset", "job" ]
)->labels(
$c->stash->{project}->name,
$c->stash->{jobset}->name,
$c->stash->{job},
)->inc($lastBuild->size);
$c->stash->{'plain'} = { data => $prometheus->render };
$c->forward('Hydra::View::Plain');
}
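
Rendered by $prometheus->render, the two new gauges come out roughly like this in the Prometheus exposition output (label values hypothetical):

# HELP hydra_build_closure_size Closure size of the last job's build in bytes
# TYPE hydra_build_closure_size gauge
hydra_build_closure_size{project="nixpkgs",jobset="trunk",job="hello"} 123456789
# HELP hydra_build_output_size Output size of the last job's build in bytes
# TYPE hydra_build_output_size gauge
hydra_build_output_size{project="nixpkgs",jobset="trunk",job="hello"} 4567890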

View File

@@ -261,6 +261,14 @@ sub updateJobset {
my $checkinterval = int(trim($c->stash->{params}->{checkinterval}));
my $enable_dynamic_run_command = defined $c->stash->{params}->{enable_dynamic_run_command} ? 1 : 0;
if ($enable_dynamic_run_command
&& !($c->config->{dynamicruncommand}->{enable}
&& $jobset->project->enable_dynamic_run_command))
{
badRequest($c, "Dynamic RunCommand is not enabled by the server or the parent project.");
}
$jobset->update(
{ name => $jobsetName
, description => trim($c->stash->{params}->{"description"})
@@ -268,6 +276,7 @@ sub updateJobset {
, nixexprinput => $nixExprInput
, enabled => $enabled
, enableemail => defined $c->stash->{params}->{enableemail} ? 1 : 0
, enable_dynamic_run_command => $enable_dynamic_run_command
, emailoverride => trim($c->stash->{params}->{emailoverride}) || ""
, hidden => defined $c->stash->{params}->{visible} ? 0 : 1
, keepnr => int(trim($c->stash->{params}->{keepnr} // "0"))
@@ -355,6 +364,21 @@ sub evals_GET {
);
}
sub errors :Chained('jobsetChain') :PathPart('errors') :Args(0) :ActionClass('REST') { }
sub errors_GET {
my ($self, $c) = @_;
$c->stash->{template} = 'eval-error.tt';
my $jobsetName = $c->stash->{params}->{name};
$c->stash->{jobset} = $c->stash->{project}->jobsets->find(
{ name => $jobsetName },
{ '+columns' => { 'errormsg' => 'errormsg' } }
);
$self->status_ok($c, entity => $c->stash->{jobset});
}
# Redirect to the latest finished evaluation of this jobset.
sub latest_eval : Chained('jobsetChain') PathPart('latest-eval') {

View File

@@ -76,7 +76,9 @@ sub view_GET {
$c->stash->{removed} = $diff->{removed};
$c->stash->{unfinished} = $diff->{unfinished};
$c->stash->{aborted} = $diff->{aborted};
$c->stash->{failed} = $diff->{failed};
$c->stash->{totalAborted} = $diff->{totalAborted};
$c->stash->{totalFailed} = $diff->{totalFailed};
$c->stash->{totalQueued} = $diff->{totalQueued};
$c->stash->{full} = ($c->req->params->{full} || "0") eq "1";
@@ -86,6 +88,17 @@ sub view_GET {
);
}
sub errors :Chained('evalChain') :PathPart('errors') :Args(0) :ActionClass('REST') { }
sub errors_GET {
my ($self, $c) = @_;
$c->stash->{template} = 'eval-error.tt';
$c->stash->{eval} = $c->model('DB::JobsetEvals')->find($c->stash->{eval}->id, { prefetch => 'evaluationerror' });
$self->status_ok($c, entity => $c->stash->{eval});
}
sub create_jobset : Chained('evalChain') PathPart('create-jobset') Args(0) {
my ($self, $c) = @_;

View File

@@ -149,6 +149,11 @@ sub updateProject {
my $displayName = trim $c->stash->{params}->{displayname};
error($c, "You must specify a display name.") if $displayName eq "";
my $enable_dynamic_run_command = defined $c->stash->{params}->{enable_dynamic_run_command} ? 1 : 0;
if ($enable_dynamic_run_command && !$c->config->{dynamicruncommand}->{enable}) {
badRequest($c, "Dynamic RunCommand is not enabled by the server.");
}
$project->update(
{ name => $projectName
, displayname => $displayName
@@ -157,6 +162,7 @@ sub updateProject {
, enabled => defined $c->stash->{params}->{enabled} ? 1 : 0
, hidden => defined $c->stash->{params}->{visible} ? 0 : 1
, owner => $owner
, enable_dynamic_run_command => $enable_dynamic_run_command
, declfile => trim($c->stash->{params}->{declarative}->{file})
, decltype => trim($c->stash->{params}->{declarative}->{type})
, declvalue => trim($c->stash->{params}->{declarative}->{value})

View File

@@ -16,8 +16,11 @@ use List::Util qw[min max];
use List::SomeUtils qw{any};
use Net::Prometheus;
use Types::Standard qw/StrMatch/;
use WWW::Form::UrlEncoded::PP qw();
use constant NARINFO_REGEX => qr{^([a-z0-9]{32})\.narinfo$};
# e.g.: https://hydra.example.com/realisations/sha256:a62128132508a3a32eef651d6467695944763602f226ac630543e947d9feb140!out.doi
use constant REALISATIONS_REGEX => qr{^(sha256:[a-z0-9]{64}![a-z]+)\.doi$};
# Put this controller at top-level.
__PACKAGE__->config->{namespace} = '';
@@ -32,6 +35,7 @@ sub noLoginNeeded {
return $whitelisted ||
$c->request->path eq "api/push-github" ||
$c->request->path eq "api/push-gitea" ||
$c->request->path eq "google-login" ||
$c->request->path eq "github-redirect" ||
$c->request->path eq "github-login" ||
@@ -47,6 +51,7 @@ sub begin :Private {
$c->stash->{curUri} = $c->request->uri;
$c->stash->{version} = $ENV{"HYDRA_RELEASE"} || "<devel>";
$c->stash->{nixVersion} = $ENV{"NIX_RELEASE"} || "<devel>";
$c->stash->{nixEvalJobsVersion} = $ENV{"NIX_EVAL_JOBS_RELEASE"} || "<devel>";
$c->stash->{curTime} = time;
$c->stash->{logo} = defined $c->config->{hydra_logo} ? "/logo" : "";
$c->stash->{tracker} = defined $c->config->{tracker} ? $c->config->{tracker} : "";
@@ -78,7 +83,7 @@ sub begin :Private {
$_->supportedInputTypes($c->stash->{inputTypes}) foreach @{$c->hydra_plugins};
# XSRF protection: require POST requests to have the same origin.
if ($c->req->method eq "POST" && $c->req->path ne "api/push-github") {
if ($c->req->method eq "POST" && $c->req->path ne "api/push-github" && $c->req->path ne "api/push-gitea") {
my $referer = $c->req->header('Referer');
$referer //= $c->req->header('Origin');
my $base = $c->req->base;
@@ -158,7 +163,7 @@ sub status_GET {
{ "buildsteps.busy" => { '!=', 0 } },
{ order_by => ["globalpriority DESC", "id"],
join => "buildsteps",
columns => [@buildListColumns]
columns => [@buildListColumns, 'buildsteps.drvpath', 'buildsteps.type']
})]
);
}
@@ -327,7 +332,7 @@ sub nar :Local :Args(1) {
else {
$path = $Nix::Config::storeDir . "/$path";
gone($c, "Path " . $path . " is no longer available.") unless isValidPath($path);
gone($c, "Path " . $path . " is no longer available.") unless $MACHINE_LOCAL_STORE->isValidPath($path);
$c->stash->{current_view} = 'NixNAR';
$c->stash->{storePath} = $path;
@@ -356,6 +361,33 @@ sub nix_cache_info :Path('nix-cache-info') :Args(0) {
}
sub realisations :Path('realisations') :Args(StrMatch[REALISATIONS_REGEX]) {
my ($self, $c, $realisation) = @_;
if (!isLocalStore) {
notFound($c, "There is no binary cache here.");
}
else {
my ($rawDrvOutput) = $realisation =~ REALISATIONS_REGEX;
my $rawRealisation = $MACHINE_LOCAL_STORE->queryRawRealisation($rawDrvOutput);
if (!$rawRealisation) {
$c->response->status(404);
$c->response->content_type('text/plain');
$c->stash->{plain}->{data} = "does not exist\n";
$c->forward('Hydra::View::Plain');
setCacheHeaders($c, 60 * 60);
return;
}
$c->response->content_type('text/plain');
$c->stash->{plain}->{data} = $rawRealisation;
$c->forward('Hydra::View::Plain');
}
}
sub narinfo :Path :Args(StrMatch[NARINFO_REGEX]) {
my ($self, $c, $narinfo) = @_;
@@ -367,7 +399,7 @@ sub narinfo :Path :Args(StrMatch[NARINFO_REGEX]) {
my ($hash) = $narinfo =~ NARINFO_REGEX;
die("Hash length was not 32") if length($hash) != 32;
my $path = queryPathFromHashPart($hash);
my $path = $MACHINE_LOCAL_STORE->queryPathFromHashPart($hash);
if (!$path) {
$c->response->status(404);
@@ -525,7 +557,7 @@ sub log :Local :Args(1) {
my $logPrefix = $c->config->{log_prefix};
if (defined $logPrefix) {
$c->res->redirect($logPrefix . "log/" . basename($drvPath));
$c->res->redirect($logPrefix . "log/" . WWW::Form::UrlEncoded::PP::url_encode(basename($drvPath)));
} else {
notFound($c, "The build log of $drvPath is not available.");
}

View File

@@ -7,6 +7,7 @@ use base 'Hydra::Base::Controller::REST';
use File::Slurper qw(read_text);
use Crypt::RandPasswd;
use Digest::SHA1 qw(sha1_hex);
use Hydra::Config qw(getLDAPConfigAmbient);
use Hydra::Helper::Nix;
use Hydra::Helper::CatalystUtils;
use Hydra::Helper::Email;
@@ -56,10 +57,10 @@ sub logout_POST {
sub doLDAPLogin {
my ($self, $c, $username) = @_;
my $user = $c->find_user({ username => $username });
my $LDAPUser = $c->find_user({ username => $username }, 'ldap');
my @LDAPRoles = grep { (substr $_, 0, 6) eq "hydra_" } $LDAPUser->roles;
my @LDAPRoles = $LDAPUser->roles;
my $role_mapping = getLDAPConfigAmbient()->{"role_mapping"};
if (!$user) {
$c->model('DB::Users')->create(
@@ -79,8 +80,13 @@ sub doLDAPLogin {
});
}
$user->userroles->delete;
if (@LDAPRoles) {
$user->userroles->create({ role => (substr $_, 6) }) for @LDAPRoles;
foreach my $ldap_role (@LDAPRoles) {
if (defined($role_mapping->{$ldap_role})) {
my $roles = $role_mapping->{$ldap_role};
for my $mapped_role (@$roles) {
$user->userroles->create({ role => $mapped_role });
}
}
}
$c->set_authenticated($user);
}
@@ -457,7 +463,7 @@ sub my_jobs_tab :Chained('dashboard_base') :PathPart('my-jobs-tab') :Args(0) {
, "jobset.enabled" => 1
},
{ order_by => ["project", "jobset", "job"]
, join => ["project", "jobset"]
, join => {"jobset" => "project"}
})];
}

View File

@@ -19,14 +19,16 @@ use Hydra::Helper::CatalystUtils;
our @ISA = qw(Exporter);
our @EXPORT = qw(
validateDeclarativeJobset
createJobsetInputsRowAndData
updateDeclarativeJobset
handleDeclarativeJobsetBuild
handleDeclarativeJobsetJson
);
sub updateDeclarativeJobset {
my ($db, $project, $jobsetName, $declSpec) = @_;
sub validateDeclarativeJobset {
my ($config, $project, $jobsetName, $declSpec) = @_;
my @allowed_keys = qw(
enabled
@@ -39,6 +41,7 @@ sub updateDeclarativeJobset {
checkinterval
schedulingshares
enableemail
enable_dynamic_run_command
emailoverride
keepnr
);
@@ -61,16 +64,39 @@ sub updateDeclarativeJobset {
}
}
my $enable_dynamic_run_command = defined $update{enable_dynamic_run_command} ? 1 : 0;
if ($enable_dynamic_run_command
&& !($config->{dynamicruncommand}->{enable}
&& $project->enable_dynamic_run_command))
{
die "Dynamic RunCommand is not enabled by the server or the parent project.";
}
return %update;
}
sub createJobsetInputsRowAndData {
my ($name, $declSpec) = @_;
my $data = $declSpec->{"inputs"}->{$name};
my $row = {
name => $name,
type => $data->{type}
};
$row->{emailresponsible} = $data->{emailresponsible} // 0;
return ($row, $data);
}
sub updateDeclarativeJobset {
my ($config, $db, $project, $jobsetName, $declSpec) = @_;
my %update = validateDeclarativeJobset($config, $project, $jobsetName, $declSpec);
$db->txn_do(sub {
my $jobset = $project->jobsets->update_or_create(\%update);
$jobset->jobsetinputs->delete;
foreach my $name (keys %{$declSpec->{"inputs"}}) {
my $data = $declSpec->{"inputs"}->{$name};
my $row = {
name => $name,
type => $data->{type}
};
$row->{emailresponsible} = $data->{emailresponsible} // 0;
my ($row, $data) = createJobsetInputsRowAndData($name, $declSpec);
my $input = $jobset->jobsetinputs->create($row);
$input->jobsetinputalts->create({altnr => 0, value => $data->{value}});
}
@@ -81,6 +107,7 @@ sub updateDeclarativeJobset {
sub handleDeclarativeJobsetJson {
my ($db, $project, $declSpec) = @_;
my $config = getHydraConfig();
$db->txn_do(sub {
my @kept = keys %$declSpec;
push @kept, ".jobsets";
@@ -88,7 +115,7 @@ sub handleDeclarativeJobsetJson {
foreach my $jobsetName (keys %$declSpec) {
my $spec = $declSpec->{$jobsetName};
eval {
updateDeclarativeJobset($db, $project, $jobsetName, $spec);
updateDeclarativeJobset($config, $db, $project, $jobsetName, $spec);
1;
} or do {
print STDERR "ERROR: failed to process declarative jobset ", $project->name, ":${jobsetName}, ", $@, "\n";

View File

@@ -32,12 +32,26 @@ sub buildDiff {
removed => [],
unfinished => [],
aborted => [],
failed => [],
# These summary counters cut across the categories to determine whether
# actions such as "Restart all failed" or "Bump queue" are available.
totalAborted => 0,
totalFailed => 0,
totalQueued => 0,
};
my $n = 0;
foreach my $build (@{$builds}) {
my $aborted = $build->finished != 0 && ($build->buildstatus == 3 || $build->buildstatus == 4);
my $aborted = $build->finished != 0 && (
# aborted
$build->buildstatus == 3
# cancelled
|| $build->buildstatus == 4
# timeout
|| $build->buildstatus == 7
# log limit exceeded
|| $build->buildstatus == 10
);
my $d;
my $found = 0;
while ($n < scalar(@{$builds2})) {
@@ -71,12 +85,19 @@ sub buildDiff {
} else {
push @{$ret->{new}}, $build if !$found;
}
if (defined $build->buildstatus && $build->buildstatus != 0) {
push @{$ret->{failed}}, $build;
if ($build->finished != 0 && $build->buildstatus != 0) {
if ($aborted) {
++$ret->{totalAborted};
} else {
++$ret->{totalFailed};
}
} elsif ($build->finished == 0) {
++$ret->{totalQueued};
}
}
return $ret;
}
1;
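
For reference, the buildstatus values the aborted check above treats as aborted (names taken from the inline comments; the hash itself is illustrative):

# A finished build with one of these buildstatus values counts as "aborted":
my %ABORTED_BUILDSTATUS = (
    3  => "aborted",
    4  => "cancelled",
    7  => "timeout",
    10 => "log limit exceeded",
);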

View File

@@ -15,6 +15,7 @@ our @EXPORT = qw(
forceLogin requireUser requireProjectOwner requireRestartPrivileges requireAdmin requirePost isAdmin isProjectOwner
requireBumpPrivileges
requireCancelBuildPrivileges
requireEvalJobsetPrivileges
trim
getLatestFinishedEval getFirstEval
paramToList
@@ -186,6 +187,27 @@ sub isProjectOwner {
defined $c->model('DB::ProjectMembers')->find({ project => $project, userName => $c->user->username }));
}
sub hasEvalJobsetRole {
my ($c) = @_;
return $c->user_exists && $c->check_user_roles("eval-jobset");
}
sub mayEvalJobset {
my ($c, $project) = @_;
return
$c->user_exists &&
(isAdmin($c) ||
hasEvalJobsetRole($c) ||
isProjectOwner($c, $project));
}
sub requireEvalJobsetPrivileges {
my ($c, $project) = @_;
requireUser($c);
accessDenied($c, "Only the project members, administrators, and accounts with eval-jobset privileges can perform this operation.")
unless mayEvalJobset($c, $project);
}
sub hasCancelBuildRole {
my ($c) = @_;
return $c->user_exists && $c->check_user_roles('cancel-build');
@@ -272,7 +294,7 @@ sub requireAdmin {
sub requirePost {
my ($c) = @_;
error($c, "Request must be POSTed.") if $c->request->method ne "POST";
error($c, "Request must be POSTed.", 405) if $c->request->method ne "POST";
}

View File

@@ -5,7 +5,6 @@ use warnings;
use Exporter;
use File::Path;
use File::Basename;
use Config::General;
use Hydra::Config;
use Hydra::Helper::CatalystUtils;
use Hydra::Model::DB;
@@ -37,36 +36,22 @@ our @EXPORT = qw(
jobsetOverview
jobsetOverview_
pathIsInsidePrefix
readIntoSocket
readNixFile
registerRoot
restartBuilds
run
$MACHINE_LOCAL_STORE
);
our $MACHINE_LOCAL_STORE = Nix::Store->new();
sub getHydraHome {
my $dir = $ENV{"HYDRA_HOME"} or die "The HYDRA_HOME directory does not exist!\n";
return $dir;
}
my $hydraConfig;
sub getHydraConfig {
return $hydraConfig if defined $hydraConfig;
my $conf = $ENV{"HYDRA_CONFIG"} || (Hydra::Model::DB::getHydraPath . "/hydra.conf");
my %opts = (%Hydra::Config::configGeneralOpts, -ConfigFile => $conf);
if (-f $conf) {
my %h = Config::General->new(%opts)->getall;
$hydraConfig = \%h;
} else {
$hydraConfig = {};
}
return $hydraConfig;
}
# Return hash of statsd configuration of the following shape:
# (
# host => string,
@@ -190,6 +175,9 @@ sub getDrvLogPath {
for ($fn . $bucketed, $fn . $bucketed . ".bz2") {
return $_ if -f $_;
}
for ($fn . $bucketed, $fn . $bucketed . ".zst") {
return $_ if -f $_;
}
return undef;
}
@@ -206,6 +194,10 @@ sub findLog {
return undef if scalar @outPaths == 0;
# Filter out any NULLs. Content-addressed derivations
# that haven't built yet or failed to build may have a NULL outPath.
@outPaths = grep {defined} @outPaths;
my @steps = $c->model('DB::BuildSteps')->search(
{ path => { -in => [@outPaths] } },
{ select => ["drvpath"]
@@ -305,8 +297,7 @@ sub getEvals {
my @evals = $evals_result_set->search(
{ hasnewbuilds => 1 },
{ order_by => "$me.id DESC", rows => $rows, offset => $offset
, prefetch => { evaluationerror => [ ] } });
{ order_by => "$me.id DESC", rows => $rows, offset => $offset });
my @res = ();
my $cache = {};
@@ -426,6 +417,16 @@ sub pathIsInsidePrefix {
return $cur;
}
sub readIntoSocket {
my (%args) = @_;
my $sock;
eval {
open($sock, "-|", @{$args{cmd}}) or die "failed to open socket from command: @{$args{cmd}}\n";
};
return $sock;
}
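
A minimal usage sketch, mirroring how serveFile streams `nix store cat` output through this helper ($path is hypothetical):

my $fh = readIntoSocket(cmd => ["nix", "--experimental-features", "nix-command",
                                "store", "cat", "--store", getStoreUri(), $path]);
# Read from $fh incrementally instead of buffering the whole file in memory.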
@@ -513,7 +514,7 @@ sub restartBuilds {
$builds = $builds->search({ finished => 1 });
foreach my $build ($builds->search({}, { columns => ["drvpath"] })) {
next if !isValidPath($build->drvpath);
next if !$MACHINE_LOCAL_STORE->isValidPath($build->drvpath);
registerRoot $build->drvpath;
}
@@ -556,7 +557,7 @@ sub getStoreUri {
sub readNixFile {
my ($path) = @_;
return grab(cmd => ["nix", "--experimental-features", "nix-command",
"cat-store", "--store", getStoreUri(), "$path"]);
"store", "cat", "--store", getStoreUri(), "$path"]);
}

View File

@@ -7,7 +7,6 @@ use Digest::SHA qw(sha256_hex);
use File::Path;
use Hydra::Helper::Exec;
use Hydra::Helper::Nix;
use Nix::Store;
sub supportedInputTypes {
my ($self, $inputTypes) = @_;
@@ -38,9 +37,9 @@ sub fetchInput {
(my $cachedInput) = $self->{db}->resultset('CachedBazaarInputs')->search(
{uri => $uri, revision => $revision});
addTempRoot($cachedInput->storepath) if defined $cachedInput;
$MACHINE_LOCAL_STORE->addTempRoot($cachedInput->storepath) if defined $cachedInput;
if (defined $cachedInput && isValidPath($cachedInput->storepath)) {
if (defined $cachedInput && $MACHINE_LOCAL_STORE->isValidPath($cachedInput->storepath)) {
$storePath = $cachedInput->storepath;
$sha256 = $cachedInput->sha256hash;
} else {
@@ -58,7 +57,7 @@ sub fetchInput {
($sha256, $storePath) = split ' ', $stdout;
# FIXME: time window between nix-prefetch-bzr and addTempRoot.
addTempRoot($storePath);
$MACHINE_LOCAL_STORE->addTempRoot($storePath);
$self->{db}->txn_do(sub {
$self->{db}->resultset('CachedBazaarInputs')->create(

View File

@@ -9,11 +9,24 @@ use Hydra::Helper::CatalystUtils;
sub stepFinished {
my ($self, $step, $logPath) = @_;
my $doCompress = $self->{config}->{'compress_build_logs'} // "1";
my $doCompress = $self->{config}->{'compress_build_logs'} // '1';
my $silent = $self->{config}->{'compress_build_logs_silent'} // '0';
my $compression = $self->{config}->{'compress_build_logs_compression'} // 'bzip2';
if ($doCompress eq "1" && -e $logPath) {
print STDERR "compressing $logPath...\n";
system("bzip2", "--force", $logPath);
if (not -e $logPath or $doCompress ne "1") {
return;
}
if ($silent ne '1') {
print STDERR "compressing '$logPath' with $compression...\n";
}
if ($compression eq 'bzip2') {
system('bzip2', '--force', $logPath);
} elsif ($compression eq 'zstd') {
system('zstd', '--rm', '--quiet', '-T0', $logPath);
} else {
print STDERR "unknown compression type '$compression'\n";
}
}
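
A hydra.conf sketch of the three options this hook reads (keys as read above; the values are illustrative):

compress_build_logs = 1
compress_build_logs_silent = 0
compress_build_logs_compression = zstd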

View File

@@ -7,7 +7,6 @@ use Digest::SHA qw(sha256_hex);
use File::Path;
use Hydra::Helper::Exec;
use Hydra::Helper::Nix;
use Nix::Store;
sub supportedInputTypes {
my ($self, $inputTypes) = @_;
@@ -58,7 +57,7 @@ sub fetchInput {
{uri => $uri, revision => $revision},
{rows => 1});
if (defined $cachedInput && isValidPath($cachedInput->storepath)) {
if (defined $cachedInput && $MACHINE_LOCAL_STORE->isValidPath($cachedInput->storepath)) {
$storePath = $cachedInput->storepath;
$sha256 = $cachedInput->sha256hash;
$revision = $cachedInput->revision;
@@ -75,8 +74,8 @@ sub fetchInput {
die "darcs changes --count failed" if $? != 0;
system "rm", "-rf", "$tmpDir/export/_darcs";
$storePath = addToStore("$tmpDir/export", 1, "sha256");
$sha256 = queryPathHash($storePath);
$storePath = $MACHINE_LOCAL_STORE->addToStore("$tmpDir/export", 1, "sha256");
$sha256 = $MACHINE_LOCAL_STORE->queryPathHash($storePath);
$sha256 =~ s/sha256://;
$self->{db}->txn_do(sub {

View File

@@ -186,9 +186,9 @@ sub fetchInput {
{uri => $uri, branch => $branch, revision => $revision, isdeepclone => defined($deepClone) ? 1 : 0},
{rows => 1});
addTempRoot($cachedInput->storepath) if defined $cachedInput;
$MACHINE_LOCAL_STORE->addTempRoot($cachedInput->storepath) if defined $cachedInput;
if (defined $cachedInput && isValidPath($cachedInput->storepath)) {
if (defined $cachedInput && $MACHINE_LOCAL_STORE->isValidPath($cachedInput->storepath)) {
$storePath = $cachedInput->storepath;
$sha256 = $cachedInput->sha256hash;
$revision = $cachedInput->revision;
@@ -217,7 +217,7 @@ sub fetchInput {
($sha256, $storePath) = split ' ', grab(cmd => ["nix-prefetch-git", $clonePath, $revision], chomp => 1);
# FIXME: time window between nix-prefetch-git and addTempRoot.
addTempRoot($storePath);
$MACHINE_LOCAL_STORE->addTempRoot($storePath);
$self->{db}->txn_do(sub {
$self->{db}->resultset('CachedGitInputs')->update_or_create(
@@ -261,7 +261,7 @@ sub getCommits {
my $clonePath = getSCMCacheDir . "/git/" . sha256_hex($uri);
my $out = grab(cmd => ["git", "log", "--pretty=format:%H%x09%an%x09%ae%x09%at", "$rev1..$rev2"], dir => $clonePath);
my $out = grab(cmd => ["git", "--git-dir=.git", "log", "--pretty=format:%H%x09%an%x09%ae%x09%at", "$rev1..$rev2"], dir => $clonePath);
my $res = [];
foreach my $line (split /\n/, $out) {

View File

@@ -88,10 +88,6 @@ sub buildQueued {
common(@_, [], 0);
}
sub buildStarted {
common(@_, [], 1);
}
sub buildFinished {
common(@_, 2);
}

View File

@@ -30,7 +30,7 @@ sub _iterate {
$pulls->{$pull->{number}} = $pull;
}
# TODO Make Link header parsing more robust!!!
my @links = split ',', $res->header("Link");
my @links = split ',', ($res->header("Link") // "");
my $next = "";
foreach my $link (@links) {
my ($url, $rel) = split ";", $link;
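
A slightly more defensive sketch of the `<url>; rel="next"` parsing the TODO refers to (the helper name is hypothetical):

sub find_next_link {    # hypothetical replacement for the split-based parsing
    my ($link_header) = @_;
    foreach my $link (split /,\s*/, ($link_header // "")) {
        # RFC 8288 form: <https://api.github.com/...?page=2>; rel="next"
        return $1 if $link =~ /<([^>]+)>\s*;\s*rel="?next"?/;
    }
    return "";
}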

View File

@@ -1,89 +0,0 @@
package Hydra::Plugin::HipChatNotification;
use strict;
use warnings;
use parent 'Hydra::Plugin';
use LWP::UserAgent;
use Hydra::Helper::CatalystUtils;
sub isEnabled {
my ($self) = @_;
return defined $self->{config}->{hipchat};
}
sub buildFinished {
my ($self, $topbuild, $dependents) = @_;
my $cfg = $self->{config}->{hipchat};
my @config = defined $cfg ? ref $cfg eq "ARRAY" ? @$cfg : ($cfg) : ();
my $baseurl = $self->{config}->{'base_uri'} || "http://localhost:3000";
# Figure out to which rooms to send notification. For each email
# room, we send one aggregate message.
my %rooms;
foreach my $build ($topbuild, @{$dependents}) {
my $prevBuild = getPreviousBuild($build);
my $jobName = showJobName $build;
foreach my $room (@config) {
my $force = $room->{force};
next unless $jobName =~ /^$room->{jobs}$/;
# If build is cancelled or aborted, do not send email.
next if ! $force && ($build->buildstatus == 4 || $build->buildstatus == 3);
# If there is a previous (that is not cancelled or aborted) build
# with same buildstatus, do not send email.
next if ! $force && defined $prevBuild && ($build->buildstatus == $prevBuild->buildstatus);
$rooms{$room->{room}} //= { room => $room, builds => [] };
push @{$rooms{$room->{room}}->{builds}}, $build;
}
}
return if scalar keys %rooms == 0;
my ($authors, $nrCommits) = getResponsibleAuthors($topbuild, $self->{plugins});
# Send a message to each room.
foreach my $roomId (keys %rooms) {
my $room = $rooms{$roomId};
my @deps = grep { $_->id != $topbuild->id } @{$room->{builds}};
my $img =
$topbuild->buildstatus == 0 ? "$baseurl/static/images/checkmark_16.png" :
$topbuild->buildstatus == 2 ? "$baseurl/static/images/dependency_16.png" :
$topbuild->buildstatus == 4 ? "$baseurl/static/images/cancelled_16.png" :
"$baseurl/static/images/error_16.png";
my $msg = "";
$msg .= "<img src='$img'/> ";
$msg .= "Job <a href='$baseurl/job/${\$topbuild->jobset->get_column('project')}/${\$topbuild->jobset->get_column('name')}/${\$topbuild->get_column('job')}'>${\showJobName($topbuild)}</a>";
$msg .= " (and ${\scalar @deps} others)" if scalar @deps > 0;
$msg .= ": <a href='$baseurl/build/${\$topbuild->id}'>" . showStatus($topbuild) . "</a>";
if (scalar keys %{$authors} > 0) {
# FIXME: HTML escaping
my @x = map { "<a href='mailto:$authors->{$_}'>$_</a>" } (sort keys %{$authors});
$msg .= ", likely due to ";
$msg .= "$nrCommits commits by " if $nrCommits > 1;
$msg .= join(" or ", scalar @x > 1 ? join(", ", @x[0..scalar @x - 2]) : (), $x[-1]);
}
print STDERR "sending hipchat notification to room $roomId: $msg\n";
my $ua = LWP::UserAgent->new();
my $resp = $ua->post('https://api.hipchat.com/v1/rooms/message?format=json&auth_token=' . $room->{room}->{token}, {
room_id => $roomId,
from => 'Hydra',
message => $msg,
message_format => 'html',
notify => $room->{room}->{notify} || 0,
color => $topbuild->buildstatus == 0 ? 'green' : 'red' });
print STDERR $resp->status_line, ": ", $resp->decoded_content,"\n" if !$resp->is_success;
}
}
1;

View File

@@ -7,7 +7,6 @@ use Digest::SHA qw(sha256_hex);
use File::Path;
use Hydra::Helper::Nix;
use Hydra::Helper::Exec;
use Nix::Store;
use Fcntl qw(:flock);
sub supportedInputTypes {
@@ -68,9 +67,9 @@ sub fetchInput {
(my $cachedInput) = $self->{db}->resultset('CachedHgInputs')->search(
{uri => $uri, branch => $branch, revision => $revision});
addTempRoot($cachedInput->storepath) if defined $cachedInput;
$MACHINE_LOCAL_STORE->addTempRoot($cachedInput->storepath) if defined $cachedInput;
if (defined $cachedInput && isValidPath($cachedInput->storepath)) {
if (defined $cachedInput && $MACHINE_LOCAL_STORE->isValidPath($cachedInput->storepath)) {
$storePath = $cachedInput->storepath;
$sha256 = $cachedInput->sha256hash;
} else {
@@ -85,7 +84,7 @@ sub fetchInput {
($sha256, $storePath) = split ' ', $stdout;
# FIXME: time window between nix-prefetch-hg and addTempRoot.
addTempRoot($storePath);
$MACHINE_LOCAL_STORE->addTempRoot($storePath);
$self->{db}->txn_do(sub {
$self->{db}->resultset('CachedHgInputs')->update_or_create(

View File

@@ -5,7 +5,6 @@ use warnings;
use parent 'Hydra::Plugin';
use POSIX qw(strftime);
use Hydra::Helper::Nix;
use Nix::Store;
sub supportedInputTypes {
my ($self, $inputTypes) = @_;
@@ -30,7 +29,7 @@ sub fetchInput {
{srcpath => $uri, lastseen => {">", $timestamp - $timeout}},
{rows => 1, order_by => "lastseen DESC"});
if (defined $cachedInput && isValidPath($cachedInput->storepath)) {
if (defined $cachedInput && $MACHINE_LOCAL_STORE->isValidPath($cachedInput->storepath)) {
$storePath = $cachedInput->storepath;
$sha256 = $cachedInput->sha256hash;
$timestamp = $cachedInput->timestamp;
@@ -46,7 +45,7 @@ sub fetchInput {
}
chomp $storePath;
$sha256 = (queryPathInfo($storePath, 0))[1] or die;
$sha256 = ($MACHINE_LOCAL_STORE->queryPathInfo($storePath, 0))[1] or die;
($cachedInput) = $self->{db}->resultset('CachedPathInputs')->search(
{srcpath => $uri, sha256hash => $sha256});

View File

@@ -12,7 +12,74 @@ use Try::Tiny;
sub isEnabled {
my ($self) = @_;
return defined $self->{config}->{runcommand};
return areStaticCommandsEnabled($self->{config}) || areDynamicCommandsEnabled($self->{config});
}
sub areStaticCommandsEnabled {
my ($config) = @_;
if (defined $config->{runcommand}) {
return 1;
}
return 0;
}
sub areDynamicCommandsEnabled {
my ($config) = @_;
if ((defined $config->{dynamicruncommand})
&& $config->{dynamicruncommand}->{enable}) {
return 1;
}
return 0;
}
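
A minimal hydra.conf sketch (Config::General syntax, as parsed by Hydra::Config) under which areDynamicCommandsEnabled returns 1:

<dynamicruncommand>
    enable = 1
</dynamicruncommand>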
sub isBuildEligibleForDynamicRunCommand {
my ($build) = @_;
if ($build->get_column("buildstatus") != 0) {
return 0;
}
if ($build->get_column("job") =~ "^runCommandHook\..+") {
my $out = $build->buildoutputs->find({name => "out"});
if (!defined $out) {
warn "DynamicRunCommand hook on " . $build->job . " (" . $build->id . ") rejected: no output named 'out'.";
return 0;
}
my $path = $out->path;
if (-l $path) {
$path = readlink($path);
}
if (! -e $path) {
warn "DynamicRunCommand hook on " . $build->job . " (" . $build->id . ") rejected: The 'out' output doesn't exist locally. This is a bug.";
return 0;
}
if (! -x $path) {
warn "DynamicRunCommand hook on " . $build->job . " (" . $build->id . ") rejected: The 'out' output is not executable.";
return 0;
}
if (! -f $path) {
warn "DynamicRunCommand hook on " . $build->job . " (" . $build->id . ") rejected: The 'out' output is not a regular file or symlink.";
return 0;
}
if (! $build->jobset->supportsDynamicRunCommand()) {
warn "DynamicRunCommand hook on " . $build->job . " (" . $build->id . ") rejected: The project or jobset don't have dynamic runcommand enabled.";
return 0;
}
return 1;
}
return 0;
}
sub configSectionMatches {
@@ -43,10 +110,11 @@ sub eventMatches {
}
sub fanoutToCommands {
my ($config, $event, $project, $jobset, $job) = @_;
my ($config, $event, $build) = @_;
my @commands;
# Calculate all the statically defined commands to execute
my $cfg = $config->{runcommand};
my @config = defined $cfg ? ref $cfg eq "ARRAY" ? @$cfg : ($cfg) : ();
@@ -55,9 +123,10 @@ sub fanoutToCommands {
next unless eventMatches($conf, $event);
next unless configSectionMatches(
$matcher,
$project,
$jobset,
$job);
$build->jobset->get_column('project'),
$build->jobset->get_column('name'),
$build->get_column('job')
);
if (!defined($conf->{command})) {
warn "<runcommand> section for '$matcher' lacks a 'command' option";
@@ -70,6 +139,18 @@ sub fanoutToCommands {
})
}
# Calculate all dynamically defined commands to execute
if (areDynamicCommandsEnabled($config)) {
if (isBuildEligibleForDynamicRunCommand($build)) {
my $job = $build->get_column('job');
my $out = $build->buildoutputs->find({name => "out"});
push(@commands, {
matcher => "DynamicRunCommand($job)",
command => $out->path
})
}
}
return \@commands;
}
@@ -138,9 +219,7 @@ sub buildFinished {
my $commandsToRun = fanoutToCommands(
$self->{config},
$event,
$build->project->get_column('name'),
$build->jobset->get_column('name'),
$build->get_column('job')
$build
);
if (@$commandsToRun == 0) {

View File

@@ -14,6 +14,7 @@ use Nix::Config;
use Nix::Store;
use Hydra::Model::DB;
use Hydra::Helper::CatalystUtils;
use Hydra::Helper::Nix;
sub isEnabled {
my ($self) = @_;
@@ -92,7 +93,7 @@ sub buildFinished {
my $hash = substr basename($path), 0, 32;
my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($path, 0);
my $system;
if (defined $deriver and isValidPath($deriver)) {
if (defined $deriver and $MACHINE_LOCAL_STORE->isValidPath($deriver)) {
$system = derivationFromPath($deriver)->{platform};
}
foreach my $reference (@{$refs}) {

View File

@@ -7,7 +7,6 @@ use Digest::SHA qw(sha256_hex);
use Hydra::Helper::Exec;
use Hydra::Helper::Nix;
use IPC::Run;
use Nix::Store;
sub supportedInputTypes {
my ($self, $inputTypes) = @_;
@@ -45,9 +44,9 @@ sub fetchInput {
(my $cachedInput) = $self->{db}->resultset('CachedSubversionInputs')->search(
{uri => $uri, revision => $revision});
addTempRoot($cachedInput->storepath) if defined $cachedInput;
$MACHINE_LOCAL_STORE->addTempRoot($cachedInput->storepath) if defined $cachedInput;
if (defined $cachedInput && isValidPath($cachedInput->storepath)) {
if (defined $cachedInput && $MACHINE_LOCAL_STORE->isValidPath($cachedInput->storepath)) {
$storePath = $cachedInput->storepath;
$sha256 = $cachedInput->sha256hash;
} else {
@@ -62,16 +61,16 @@ sub fetchInput {
die "error checking out Subversion repo at `$uri':\n$stderr" if $res;
if ($type eq "svn-checkout") {
$storePath = addToStore($wcPath, 1, "sha256");
$storePath = $MACHINE_LOCAL_STORE->addToStore($wcPath, 1, "sha256");
} else {
# Hm, if the Nix Perl bindings supported filters in
# addToStore(), then we wouldn't need to make a copy here.
my $tmpDir = File::Temp->newdir("hydra-svn-export.XXXXXX", CLEANUP => 1, TMPDIR => 1) or die;
(system "svn", "export", $wcPath, "$tmpDir/source", "--quiet") == 0 or die "svn export failed";
$storePath = addToStore("$tmpDir/source", 1, "sha256");
$storePath = $MACHINE_LOCAL_STORE->addToStore("$tmpDir/source", 1, "sha256");
}
$sha256 = queryPathHash($storePath); $sha256 =~ s/sha256://;
$sha256 = $MACHINE_LOCAL_STORE->queryPathHash($storePath); $sha256 =~ s/sha256://;
$self->{db}->txn_do(sub {
$self->{db}->resultset('CachedSubversionInputs')->update_or_create(

View File

@@ -49,7 +49,7 @@ __PACKAGE__->table("buildoutputs");
=head2 path
data_type: 'text'
is_nullable: 0
is_nullable: 1
=cut
@@ -59,7 +59,7 @@ __PACKAGE__->add_columns(
"name",
{ data_type => "text", is_nullable => 0 },
"path",
{ data_type => "text", is_nullable => 0 },
{ data_type => "text", is_nullable => 1 },
);
=head1 PRIMARY KEY
@@ -94,8 +94,8 @@ __PACKAGE__->belongs_to(
);
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-08-26 12:02:36
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:gU+kZ6A0ISKpaXGRGve8mg
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2022-06-30 12:02:32
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Jsabm3YTcI7YvCuNdKP5Ng
my %hint = (
columns => [

View File

@@ -55,7 +55,7 @@ __PACKAGE__->table("buildstepoutputs");
=head2 path
data_type: 'text'
is_nullable: 0
is_nullable: 1
=cut
@@ -67,7 +67,7 @@ __PACKAGE__->add_columns(
"name",
{ data_type => "text", is_nullable => 0 },
"path",
{ data_type => "text", is_nullable => 0 },
{ data_type => "text", is_nullable => 1 },
);
=head1 PRIMARY KEY
@@ -119,8 +119,8 @@ __PACKAGE__->belongs_to(
);
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2021-08-26 12:02:36
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:gxp8rOjpRVen4YbIjomHTw
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2022-06-30 12:02:32
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Bad70CRTt7zb2GGuRoQ++Q
# You can replace this text with custom code or comments, and it will be preserved on regeneration

View File

@@ -105,4 +105,6 @@ __PACKAGE__->add_column(
"+id" => { retrieve_on_insert => 1 }
);
__PACKAGE__->mk_group_accessors('column' => 'has_error');
1;

View File

@@ -155,6 +155,12 @@ __PACKAGE__->table("jobsets");
data_type: 'text'
is_nullable: 1
=head2 enable_dynamic_run_command
data_type: 'boolean'
default_value: false
is_nullable: 0
=cut
__PACKAGE__->add_columns(
@@ -207,6 +213,8 @@ __PACKAGE__->add_columns(
{ data_type => "integer", default_value => 0, is_nullable => 0 },
"flake",
{ data_type => "text", is_nullable => 1 },
"enable_dynamic_run_command",
{ data_type => "boolean", default_value => \"false", is_nullable => 0 },
);
=head1 PRIMARY KEY
@@ -354,8 +362,8 @@ __PACKAGE__->has_many(
);
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2022-01-08 22:24:10
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:cQOnMitrWGMoJX6kZGNW+w
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2022-01-24 14:17:33
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:7wPE5ebeVTkenMCWG9Sgcg
use JSON::MaybeXS;
@@ -378,6 +386,15 @@ __PACKAGE__->add_column(
"+id" => { retrieve_on_insert => 1 }
);
__PACKAGE__->mk_group_accessors('column' => 'has_error');
sub supportsDynamicRunCommand {
my ($self) = @_;
return $self->get_column('enable_dynamic_run_command') == 1
&& $self->project->supportsDynamicRunCommand();
}
sub as_json {
my $self = shift;
@@ -406,6 +423,7 @@ sub as_json {
# boolean_columns
"enableemail" => $self->get_column("enableemail") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
"enable_dynamic_run_command" => $self->get_column("enable_dynamic_run_command") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
"visible" => $self->get_column("hidden") ? JSON::MaybeXS::false : JSON::MaybeXS::true,
"inputs" => { map { $_->name => $_ } $self->jobsetinputs }

View File

@@ -88,6 +88,12 @@ __PACKAGE__->table("projects");
data_type: 'text'
is_nullable: 1
=head2 enable_dynamic_run_command
data_type: 'boolean'
default_value: false
is_nullable: 0
=cut
__PACKAGE__->add_columns(
@@ -111,6 +117,8 @@ __PACKAGE__->add_columns(
{ data_type => "text", is_nullable => 1 },
"declvalue",
{ data_type => "text", is_nullable => 1 },
"enable_dynamic_run_command",
{ data_type => "boolean", default_value => \"false", is_nullable => 0 },
);
=head1 PRIMARY KEY
@@ -228,8 +236,8 @@ Composing rels: L</projectmembers> -> username
__PACKAGE__->many_to_many("usernames", "projectmembers", "username");
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2022-01-08 22:24:10
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:r/wbX3FAm5/OFrrwOQL5fA
# Created by DBIx::Class::Schema::Loader v0.07049 @ 2022-01-24 14:20:32
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:PtXDyT8Pc7LYhhdEG39EKQ
use JSON::MaybeXS;
@@ -238,6 +246,12 @@ sub builds {
return $self->jobsets->related_resultset('builds');
};
sub supportsDynamicRunCommand {
my ($self) = @_;
return $self->get_column('enable_dynamic_run_command') == 1;
}
sub as_json {
my $self = shift;
@@ -251,6 +265,7 @@ sub as_json {
# boolean_columns
"enabled" => $self->get_column("enabled") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
"enable_dynamic_run_command" => $self->get_column("enable_dynamic_run_command") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
"hidden" => $self->get_column("hidden") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
"jobsets" => [ map { $_->name } $self->jobsets ]

View File

@@ -216,7 +216,7 @@ sub json_hint {
sub _authenticator() {
my $authenticator = Crypt::Passphrase->new(
encoder => 'Argon2',
encoder => { module => 'Argon2', output_size => 16 },
validators => [
(sub {
my ($password, $hash) = @_;

View File

@@ -0,0 +1,30 @@
package Hydra::Schema::ResultSet::EvaluationErrors;
use strict;
use utf8;
use warnings;
use parent 'DBIx::Class::ResultSet';
use Storable qw(dclone);
__PACKAGE__->load_components('Helper::ResultSet::RemoveColumns');
# Exclude expensive error message values unless explicitly requested, and
# replace them with a summary field describing their presence/absence.
sub search_rs {
my ( $class, $query, $attrs ) = @_;
if ($attrs) {
$attrs = dclone($attrs);
}
unless (exists $attrs->{'select'} || exists $attrs->{'columns'}) {
$attrs->{'+columns'}->{'has_error'} = "errormsg != ''";
}
unless (exists $attrs->{'+columns'}->{'errormsg'}) {
push @{ $attrs->{'remove_columns'} }, 'errormsg';
}
return $class->next::method($query, $attrs);
}

View File

@@ -0,0 +1,30 @@
package Hydra::Schema::ResultSet::Jobsets;
use strict;
use utf8;
use warnings;
use parent 'DBIx::Class::ResultSet';
use Storable qw(dclone);
__PACKAGE__->load_components('Helper::ResultSet::RemoveColumns');
# Exclude expensive error message values unless explicitly requested, and
# replace them with a summary field describing their presence/absence.
sub search_rs {
my ( $class, $query, $attrs ) = @_;
if ($attrs) {
$attrs = dclone($attrs);
}
unless (exists $attrs->{'select'} || exists $attrs->{'columns'}) {
$attrs->{'+columns'}->{'has_error'} = "errormsg != ''";
}
unless (exists $attrs->{'+columns'}->{'errormsg'}) {
push @{ $attrs->{'remove_columns'} }, 'errormsg';
}
return $class->next::method($query, $attrs);
}
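
Callers that do need the full message opt back in explicitly, as the jobset errors_GET action earlier in this diff does:

my $jobset = $c->stash->{project}->jobsets->find(
    { name => $jobsetName },
    { '+columns' => { 'errormsg' => 'errormsg' } }   # bypasses the remove_columns default
);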

View File

@@ -8,6 +8,7 @@ use MIME::Base64;
use Nix::Manifest;
use Nix::Store;
use Nix::Utils;
use Hydra::Helper::Nix;
use base qw/Catalyst::View/;
sub process {
@@ -17,7 +18,7 @@ sub process {
$c->response->content_type('text/x-nix-narinfo'); # !!! check MIME type
my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($storePath, 1);
my ($deriver, $narHash, $time, $narSize, $refs) = $MACHINE_LOCAL_STORE->queryPathInfo($storePath, 1);
my $info;
$info .= "StorePath: $storePath\n";
@@ -28,8 +29,8 @@ sub process {
$info .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n";
if (defined $deriver) {
$info .= "Deriver: " . basename $deriver . "\n";
if (isValidPath($deriver)) {
my $drv = derivationFromPath($deriver);
if ($MACHINE_LOCAL_STORE->isValidPath($deriver)) {
my $drv = $MACHINE_LOCAL_STORE->derivationFromPath($deriver);
$info .= "System: $drv->{platform}\n";
}
}

View File

@@ -16,7 +16,10 @@ sub process {
my $tail = int($c->stash->{tail} // "0");
if ($logPath =~ /\.bz2$/) {
if ($logPath =~ /\.zst$/) {
my $doTail = $tail ? "| tail -n '$tail'" : "";
open($fh, "-|", "zstd -dc < '$logPath' $doTail") or die;
} elsif ($logPath =~ /\.bz2$/) {
my $doTail = $tail ? "| tail -n '$tail'" : "";
open($fh, "-|", "bzip2 -dc < '$logPath' $doTail") or die;
} else {

View File

@@ -6,6 +6,7 @@ use base 'Catalyst::View::TT';
use Template::Plugin::HTML;
use Hydra::Helper::Nix;
use Time::Seconds;
use Digest::SHA qw(sha1_hex);
__PACKAGE__->config(
TEMPLATE_EXTENSION => '.tt',
@@ -25,8 +26,14 @@ __PACKAGE__->config(
makeNameTextForJobset
relativeDuration
stripSSHUser
metricDivId
/]);
sub metricDivId {
my ($self, $c, $text) = @_;
return "metric-" . sha1_hex($text);
}
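
Presumably this exists so arbitrary metric names yield stable, HTML-safe element ids; e.g. (output shape only):

# metricDivId($self, $c, "build time")  =>  "metric-" followed by 40 hex chars of SHA-1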
sub buildLogExists {
my ($self, $c, $build) = @_;
return 1 if defined $c->config->{log_prefix};