Compare commits: 33a935e8ef...add-gitea- (15 commits)

Commits: ae8c1554cb, de10c0e0fb, e6df0c141c, 78f57b82f7, 2b0729da7a, 84ce142a9d, 0dd4c0cc8e, 21f793e21b, 1001b67704, d5d4d19a4c, 17f9920cf9, fa1e989b7d, 47158cf360, c7972c3687, d12d4753ee
@@ -63,8 +63,7 @@ following:
     .. other configuration ..
     location /hydra/ {

-        proxy_pass http://127.0.0.1:3000;
-        proxy_redirect http://127.0.0.1:3000 https://example.com/hydra;
+        proxy_pass http://127.0.0.1:3000/;

         proxy_set_header  Host $host;
         proxy_set_header  X-Real-IP $remote_addr;
@@ -74,6 +73,9 @@ following:
         }
     }

+Note the trailing slash on the `proxy_pass` directive, which causes nginx to
+strip off the `/hydra/` part of the URL before passing it to hydra.
+
 Populating a Cache
 ------------------

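For reference, the same reverse-proxy setup can be expressed through the NixOS nginx module. This is a minimal sketch, not part of the change itself; the host name example.com is carried over from the manual, and the TLS options are assumptions:

    # Sketch of the manual's nginx example as a NixOS configuration fragment.
    # The trailing slash in proxyPass makes nginx strip the /hydra/ prefix
    # before forwarding requests to Hydra listening on port 3000.
    {
      services.nginx = {
        enable = true;
        virtualHosts."example.com" = {
          forceSSL = true;    # assumption: TLS is terminated by this host
          enableACME = true;
          locations."/hydra/" = {
            proxyPass = "http://127.0.0.1:3000/";
            extraConfig = ''
              proxy_set_header Host $host;
              proxy_set_header X-Real-IP $remote_addr;
            '';
          };
        };
      };
    }
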
flake.lock (generated file): 33 lines changed
@@ -1,27 +1,18 @@
 {
   "nodes": {
     "nix": {
-      "inputs": {
-        "flake-compat": [],
-        "flake-parts": [],
-        "git-hooks-nix": [],
-        "nixpkgs": [
-          "nixpkgs"
-        ],
-        "nixpkgs-23-11": [],
-        "nixpkgs-regression": []
-      },
+      "flake": false,
       "locked": {
-        "lastModified": 1744030329,
-        "narHash": "sha256-r+psCOW77vTSTNbxTVrYHeh6OgB0QukbnyUVDwg8s4I=",
+        "lastModified": 1748154947,
+        "narHash": "sha256-rCpANMHFIlafta6J/G0ILRd+WNSnzv/lzi40Y8f1AR8=",
         "owner": "NixOS",
         "repo": "nix",
-        "rev": "a4962f73b5fc874d4b16baef47921daf349addfc",
+        "rev": "d761dad79c79af17aa476a29749bd9d69747548f",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "2.28-maintenance",
+        "ref": "2.29-maintenance",
         "repo": "nix",
         "type": "github"
       }
@@ -29,11 +20,11 @@
     "nix-eval-jobs": {
       "flake": false,
       "locked": {
-        "lastModified": 1744018595,
-        "narHash": "sha256-v5n6t49X7MOpqS9j0FtI6TWOXvxuZMmGsp2OfUK5QfA=",
+        "lastModified": 1748211873,
+        "narHash": "sha256-AJ22q6yWc1hPkqssXMxQqD6QUeJ6hbx52xWHhKsmuP0=",
         "owner": "nix-community",
         "repo": "nix-eval-jobs",
-        "rev": "cba718bafe5dc1607c2b6761ecf53c641a6f3b21",
+        "rev": "d9262e535e35454daebcebd434bdb9c1486bb998",
         "type": "github"
       },
       "original": {
@@ -44,16 +35,16 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1743987495,
-        "narHash": "sha256-46T2vMZ4/AfCK0Y2OjlFzJPxmdpP8GtsuEqSSJv3oe4=",
+        "lastModified": 1748124805,
+        "narHash": "sha256-8A7HjmnvCpDjmETrZY1QwzKunR63LiP7lHu1eA5q6JI=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "db8f4fe18ce772a9c8f3adf321416981c8fe9371",
+        "rev": "db1aed32009f408e4048c1dd0beaf714dd34ed93",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-24.11-small",
+        "ref": "nixos-25.05-small",
         "repo": "nixpkgs",
         "type": "github"
       }

flake.nix: 67 lines changed
@@ -1,18 +1,12 @@
 {
   description = "A Nix-based continuous build system";

-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11-small";
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small";

   inputs.nix = {
-    url = "github:NixOS/nix/2.28-maintenance";
-    inputs.nixpkgs.follows = "nixpkgs";
-
-    # hide nix dev tooling from our lock file
-    inputs.flake-parts.follows = "";
-    inputs.git-hooks-nix.follows = "";
-    inputs.nixpkgs-regression.follows = "";
-    inputs.nixpkgs-23-11.follows = "";
-    inputs.flake-compat.follows = "";
+    url = "github:NixOS/nix/2.29-maintenance";
+    # We want to control the deps precisely
+    flake = false;
   };

   inputs.nix-eval-jobs = {
@@ -30,10 +24,27 @@

       # A Nixpkgs overlay that provides a 'hydra' package.
       overlays.default = final: prev: {
-        nix-eval-jobs = final.callPackage nix-eval-jobs {};
+        nixDependenciesForHydra = final.lib.makeScope final.newScope
+          (import (nix + "/packaging/dependencies.nix") {
+            pkgs = final;
+            inherit (final) stdenv;
+            inputs = {};
+          });
+        nixComponentsForHydra = final.lib.makeScope final.nixDependenciesForHydra.newScope
+          (import (nix + "/packaging/components.nix") {
+            officialRelease = true;
+            inherit (final) lib;
+            pkgs = final;
+            src = nix;
+            maintainers = [ ];
+          });
+        nix-eval-jobs = final.callPackage nix-eval-jobs {
+          nixComponents = final.nixComponentsForHydra;
+        };
         hydra = final.callPackage ./package.nix {
-          inherit (nixpkgs.lib) fileset;
+          inherit (final.lib) fileset;
           rawSrc = self;
+          nixComponents = final.nixComponentsForHydra;
         };
       };

@@ -73,24 +84,26 @@
       });

       packages = forEachSystem (system: let
-        nixComponents = {
-          inherit (nix.packages.${system})
-            nix-util
-            nix-store
-            nix-expr
-            nix-fetchers
-            nix-flake
-            nix-main
-            nix-cmd
-            nix-cli
-            nix-perl-bindings
-            ;
-        };
+        inherit (nixpkgs) lib;
+        pkgs = nixpkgs.legacyPackages.${system};
+        nixDependencies = lib.makeScope pkgs.newScope
+          (import (nix + "/packaging/dependencies.nix") {
+            inherit pkgs;
+            inherit (pkgs) stdenv;
+            inputs = {};
+          });
+        nixComponents = lib.makeScope nixDependencies.newScope
+          (import (nix + "/packaging/components.nix") {
+            officialRelease = true;
+            inherit lib pkgs;
+            src = nix;
+            maintainers = [ ];
+          });
       in {
-        nix-eval-jobs = nixpkgs.legacyPackages.${system}.callPackage nix-eval-jobs {
+        nix-eval-jobs = pkgs.callPackage nix-eval-jobs {
           inherit nixComponents;
         };
-        hydra = nixpkgs.legacyPackages.${system}.callPackage ./package.nix {
+        hydra = pkgs.callPackage ./package.nix {
           inherit (nixpkgs.lib) fileset;
           inherit nixComponents;
           inherit (self.packages.${system}) nix-eval-jobs;

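The net effect of the flake.nix changes above: Nix is now consumed as a non-flake input (flake = false), and its packaging expressions are instantiated explicitly, both inside the overlay (nixDependenciesForHydra, nixComponentsForHydra) and in the per-system packages. A downstream flake that wants Hydra built against exactly these pinned Nix components could apply the overlay roughly like the sketch below; the github:NixOS/hydra URL and the x86_64-linux system are assumptions:

    # Hypothetical consumer flake applying overlays.default from this repository.
    {
      inputs.hydra.url = "github:NixOS/hydra";

      outputs = { self, hydra, ... }: {
        packages.x86_64-linux.default =
          (import hydra.inputs.nixpkgs {
            system = "x86_64-linux";
            overlays = [ hydra.overlays.default ];
          }).hydra;  # built against the overlay's nixComponentsForHydra
      };
    }
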
@@ -228,8 +228,8 @@ in

       nix.settings = {
         trusted-users = [ "hydra-queue-runner" ];
-        gc-keep-outputs = true;
-        gc-keep-derivations = true;
+        keep-outputs = true;
+        keep-derivations = true;
       };

       services.hydra-dev.extraConfig =

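This hunk renames the two garbage-collector settings to their canonical nix.conf names; gc-keep-outputs and gc-keep-derivations are older alias spellings of the same options. On a machine configured by hand, the equivalent would look roughly like this sketch:

    # Equivalent nix.settings in a plain NixOS configuration (sketch).
    {
      nix.settings = {
        trusted-users = [ "hydra-queue-runner" ];
        keep-outputs = true;      # keep outputs of non-garbage derivations
        keep-derivations = true;  # keep the .drv files from which live paths were built
      };
    }
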
@@ -277,5 +277,8 @@ stdenv.mkDerivation (finalAttrs: {
   dontStrip = true;

   meta.description = "Build of Hydra on ${stdenv.system}";
-  passthru = { inherit perlDeps; };
+  passthru = {
+    inherit perlDeps;
+    nix = nixComponents.nix-cli;
+  };
 })

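This passthru addition to the Hydra package expression exposes the exact Nix CLI component the package was built with. With the overlay applied, a consumer can reference that same Nix; a sketch of how that might be used:

    # Sketch: pkgs.hydra.nix is the nixComponents.nix-cli used to build Hydra,
    # handy when the surrounding system should run the matching Nix version.
    { pkgs, ... }:
    {
      environment.systemPackages = [
        pkgs.hydra
        pkgs.hydra.nix
      ];
    }
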
@@ -50,7 +50,7 @@ static std::unique_ptr<SSHMaster::Connection> openConnection(
     auto remoteStore = machine->storeUri.params.find("remote-store");
     if (remoteStore != machine->storeUri.params.end()) {
         command.push_back("--store");
-        command.push_back(shellEscape(remoteStore->second));
+        command.push_back(escapeShellArgAlways(remoteStore->second));
     }
 }

@@ -134,6 +134,8 @@ system_time State::doDispatch()
            comparator is a partial ordering (see MachineInfo). */
         int highestGlobalPriority;
         int highestLocalPriority;
+        size_t numRequiredSystemFeatures;
+        size_t numRevDeps;
         BuildID lowestBuildID;

         StepInfo(Step::ptr step, Step::State & step_) : step(step)
@@ -142,6 +144,8 @@ system_time State::doDispatch()
             lowestShareUsed = std::min(lowestShareUsed, jobset->shareUsed());
             highestGlobalPriority = step_.highestGlobalPriority;
             highestLocalPriority = step_.highestLocalPriority;
+            numRequiredSystemFeatures = step->requiredSystemFeatures.size();
+            numRevDeps = step_.rdeps.size();
             lowestBuildID = step_.lowestBuildID;
         }
     };
@@ -194,6 +198,8 @@ system_time State::doDispatch()
                 a.highestGlobalPriority != b.highestGlobalPriority ? a.highestGlobalPriority > b.highestGlobalPriority :
                 a.lowestShareUsed != b.lowestShareUsed ? a.lowestShareUsed < b.lowestShareUsed :
                 a.highestLocalPriority != b.highestLocalPriority ? a.highestLocalPriority > b.highestLocalPriority :
+                a.numRequiredSystemFeatures != b.numRequiredSystemFeatures ? a.numRequiredSystemFeatures > b.numRequiredSystemFeatures :
+                a.numRevDeps != b.numRevDeps ? a.numRevDeps > b.numRevDeps :
                 a.lowestBuildID < b.lowestBuildID;
             });

@@ -14,7 +14,7 @@
 #include <nix/util/signals.hh>
 #include "state.hh"
 #include "hydra-build-result.hh"
-#include <nix/store/store-api.hh>
+#include <nix/store/store-open.hh>
 #include <nix/store/remote-store.hh>

 #include <nix/store/globals.hh>
@@ -703,6 +703,7 @@ void State::dumpStatus(Connection & conn)
                   : 0.0},
         };

+#if NIX_WITH_S3_SUPPORT
         auto s3Store = dynamic_cast<S3BinaryCacheStore *>(&*store);
         if (s3Store) {
             auto & s3Stats = s3Store->getS3Stats();
@@ -728,6 +729,7 @@ void State::dumpStatus(Connection & conn)
                   + s3Stats.getBytes / (1024.0 * 1024.0 * 1024.0) * 0.09},
             };
         }
+#endif
     }

     {
@@ -830,7 +832,7 @@ void State::run(BuildID buildOne)
           << metricsAddr << "/metrics (port " << exposerPort << ")"
           << std::endl;

-    Store::Params localParams;
+    Store::Config::Params localParams;
     localParams["max-connections"] = "16";
     localParams["max-connection-age"] = "600";
     localStore = openStore(getEnv("NIX_REMOTE").value_or(""), localParams);

@@ -492,8 +492,14 @@ Step::ptr State::createStep(ref<Store> destStore,
        runnable while step->created == false. */
     step->drv = std::make_unique<Derivation>(localStore->readDerivation(drvPath));
     {
-        auto parsedDrv = ParsedDerivation{drvPath, *step->drv};
-        step->drvOptions = std::make_unique<DerivationOptions>(DerivationOptions::fromParsedDerivation(parsedDrv));
+        auto parsedOpt = StructuredAttrs::tryParse(step->drv->env);
+        try {
+            step->drvOptions = std::make_unique<DerivationOptions>(
+                DerivationOptions::fromStructuredAttrs(step->drv->env, parsedOpt ? &*parsedOpt : nullptr));
+        } catch (Error & e) {
+            e.addTrace({}, "while parsing derivation '%s'", localStore->printStorePath(drvPath));
+            throw;
+        }
     }

     step->preferLocalBuild = step->drvOptions->willBuildLocally(*localStore, *step->drv);

@@ -172,7 +172,7 @@ struct Step
     nix::StorePath drvPath;
     std::unique_ptr<nix::Derivation> drv;
     std::unique_ptr<nix::DerivationOptions> drvOptions;
-    std::set<std::string> requiredSystemFeatures;
+    nix::StringSet requiredSystemFeatures;
     bool preferLocalBuild;
     bool isDeterministic;
     std::string systemType; // concatenation of drv.platform and requiredSystemFeatures

@@ -76,7 +76,9 @@ sub view_GET {
     $c->stash->{removed} = $diff->{removed};
     $c->stash->{unfinished} = $diff->{unfinished};
     $c->stash->{aborted} = $diff->{aborted};
-    $c->stash->{failed} = $diff->{failed};
+    $c->stash->{totalAborted} = $diff->{totalAborted};
+    $c->stash->{totalFailed} = $diff->{totalFailed};
+    $c->stash->{totalQueued} = $diff->{totalQueued};

     $c->stash->{full} = ($c->req->params->{full} || "0") eq "1";

@@ -32,7 +32,12 @@ sub buildDiff {
         removed => [],
         unfinished => [],
         aborted => [],
-        failed => [],
+
+        # These summary counters cut across the categories to determine whether
+        # actions such as "Restart all failed" or "Bump queue" are available.
+        totalAborted => 0,
+        totalFailed => 0,
+        totalQueued => 0,
     };

     my $n = 0;
@@ -80,8 +85,15 @@ sub buildDiff {
             } else {
                 push @{$ret->{new}}, $build if !$found;
             }
-            if (defined $build->buildstatus && $build->buildstatus != 0) {
-                push @{$ret->{failed}}, $build;
+
+            if ($build->finished != 0 && $build->buildstatus != 0) {
+                if ($aborted) {
+                    ++$ret->{totalAborted};
+                } else {
+                    ++$ret->{totalFailed};
+                }
+            } elsif ($build->finished == 0) {
+                ++$ret->{totalQueued};
             }
         }
     }

@@ -13,12 +13,12 @@

 <div class="tab-content tab-pane">
   <div id="tabs-errors" class="">
-    [% IF jobset %]
-      <p>Errors occurred at [% INCLUDE renderDateTime timestamp=(jobset.errortime || jobset.lastcheckedtime) %].</p>
-      <div class="card bg-light"><div class="card-body"><pre>[% HTML.escape(jobset.fetcherrormsg || jobset.errormsg) %]</pre></div></div>
-    [% ELSIF eval %]
+    [% IF eval %]
       <p>Errors occurred at [% INCLUDE renderDateTime timestamp=(eval.evaluationerror.errortime || eval.timestamp) %].</p>
       <div class="card bg-light"><div class="card-body"><pre>[% HTML.escape(eval.evaluationerror.errormsg) %]</pre></div></div>
+    [% ELSIF jobset %]
+      <p>Errors occurred at [% INCLUDE renderDateTime timestamp=(jobset.errortime || jobset.lastcheckedtime) %].</p>
+      <div class="card bg-light"><div class="card-body"><pre>[% HTML.escape(jobset.fetcherrormsg || jobset.errormsg) %]</pre></div></div>
     [% END %]
   </div>
 </div>

@@ -48,16 +48,16 @@ c.uri_for(c.controller('JobsetEval').action_for('view'),
       <a class="nav-link dropdown-toggle" data-toggle="dropdown" href="#">Actions</a>
       <div class="dropdown-menu">
         <a class="dropdown-item" href="[% c.uri_for(c.controller('JobsetEval').action_for('create_jobset'), [eval.id]) %]">Create a jobset from this evaluation</a>
-        [% IF unfinished.size > 0 %]
+        [% IF totalQueued > 0 %]
           <a class="dropdown-item" href="[% c.uri_for(c.controller('JobsetEval').action_for('cancel'), [eval.id]) %]">Cancel all scheduled builds</a>
         [% END %]
-        [% IF aborted.size > 0 || stillFail.size > 0 || nowFail.size > 0 || failed.size > 0 %]
+        [% IF totalFailed > 0 %]
           <a class="dropdown-item" href="[% c.uri_for(c.controller('JobsetEval').action_for('restart_failed'), [eval.id]) %]">Restart all failed builds</a>
         [% END %]
-        [% IF aborted.size > 0 %]
+        [% IF totalAborted > 0 %]
          <a class="dropdown-item" href="[% c.uri_for(c.controller('JobsetEval').action_for('restart_aborted'), [eval.id]) %]">Restart all aborted builds</a>
         [% END %]
-        [% IF unfinished.size > 0 %]
+        [% IF totalQueued > 0 %]
           <a class="dropdown-item" href="[% c.uri_for(c.controller('JobsetEval').action_for('bump'), [eval.id]) %]">Bump builds to front of queue</a>
         [% END %]
       </div>

@@ -17,7 +17,7 @@
   [% name = m.key ? stripSSHUser(m.key) : "localhost" %]
   <thead>
     <tr>
-      <th colspan="6">
+      <th colspan="7">
        <tt [% IF m.value.disabled %]style="text-decoration: line-through;"[% END %]>[% INCLUDE renderMachineName machine=m.key %]</tt>
        [% IF m.value.systemTypes %]
          <span class="muted" style="font-weight: normal;">

@@ -372,6 +372,7 @@ sub evalJobs {
         or die "cannot find the input containing the job expression\n";

     @cmd = ("nix-eval-jobs",
+            "--option", "restrict-eval", "true",
            "<" . $nixExprInputName . "/" . $nixExprPath . ">",
            inputsToArgs($inputInfo));
 }

@@ -25,7 +25,10 @@ subtest "empty diff" => sub {
             removed => [],
             unfinished => [],
             aborted => [],
-            failed => [],
+
+            totalAborted => 0,
+            totalFailed => 0,
+            totalQueued => 0,
         },
         "empty list of jobs returns empty diff"
     );
@@ -48,12 +51,7 @@ subtest "2 different jobs" => sub {
         "succeed_with_failed is a new job"
     );

-    is(scalar(@{$ret->{failed}}), 1, "list of failed jobs is 1 element long");
-    is(
-        $ret->{failed}[0]->get_column('id'),
-        $builds->{"succeed_with_failed"}->get_column('id'),
-        "succeed_with_failed is a failed job"
-    );
+    is($ret->{totalFailed}, 1, "total failed jobs is 1");

     is(
         $ret->{removed},
@@ -70,9 +68,9 @@ subtest "2 different jobs" => sub {
 subtest "failed job with no previous history" => sub {
     my $ret = buildDiff([$builds->{"fails"}], []);

-    is(scalar(@{$ret->{failed}}), 1, "list of failed jobs is 1 element long");
+    is($ret->{totalFailed}, 1, "total failed jobs is 1");
     is(
-        $ret->{failed}[0]->get_column('id'),
+        $ret->{new}[0]->get_column('id'),
         $builds->{"fails"}->get_column('id'),
         "fails is a failed job"
     );
@@ -93,7 +91,6 @@ subtest "not-yet-built job with no previous history" => sub {
     is($ret->{removed}, [], "removed");
     is($ret->{unfinished}, [], "unfinished");
     is($ret->{aborted}, [], "aborted");
-    is($ret->{failed}, [], "failed");

     is(scalar(@{$ret->{new}}), 1, "list of new jobs is 1 element long");
     is(