Merge remote-tracking branch 'upstream/master' into split-buildRemote

@@ -1,5 +1,5 @@
bin_PROGRAMS = hydra-eval-jobs

hydra_eval_jobs_SOURCES = hydra-eval-jobs.cc
-hydra_eval_jobs_LDADD = $(NIX_LIBS)
+hydra_eval_jobs_LDADD = $(NIX_LIBS) -lnixcmd
hydra_eval_jobs_CXXFLAGS = $(NIX_CFLAGS) -I ../libhydra
@@ -25,6 +25,28 @@
#include <nlohmann/json.hpp>

+void check_pid_status_nonblocking(pid_t check_pid) {
+    // Only check 'initialized' and known PID's
+    if (check_pid <= 0) { return; }
+
+    int wstatus = 0;
+    pid_t pid = waitpid(check_pid, &wstatus, WNOHANG);
+    // -1 = failure, WNOHANG: 0 = no change
+    if (pid <= 0) { return; }
+
+    std::cerr << "child process (" << pid << ") ";
+
+    if (WIFEXITED(wstatus)) {
+        std::cerr << "exited with status=" << WEXITSTATUS(wstatus) << std::endl;
+    } else if (WIFSIGNALED(wstatus)) {
+        std::cerr << "killed by signal=" << WTERMSIG(wstatus) << std::endl;
+    } else if (WIFSTOPPED(wstatus)) {
+        std::cerr << "stopped by signal=" << WSTOPSIG(wstatus) << std::endl;
+    } else if (WIFCONTINUED(wstatus)) {
+        std::cerr << "continued" << std::endl;
+    }
+}
+
using namespace nix;

static Path gcRootsDir;
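
The helper above relies on waitpid() semantics: with WNOHANG the call returns 0 while the child is still running and -1 on error, so the single "pid <= 0" early return covers both cases. A minimal standalone sketch of the same pattern (the forked child here is hypothetical):

    #include <sys/wait.h>
    #include <unistd.h>
    #include <iostream>

    int main() {
        pid_t child = fork();
        if (child == 0) { sleep(2); _exit(7); }   // hypothetical worker process

        int wstatus = 0;
        // Non-blocking poll: returns 0 because the child has not exited yet.
        pid_t r = waitpid(child, &wstatus, WNOHANG);
        std::cout << "first poll: " << r << std::endl;   // prints 0

        // A blocking wait then picks up the real exit status.
        r = waitpid(child, &wstatus, 0);
        if (r == child && WIFEXITED(wstatus))
            std::cout << "exited with status=" << WEXITSTATUS(wstatus) << std::endl;   // 7
        return 0;
    }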
@@ -63,13 +85,13 @@ struct MyArgs : MixEvalArgs, MixCommonArgs

static MyArgs myArgs;

-static std::string queryMetaStrings(EvalState & state, DrvInfo & drv, const string & name, const string & subAttribute)
+static std::string queryMetaStrings(EvalState & state, DrvInfo & drv, const std::string & name, const std::string & subAttribute)
{
    Strings res;
    std::function<void(Value & v)> rec;

    rec = [&](Value & v) {
-        state.forceValue(v);
+        state.forceValue(v, noPos);
        if (v.type() == nString)
            res.push_back(v.string.s);
        else if (v.isList())
@@ -78,7 +100,7 @@ static std::string queryMetaStrings(EvalState & state, DrvInfo & drv, const stri
        else if (v.type() == nAttrs) {
            auto a = v.attrs->find(state.symbols.create(subAttribute));
            if (a != v.attrs->end())
-                res.push_back(state.forceString(*a->value));
+                res.push_back(std::string(state.forceString(*a->value)));
        }
    };

@@ -113,7 +135,7 @@ static void worker(
    callFlake(state, lockedFlake, *vFlake);

    auto vOutputs = vFlake->attrs->get(state.symbols.create("outputs"))->value;
-    state.forceValue(*vOutputs);
+    state.forceValue(*vOutputs, noPos);

    auto aHydraJobs = vOutputs->attrs->get(state.symbols.create("hydraJobs"));
    if (!aHydraJobs)
@@ -157,7 +179,7 @@ static void worker(
    if (drv->querySystem() == "unknown")
        throw EvalError("derivation must have a 'system' attribute");

-    auto drvPath = drv->queryDrvPath();
+    auto drvPath = state.store->printStorePath(drv->requireDrvPath());

    nlohmann::json job;

@@ -175,24 +197,24 @@ static void worker(

    /* If this is an aggregate, then get its constituents. */
    auto a = v->attrs->get(state.symbols.create("_hydraAggregate"));
-    if (a && state.forceBool(*a->value, *a->pos)) {
+    if (a && state.forceBool(*a->value, a->pos)) {
        auto a = v->attrs->get(state.symbols.create("constituents"));
        if (!a)
            throw EvalError("derivation must have a ‘constituents’ attribute");


        PathSet context;
-        state.coerceToString(*a->pos, *a->value, context, true, false);
+        state.coerceToString(a->pos, *a->value, context, true, false);
        for (auto & i : context)
            if (i.at(0) == '!') {
                size_t index = i.find("!", 1);
-                job["constituents"].push_back(string(i, index + 1));
+                job["constituents"].push_back(std::string(i, index + 1));
            }

-        state.forceList(*a->value, *a->pos);
+        state.forceList(*a->value, a->pos);
        for (unsigned int n = 0; n < a->value->listSize(); ++n) {
            auto v = a->value->listElems()[n];
-            state.forceValue(*v);
+            state.forceValue(*v, noPos);
            if (v->type() == nString)
                job["namedConstituents"].push_back(state.forceStringNoCtx(*v));
        }
@@ -210,7 +232,9 @@ static void worker(

    nlohmann::json out;
    for (auto & j : outputs)
-        out[j.first] = j.second;
+        // FIXME: handle CA/impure builds.
+        if (j.second)
+            out[j.first] = state.store->printStorePath(*j.second);
    job["outputs"] = std::move(out);

    reply["job"] = std::move(job);
@@ -219,8 +243,8 @@ static void worker(
    else if (v->type() == nAttrs) {
        auto attrs = nlohmann::json::array();
        StringSet ss;
-        for (auto & i : v->attrs->lexicographicOrder()) {
-            std::string name(i->name);
+        for (auto & i : v->attrs->lexicographicOrder(state.symbols)) {
+            std::string name(state.symbols[i->name]);
            if (name.find('.') != std::string::npos || name.find(' ') != std::string::npos) {
                printError("skipping job with illegal name '%s'", name);
                continue;
@@ -309,8 +333,8 @@ int main(int argc, char * * argv)
    /* Start a handler thread per worker process. */
    auto handler = [&]()
    {
+        pid_t pid = -1;
        try {
-            pid_t pid = -1;
            AutoCloseFD from, to;

            while (true) {
@@ -412,6 +436,7 @@ int main(int argc, char * * argv)
                }
            }
        } catch (...) {
+            check_pid_status_nonblocking(pid);
            auto state(state_.lock());
            state->exc = std::current_exception();
            wakeup.notify_all();
@@ -489,10 +514,14 @@ int main(int argc, char * * argv)
    std::string drvName(drvPath.name());
    assert(hasSuffix(drvName, drvExtension));
    drvName.resize(drvName.size() - drvExtension.size());
-    auto h = std::get<Hash>(hashDerivationModulo(*store, drv, true));
-    auto outPath = store->makeOutputPath("out", h, drvName);

+    auto hashModulo = hashDerivationModulo(*store, drv, true);
+    if (hashModulo.kind != DrvHash::Kind::Regular) continue;
+    auto h = hashModulo.hashes.find("out");
+    if (h == hashModulo.hashes.end()) continue;
+    auto outPath = store->makeOutputPath("out", h->second, drvName);
    drv.env["out"] = store->printStorePath(outPath);
-    drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = outPath } });
+    drv.outputs.insert_or_assign("out", DerivationOutput::InputAddressed { .path = outPath });
    auto newDrvPath = store->printStorePath(writeDerivation(*store, drv));

    debug("rewrote aggregate derivation %s -> %s", store->printStorePath(drvPath), newDrvPath);
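
The rewritten aggregate handling above tracks a newer Nix API in which hashDerivationModulo returns a DrvHash whose hashes member maps output names to hashes, instead of a single Hash; only DrvHash::Kind::Regular derivations are rewritten. A rough self-contained sketch of the shape being consumed (these are abbreviated stand-ins for illustration, not the real Nix headers):

    #include <map>
    #include <optional>
    #include <string>
    #include <iostream>

    // Abbreviated stand-in for the Nix type used above.
    struct DrvHash {
        enum class Kind { Regular, Deferred } kind;
        std::map<std::string, std::string> hashes;   // output name -> hash
    };

    std::optional<std::string> outHashFor(const DrvHash & hashModulo) {
        if (hashModulo.kind != DrvHash::Kind::Regular) return std::nullopt;   // skip CA/deferred
        auto h = hashModulo.hashes.find("out");
        if (h == hashModulo.hashes.end()) return std::nullopt;
        return h->second;
    }

    int main() {
        DrvHash hm{DrvHash::Kind::Regular, {{"out", "sha256:abc..."}}};   // made-up hash
        if (auto h = outHashFor(hm)) std::cout << *h << std::endl;
    }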
@@ -2,7 +2,7 @@ bin_PROGRAMS = hydra-queue-runner

hydra_queue_runner_SOURCES = hydra-queue-runner.cc queue-monitor.cc dispatcher.cc \
    builder.cc build-result.cc build-remote.cc \
-    build-result.hh counter.hh state.hh db.hh \
+    hydra-build-result.hh counter.hh state.hh db.hh \
    nar-extractor.cc nar-extractor.hh
-hydra_queue_runner_LDADD = $(NIX_LIBS) -lpqxx
+hydra_queue_runner_LDADD = $(NIX_LIBS) -lpqxx -lprometheus-cpp-pull -lprometheus-cpp-core
hydra_queue_runner_CXXFLAGS = $(NIX_CFLAGS) -Wall -I ../libhydra -Wno-deprecated-declarations
@@ -5,6 +5,7 @@
#include <sys/stat.h>
#include <fcntl.h>

#include "build-result.hh"
#include "serve-protocol.hh"
#include "state.hh"
#include "util.hh"
@@ -51,13 +52,35 @@ static Strings extraStoreArgs(std::string & machine)

static void openConnection(Machine::ptr machine, Path tmpDir, int stderrFD, Child & child)
{
-    string pgmName;
+    std::string pgmName;
    Pipe to, from;
    to.create();
    from.create();

-    child.pid = startProcess([&]() {
+    Strings argv;
+    if (machine->isLocalhost()) {
+        pgmName = "nix-store";
+        argv = {"nix-store", "--builders", "", "--serve", "--write"};
+    } else {
+        pgmName = "ssh";
+        auto sshName = machine->sshName;
+        Strings extraArgs = extraStoreArgs(sshName);
+        argv = {"ssh", sshName};
+        if (machine->sshKey != "") append(argv, {"-i", machine->sshKey});
+        if (machine->sshPublicHostKey != "") {
+            Path fileName = tmpDir + "/host-key";
+            auto p = machine->sshName.find("@");
+            std::string host = p != std::string::npos ? std::string(machine->sshName, p + 1) : machine->sshName;
+            writeFile(fileName, host + " " + machine->sshPublicHostKey + "\n");
+            append(argv, {"-oUserKnownHostsFile=" + fileName});
+        }
+        append(argv,
+            { "-x", "-a", "-oBatchMode=yes", "-oConnectTimeout=60", "-oTCPKeepAlive=yes"
+            , "--", "nix-store", "--serve", "--write" });
+        append(argv, extraArgs);
+    }
+
+    child.pid = startProcess([&]() {
+        restoreProcessContext();
+
        if (dup2(to.readSide.get(), STDIN_FILENO) == -1)
@@ -69,30 +92,6 @@ static void openConnection(Machine::ptr machine, Path tmpDir, int stderrFD, Child
        if (dup2(stderrFD, STDERR_FILENO) == -1)
            throw SysError("cannot dup stderr");

-        Strings argv;
-        if (machine->isLocalhost()) {
-            pgmName = "nix-store";
-            argv = {"nix-store", "--builders", "", "--serve", "--write"};
-        }
-        else {
-            pgmName = "ssh";
-            auto sshName = machine->sshName;
-            Strings extraArgs = extraStoreArgs(sshName);
-            argv = {"ssh", sshName};
-            if (machine->sshKey != "") append(argv, {"-i", machine->sshKey});
-            if (machine->sshPublicHostKey != "") {
-                Path fileName = tmpDir + "/host-key";
-                auto p = machine->sshName.find("@");
-                string host = p != string::npos ? string(machine->sshName, p + 1) : machine->sshName;
-                writeFile(fileName, host + " " + machine->sshPublicHostKey + "\n");
-                append(argv, {"-oUserKnownHostsFile=" + fileName});
-            }
-            append(argv,
-                { "-x", "-a", "-oBatchMode=yes", "-oConnectTimeout=60", "-oTCPKeepAlive=yes"
-                , "--", "nix-store", "--serve", "--write" });
-            append(argv, extraArgs);
-        }
-
        execvp(argv.front().c_str(), (char * *) stringsToCharPtrs(argv).data()); // FIXME: remove cast

        throw SysError("cannot start %s", pgmName);
@@ -179,7 +178,7 @@ StorePaths reverseTopoSortPaths(const std::map<StorePath, ValidPathInfo> & paths)

std::pair<Path, AutoCloseFD> openLogFile(const std::string & logDir, const StorePath & drvPath)
{
-    string base(drvPath.to_string());
+    std::string base(drvPath.to_string());
    auto logFile = logDir + "/" + string(base, 0, 2) + "/" + string(base, 2);

    createDirs(dirOf(logFile));
@@ -192,7 +191,7 @@ std::pair<Path, AutoCloseFD> openLogFile(const std::string & logDir, const Store

void handshake(Machine::Connection & conn, unsigned int repeats)
{
-    conn.to << SERVE_MAGIC_1 << 0x204;
+    conn.to << SERVE_MAGIC_1 << 0x206;
    conn.to.flush();

    unsigned int magic = readInt(conn.from);
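
The handshake bump from 0x204 to 0x206 above is the nix-store --serve protocol version word: the high byte carries the major version and the low byte the minor, so the queue runner now advertises protocol 2.6. A small sketch of unpacking such a word (helper names here are illustrative; Nix defines equivalent macros in serve-protocol.hh):

    #include <cstdint>
    #include <iostream>

    // Illustrative helpers for the version word layout described above.
    constexpr unsigned protoMajor(uint32_t v) { return (v >> 8) & 0xff; }
    constexpr unsigned protoMinor(uint32_t v) { return v & 0xff; }

    int main() {
        for (uint32_t v : {0x204u, 0x206u})
            std::cout << std::hex << v << std::dec
                      << " -> protocol " << protoMajor(v) << "." << protoMinor(v) << std::endl;
    }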
@@ -232,10 +231,10 @@ BasicDerivation sendInputs(
       a no-op for regular stores, but for the binary cache store,
       this will copy the inputs to the binary cache from the local
       store. */
-    if (localStore.getUri() != destStore.getUri()) {
-        StorePathSet closure;
-        localStore.computeFSClosure(step.drv->inputSrcs, closure);
-        copyPaths(localStore, destStore, closure, NoRepair, NoCheckSigs, NoSubstitute);
+    if (localStore != destStore) {
+        copyClosure(localStore, destStore,
+            step.drv->inputSrcs,
+            NoRepair, NoCheckSigs, NoSubstitute);
    }

    {
@@ -1,4 +1,4 @@
-#include "build-result.hh"
+#include "hydra-build-result.hh"
#include "store-api.hh"
#include "util.hh"
#include "fs-accessor.hh"
@@ -78,7 +78,7 @@ BuildOutput getBuildOutput(
    product.type = match[1];
    product.subtype = match[2];
    std::string s(match[3]);
-    product.path = s[0] == '"' ? string(s, 1, s.size() - 2) : s;
+    product.path = s[0] == '"' ? std::string(s, 1, s.size() - 2) : s;
    product.defaultPath = match[5];

    /* Ensure that the path exists and points into the Nix
@@ -1,7 +1,7 @@
#include <cmath>

#include "state.hh"
-#include "build-result.hh"
+#include "hydra-build-result.hh"
#include "finally.hh"
#include "binary-cache-store.hh"

@@ -6,8 +6,10 @@
#include <sys/stat.h>
#include <fcntl.h>

+#include <prometheus/exposer.h>
+
#include "state.hh"
-#include "build-result.hh"
+#include "hydra-build-result.hh"
#include "store-api.hh"
#include "remote-store.hh"
@@ -36,8 +38,55 @@ std::string getEnvOrDie(const std::string & key)
    return *value;
}

+State::PromMetrics::PromMetrics()
+    : registry(std::make_shared<prometheus::Registry>())
+    , queue_checks_started(
+        prometheus::BuildCounter()
+            .Name("hydraqueuerunner_queue_checks_started_total")
+            .Help("Number of times State::getQueuedBuilds() was started")
+            .Register(*registry)
+            .Add({})
+    )
+    , queue_build_loads(
+        prometheus::BuildCounter()
+            .Name("hydraqueuerunner_queue_build_loads_total")
+            .Help("Number of builds loaded")
+            .Register(*registry)
+            .Add({})
+    )
+    , queue_steps_created(
+        prometheus::BuildCounter()
+            .Name("hydraqueuerunner_queue_steps_created_total")
+            .Help("Number of steps created")
+            .Register(*registry)
+            .Add({})
+    )
+    , queue_checks_early_exits(
+        prometheus::BuildCounter()
+            .Name("hydraqueuerunner_queue_checks_early_exits_total")
+            .Help("Number of times State::getQueuedBuilds() yielded to potential bumps")
+            .Register(*registry)
+            .Add({})
+    )
+    , queue_checks_finished(
+        prometheus::BuildCounter()
+            .Name("hydraqueuerunner_queue_checks_finished_total")
+            .Help("Number of times State::getQueuedBuilds() was completed")
+            .Register(*registry)
+            .Add({})
+    )
+    , queue_max_id(
+        prometheus::BuildGauge()
+            .Name("hydraqueuerunner_queue_max_build_id_info")
+            .Help("Maximum build record ID in the queue")
+            .Register(*registry)
+            .Add({})
+    )
+{
+}
+
-State::State()
+State::State(std::optional<std::string> metricsAddrOpt)
    : config(std::make_unique<HydraConfig>())
    , maxUnsupportedTime(config->getIntOption("max_unsupported_time", 0))
    , dbPool(config->getIntOption("max_db_connections", 128))
@@ -45,11 +94,16 @@ State::State()
    , maxLogSize(config->getIntOption("max_log_size", 64ULL << 20))
    , uploadLogsToBinaryCache(config->getBoolOption("upload_logs_to_binary_cache", false))
    , rootsDir(config->getStrOption("gc_roots_dir", fmt("%s/gcroots/per-user/%s/hydra-roots", settings.nixStateDir, getEnvOrDie("LOGNAME"))))
+    , metricsAddr(config->getStrOption("queue_runner_metrics_address", std::string{"127.0.0.1:9198"}))
{
    hydraData = getEnvOrDie("HYDRA_DATA");

    logDir = canonPath(hydraData + "/build-logs");

+    if (metricsAddrOpt.has_value()) {
+        metricsAddr = metricsAddrOpt.value();
+    }
+
    /* handle deprecated store specification */
    if (config->getStrOption("store_mode") != "")
        throw Error("store_mode in hydra.conf is deprecated, please use store_uri");
@@ -87,7 +141,7 @@ void State::parseMachines(const std::string & contents)
    }

    for (auto line : tokenizeString<Strings>(contents, "\n")) {
-        line = trim(string(line, 0, line.find('#')));
+        line = trim(std::string(line, 0, line.find('#')));
        auto tokens = tokenizeString<std::vector<std::string>>(line);
        if (tokens.size() < 3) continue;
        tokens.resize(8);
@@ -95,7 +149,7 @@ void State::parseMachines(const std::string & contents)
        auto machine = std::make_shared<Machine>();
        machine->sshName = tokens[0];
        machine->systemTypes = tokenizeString<StringSet>(tokens[1], ",");
-        machine->sshKey = tokens[2] == "-" ? string("") : tokens[2];
+        machine->sshKey = tokens[2] == "-" ? std::string("") : tokens[2];
        if (tokens[3] != "")
            machine->maxJobs = string2Int<decltype(machine->maxJobs)>(tokens[3]).value();
        else
@@ -149,7 +203,7 @@ void State::parseMachines(const std::string & contents)

void State::monitorMachinesFile()
{
-    string defaultMachinesFile = "/etc/nix/machines";
+    std::string defaultMachinesFile = "/etc/nix/machines";
    auto machinesFiles = tokenizeString<std::vector<Path>>(
        getEnv("NIX_REMOTE_SYSTEMS").value_or(pathExists(defaultMachinesFile) ? defaultMachinesFile : ""), ":");

@@ -191,7 +245,7 @@ void State::monitorMachinesFile()

        debug("reloading machines files");

-        string contents;
+        std::string contents;
        for (auto & machinesFile : machinesFiles) {
            try {
                contents += readFile(machinesFile);
@@ -308,7 +362,7 @@ void State::finishBuildStep(pqxx::work & txn, const RemoteResult & result,


int State::createSubstitutionStep(pqxx::work & txn, time_t startTime, time_t stopTime,
-    Build::ptr build, const StorePath & drvPath, const string & outputName, const StorePath & storePath)
+    Build::ptr build, const StorePath & drvPath, const std::string & outputName, const StorePath & storePath)
{
restart:
    auto stepNr = allocBuildStep(txn, build->id);
@@ -683,14 +737,14 @@ void State::showStatus()
    auto conn(dbPool.get());
    receiver statusDumped(*conn, "status_dumped");

-    string status;
+    std::string status;
    bool barf = false;

    /* Get the last JSON status dump from the database. */
    {
        pqxx::work txn(*conn);
        auto res = txn.exec("select status from SystemStatus where what = 'queue-runner'");
-        if (res.size()) status = res[0][0].as<string>();
+        if (res.size()) status = res[0][0].as<std::string>();
    }

    if (status != "") {
@@ -710,7 +764,7 @@ void State::showStatus()
        {
            pqxx::work txn(*conn);
            auto res = txn.exec("select status from SystemStatus where what = 'queue-runner'");
-            if (res.size()) status = res[0][0].as<string>();
+            if (res.size()) status = res[0][0].as<std::string>();
        }

    }
@@ -754,6 +808,18 @@ void State::run(BuildID buildOne)
    if (!lock)
        throw Error("hydra-queue-runner is already running");

+    std::cout << "Starting the Prometheus exporter on " << metricsAddr << std::endl;
+
+    /* Set up simple exporter, to show that we're still alive. */
+    prometheus::Exposer promExposer{metricsAddr};
+    auto exposerPort = promExposer.GetListeningPorts().front();
+
+    promExposer.RegisterCollectable(prom.registry);
+
+    std::cout << "Started the Prometheus exporter, listening on "
+        << metricsAddr << "/metrics (port " << exposerPort << ")"
+        << std::endl;
+
    Store::Params localParams;
    localParams["max-connections"] = "16";
    localParams["max-connection-age"] = "600";
@@ -864,6 +930,7 @@ int main(int argc, char * * argv)
    bool unlock = false;
    bool status = false;
    BuildID buildOne = 0;
+    std::optional<std::string> metricsAddrOpt = std::nullopt;

    parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) {
        if (*arg == "--unlock")
@@ -875,6 +942,8 @@ int main(int argc, char * * argv)
                buildOne = *b;
            else
                throw Error("‘--build-one’ requires a build ID");
+        } else if (*arg == "--prometheus-address") {
+            metricsAddrOpt = getArg(*arg, arg, end);
        } else
            return false;
        return true;
@@ -883,7 +952,7 @@ int main(int argc, char * * argv)
    settings.verboseBuild = true;
    settings.lockCPU = false;

-    State state;
+    State state{metricsAddrOpt};
    if (status)
        state.showStatus();
    else if (unlock)
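
For readers unfamiliar with the prometheus-cpp pull API that this file now wires up: a Registry owns metric families, the BuildCounter()/BuildGauge() builders register a family and Add({}) returns the concrete instrument, and an Exposer serves everything over HTTP at /metrics. A minimal sketch under those assumptions (the bind address and metric name are placeholders):

    #include <prometheus/counter.h>
    #include <prometheus/exposer.h>
    #include <prometheus/registry.h>

    #include <chrono>
    #include <memory>
    #include <thread>

    int main() {
        // Serve /metrics on a placeholder address, as the queue runner does.
        prometheus::Exposer exposer{"127.0.0.1:9198"};

        auto registry = std::make_shared<prometheus::Registry>();

        // Builder pattern mirrored from the PromMetrics constructor above.
        auto & counter = prometheus::BuildCounter()
            .Name("example_checks_started_total")
            .Help("Number of times the example check was started")
            .Register(*registry)
            .Add({});

        exposer.RegisterCollectable(registry);

        for (int i = 0; i < 10; ++i) {
            counter.Increment();
            std::this_thread::sleep_for(std::chrono::seconds(1));
        }
    }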
@@ -64,7 +64,7 @@ struct Extractor : ParseSink
        }
    }

-    void createSymlink(const Path & path, const string & target) override
+    void createSymlink(const Path & path, const std::string & target) override
    {
        members.insert_or_assign(prefix + path, NarMemberData { .type = FSAccessor::Type::tSymlink });
    }
@@ -1,5 +1,5 @@
#include "state.hh"
-#include "build-result.hh"
+#include "hydra-build-result.hh"
#include "globals.hh"

#include <cstring>
@@ -82,6 +82,8 @@ struct PreviousFailure : public std::exception {
bool State::getQueuedBuilds(Connection & conn,
    ref<Store> destStore, unsigned int & lastBuildId)
{
+    prom.queue_checks_started.Increment();
+
    printInfo("checking the queue for builds > %d...", lastBuildId);

    /* Grab the queued builds from the database, but don't process
@@ -107,16 +109,19 @@ bool State::getQueuedBuilds(Connection & conn,
            auto builds_(builds.lock());
            BuildID id = row["id"].as<BuildID>();
            if (buildOne && id != buildOne) continue;
-            if (id > newLastBuildId) newLastBuildId = id;
+            if (id > newLastBuildId) {
+                newLastBuildId = id;
+                prom.queue_max_id.Set(id);
+            }
            if (builds_->count(id)) continue;

            auto build = std::make_shared<Build>(
-                localStore->parseStorePath(row["drvPath"].as<string>()));
+                localStore->parseStorePath(row["drvPath"].as<std::string>()));
            build->id = id;
            build->jobsetId = row["jobset_id"].as<JobsetID>();
-            build->projectName = row["project"].as<string>();
-            build->jobsetName = row["jobset"].as<string>();
-            build->jobName = row["job"].as<string>();
+            build->projectName = row["project"].as<std::string>();
+            build->jobsetName = row["jobset"].as<std::string>();
+            build->jobName = row["job"].as<std::string>();
            build->maxSilentTime = row["maxsilent"].as<int>();
            build->buildTimeout = row["timeout"].as<int>();
            build->timestamp = row["timestamp"].as<time_t>();
@@ -136,6 +141,7 @@ bool State::getQueuedBuilds(Connection & conn,
    std::set<StorePath> finishedDrvs;

    createBuild = [&](Build::ptr build) {
+        prom.queue_build_loads.Increment();
        printMsg(lvlTalkative, format("loading build %1% (%2%)") % build->id % build->fullJobName());
        nrAdded++;
        newBuildsByID.erase(build->id);
@@ -306,9 +312,14 @@ bool State::getQueuedBuilds(Connection & conn,

        /* Stop after a certain time to allow priority bumps to be
           processed. */
-        if (std::chrono::system_clock::now() > start + std::chrono::seconds(600)) break;
+        if (std::chrono::system_clock::now() > start + std::chrono::seconds(600)) {
+            prom.queue_checks_early_exits.Increment();
+            break;
+        }
    }

+    prom.queue_checks_finished.Increment();
+
    lastBuildId = newBuildsByID.empty() ? newLastBuildId : newBuildsByID.begin()->first - 1;
    return newBuildsByID.empty();
}
@@ -437,6 +448,8 @@ Step::ptr State::createStep(ref<Store> destStore,

    if (!isNew) return step;

+    prom.queue_steps_created.Increment();
+
    printMsg(lvlDebug, "considering derivation ‘%1%’", localStore->printStorePath(drvPath));

    /* Initialize the step. Note that the step may be visible in
@@ -447,7 +460,7 @@ Step::ptr State::createStep(ref<Store> destStore,
    step->parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *step->drv);

    step->preferLocalBuild = step->parsedDrv->willBuildLocally(*localStore);
-    step->isDeterministic = get(step->drv->env, "isDetermistic").value_or("0") == "1";
+    step->isDeterministic = getOr(step->drv->env, "isDetermistic", "0") == "1";

    step->systemType = step->drv->platform;
    {
@@ -513,9 +526,9 @@ Step::ptr State::createStep(ref<Store> destStore,
            // FIXME: should copy directly from substituter to destStore.
        }

-        StorePathSet closure;
-        localStore->computeFSClosure({*path}, closure);
-        copyPaths(*localStore, *destStore, closure, NoRepair, CheckSigs, NoSubstitute);
+        copyClosure(*localStore, *destStore,
+            StorePathSet { *path },
+            NoRepair, CheckSigs, NoSubstitute);

        time_t stopTime = time(0);

@@ -620,7 +633,7 @@ void State::processJobsetSharesChange(Connection & conn)
    auto res = txn.exec("select project, name, schedulingShares from Jobsets");
    for (auto const & row : res) {
        auto jobsets_(jobsets.lock());
-        auto i = jobsets_->find(std::make_pair(row["project"].as<string>(), row["name"].as<string>()));
+        auto i = jobsets_->find(std::make_pair(row["project"].as<std::string>(), row["name"].as<std::string>()));
        if (i == jobsets_->end()) continue;
        i->second->setShares(row["schedulingShares"].as<unsigned int>());
    }
@@ -6,12 +6,18 @@
#include <map>
#include <memory>
#include <queue>
+#include <regex>

+#include <prometheus/counter.h>
+#include <prometheus/gauge.h>
+#include <prometheus/registry.h>
+
#include "db.hh"

#include "parsed-derivations.hh"
#include "pathlocks.hh"
#include "pool.hh"
#include "build-result.hh"
#include "store-api.hh"
#include "sync.hh"
#include "nar-extractor.hh"
@@ -290,7 +296,8 @@ struct Machine

    bool isLocalhost()
    {
-        return sshName == "localhost";
+        std::regex r("^(ssh://|ssh-ng://)?localhost$");
+        return std::regex_search(sshName, r);
    }

    // A connection to a machine
@@ -444,8 +451,25 @@ private:
       via gc_roots_dir. */
    nix::Path rootsDir;

+    std::string metricsAddr;
+
+    struct PromMetrics
+    {
+        std::shared_ptr<prometheus::Registry> registry;
+
+        prometheus::Counter& queue_checks_started;
+        prometheus::Counter& queue_build_loads;
+        prometheus::Counter& queue_steps_created;
+        prometheus::Counter& queue_checks_early_exits;
+        prometheus::Counter& queue_checks_finished;
+        prometheus::Gauge& queue_max_id;
+
+        PromMetrics();
+    };
+    PromMetrics prom;
+
public:
-    State();
+    State(std::optional<std::string> metricsAddrOpt);

    struct BuildOptions {
        unsigned int maxSilentTime, buildTimeout, repeats;
@@ -560,6 +584,8 @@ private:

    void addRoot(const nix::StorePath & storePath);

+    void runMetricsExporter();
+
public:

    void showStatus();
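
The isLocalhost() change above widens the match from the literal name localhost to store URIs such as ssh://localhost and ssh-ng://localhost. A quick check of what the anchored regex accepts (the machine names here are made up):

    #include <iostream>
    #include <regex>
    #include <string>

    int main() {
        std::regex r("^(ssh://|ssh-ng://)?localhost$");
        for (const std::string & name : {
                "localhost", "ssh://localhost", "ssh-ng://localhost",
                "ssh://builder1", "localhost.example" })
            std::cout << name << " -> "
                      << (std::regex_search(name, r) ? "local" : "remote") << std::endl;
        // The first three match; the anchored pattern rejects the last two.
    }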
@@ -7,15 +7,16 @@ use base 'Hydra::Base::Controller::NixChannel';
use Hydra::Helper::Nix;
use Hydra::Helper::CatalystUtils;
use File::Basename;
+use File::LibMagic;
use File::stat;
use Data::Dump qw(dump);
use Nix::Store;
use Nix::Config;
use List::SomeUtils qw(all);
use Encode;
-use MIME::Types;
use JSON::PP;

+use feature 'state';
+
sub buildChain :Chained('/') :PathPart('build') :CaptureArgs(1) {
    my ($self, $c, $id) = @_;
@@ -38,6 +39,17 @@ sub buildChain :Chained('/') :PathPart('build') :CaptureArgs(1) {
    $c->stash->{jobset} = $c->stash->{build}->jobset;
    $c->stash->{job} = $c->stash->{build}->job;
    $c->stash->{runcommandlogs} = [$c->stash->{build}->runcommandlogs->search({}, {order_by => ["id DESC"]})];
+
+    $c->stash->{runcommandlogProblem} = undef;
+    if ($c->stash->{job} =~ qr/^runCommandHook\..*/) {
+        if (!$c->config->{dynamicruncommand}->{enable}) {
+            $c->stash->{runcommandlogProblem} = "disabled-server";
+        } elsif (!$c->stash->{project}->enable_dynamic_run_command) {
+            $c->stash->{runcommandlogProblem} = "disabled-project";
+        } elsif (!$c->stash->{jobset}->enable_dynamic_run_command) {
+            $c->stash->{runcommandlogProblem} = "disabled-jobset";
+        }
+    }
}


@@ -223,16 +235,12 @@ sub serveFile {
    elsif ($ls->{type} eq "regular") {

        $c->stash->{'plain'} = { data => grab(cmd => ["nix", "--experimental-features", "nix-command",
-            "cat-store", "--store", getStoreUri(), "$path"]) };
+            "store", "cat", "--store", getStoreUri(), "$path"]) };

-        # Detect MIME type. Borrowed from Catalyst::Plugin::Static::Simple.
-        my $type = "text/plain";
-        if ($path =~ /.*\.(\S{1,})$/xms) {
-            my $ext = $1;
-            my $mimeTypes = MIME::Types->new(only_complete => 1);
-            my $t = $mimeTypes->mimeTypeOf($ext);
-            $type = ref $t ? $t->type : $t if $t;
-        }
+        # Detect MIME type.
+        state $magic = File::LibMagic->new(follow_symlinks => 1);
+        my $info = $magic->info_from_filename($path);
+        my $type = $info->{mime_with_encoding};
        $c->response->content_type($type);
        $c->forward('Hydra::View::Plain');
    }
@@ -277,29 +285,7 @@ sub download : Chained('buildChain') PathPart {
    my $path = $product->path;
    $path .= "/" . join("/", @path) if scalar @path > 0;

-    if (isLocalStore) {
-
-        notFound($c, "File '" . $product->path . "' does not exist.") unless -e $product->path;
-
-        # Make sure the file is in the Nix store.
-        $path = checkPath($self, $c, $path);
-
-        # If this is a directory but no "/" is attached, then redirect.
-        if (-d $path && substr($c->request->uri, -1) ne "/") {
-            return $c->res->redirect($c->request->uri . "/");
-        }
-
-        $path = "$path/index.html" if -d $path && -e "$path/index.html";
-
-        notFound($c, "File '$path' does not exist.") if !-e $path;
-
-        notFound($c, "Path '$path' is a directory.") if -d $path;
-
-        $c->serve_static_file($path);
-
-    } else {
-        serveFile($c, $path);
-    }
+    serveFile($c, $path);

    $c->response->headers->last_modified($c->stash->{build}->stoptime);
}
@@ -355,7 +341,7 @@ sub contents : Chained('buildChain') PathPart Args(1) {

    # FIXME: don't use shell invocations below.

-    # FIXME: use nix cat-store
+    # FIXME: use nix store cat

    my $res;
@@ -69,7 +69,7 @@ sub prometheus : Chained('job') PathPart('prometheus') Args(0) {

    my $lastBuild = $c->stash->{jobset}->builds->find(
        { job => $c->stash->{job}, finished => 1 },
-        { order_by => 'id DESC', rows => 1, columns => [@buildListColumns] }
+        { order_by => 'id DESC', rows => 1, columns => ["stoptime", "buildstatus", "closuresize", "size"] }
    );

    $prometheus->new_counter(
@@ -92,6 +92,26 @@ sub prometheus : Chained('job') PathPart('prometheus') Args(0) {
        $c->stash->{job},
    )->inc($lastBuild->buildstatus > 0);

+    $prometheus->new_gauge(
+        name => "hydra_build_closure_size",
+        help => "Closure size of the last job's build in bytes",
+        labels => [ "project", "jobset", "job" ]
+    )->labels(
+        $c->stash->{project}->name,
+        $c->stash->{jobset}->name,
+        $c->stash->{job},
+    )->inc($lastBuild->closuresize);
+
+    $prometheus->new_gauge(
+        name => "hydra_build_output_size",
+        help => "Output size of the last job's build in bytes",
+        labels => [ "project", "jobset", "job" ]
+    )->labels(
+        $c->stash->{project}->name,
+        $c->stash->{jobset}->name,
+        $c->stash->{job},
+    )->inc($lastBuild->size);
+
    $c->stash->{'plain'} = { data => $prometheus->render };
    $c->forward('Hydra::View::Plain');
}
@@ -261,6 +261,14 @@ sub updateJobset {

    my $checkinterval = int(trim($c->stash->{params}->{checkinterval}));

+    my $enable_dynamic_run_command = defined $c->stash->{params}->{enable_dynamic_run_command} ? 1 : 0;
+    if ($enable_dynamic_run_command
+        && !($c->config->{dynamicruncommand}->{enable}
+            && $jobset->project->enable_dynamic_run_command))
+    {
+        badRequest($c, "Dynamic RunCommand is not enabled by the server or the parent project.");
+    }
+
    $jobset->update(
        { name => $jobsetName
        , description => trim($c->stash->{params}->{"description"})
@@ -268,6 +276,7 @@ sub updateJobset {
        , nixexprinput => $nixExprInput
        , enabled => $enabled
        , enableemail => defined $c->stash->{params}->{enableemail} ? 1 : 0
+        , enable_dynamic_run_command => $enable_dynamic_run_command
        , emailoverride => trim($c->stash->{params}->{emailoverride}) || ""
        , hidden => defined $c->stash->{params}->{visible} ? 0 : 1
        , keepnr => int(trim($c->stash->{params}->{keepnr} // "0"))
@@ -149,6 +149,11 @@ sub updateProject {
    my $displayName = trim $c->stash->{params}->{displayname};
    error($c, "You must specify a display name.") if $displayName eq "";

+    my $enable_dynamic_run_command = defined $c->stash->{params}->{enable_dynamic_run_command} ? 1 : 0;
+    if ($enable_dynamic_run_command && !$c->config->{dynamicruncommand}->{enable}) {
+        badRequest($c, "Dynamic RunCommand is not enabled by the server.");
+    }
+
    $project->update(
        { name => $projectName
        , displayname => $displayName
@@ -157,6 +162,7 @@ sub updateProject {
        , enabled => defined $c->stash->{params}->{enabled} ? 1 : 0
        , hidden => defined $c->stash->{params}->{visible} ? 0 : 1
        , owner => $owner
        , enable_dynamic_run_command => $enable_dynamic_run_command
        , declfile => trim($c->stash->{params}->{declarative}->{file})
        , decltype => trim($c->stash->{params}->{declarative}->{type})
        , declvalue => trim($c->stash->{params}->{declarative}->{value})
@@ -19,14 +19,16 @@ use Hydra::Helper::CatalystUtils;

our @ISA = qw(Exporter);
our @EXPORT = qw(
+    validateDeclarativeJobset
+    createJobsetInputsRowAndData
    updateDeclarativeJobset
    handleDeclarativeJobsetBuild
    handleDeclarativeJobsetJson
);


-sub updateDeclarativeJobset {
-    my ($db, $project, $jobsetName, $declSpec) = @_;
+sub validateDeclarativeJobset {
+    my ($config, $project, $jobsetName, $declSpec) = @_;

    my @allowed_keys = qw(
        enabled
@@ -39,6 +41,7 @@ sub updateDeclarativeJobset {
        checkinterval
        schedulingshares
        enableemail
+        enable_dynamic_run_command
        emailoverride
        keepnr
    );
@@ -61,16 +64,39 @@ sub updateDeclarativeJobset {
        }
    }

+    my $enable_dynamic_run_command = defined $update{enable_dynamic_run_command} ? 1 : 0;
+    if ($enable_dynamic_run_command
+        && !($config->{dynamicruncommand}->{enable}
+            && $project->enable_dynamic_run_command))
+    {
+        die "Dynamic RunCommand is not enabled by the server or the parent project.";
+    }
+
+    return %update;
+}
+
+sub createJobsetInputsRowAndData {
+    my ($name, $declSpec) = @_;
+    my $data = $declSpec->{"inputs"}->{$name};
+    my $row = {
+        name => $name,
+        type => $data->{type}
+    };
+    $row->{emailresponsible} = $data->{emailresponsible} // 0;
+
+    return ($row, $data);
+}
+
+sub updateDeclarativeJobset {
+    my ($config, $db, $project, $jobsetName, $declSpec) = @_;
+
+    my %update = validateDeclarativeJobset($config, $project, $jobsetName, $declSpec);
+
    $db->txn_do(sub {
        my $jobset = $project->jobsets->update_or_create(\%update);
        $jobset->jobsetinputs->delete;
        foreach my $name (keys %{$declSpec->{"inputs"}}) {
-            my $data = $declSpec->{"inputs"}->{$name};
-            my $row = {
-                name => $name,
-                type => $data->{type}
-            };
-            $row->{emailresponsible} = $data->{emailresponsible} // 0;
+            my ($row, $data) = createJobsetInputsRowAndData($name, $declSpec);
            my $input = $jobset->jobsetinputs->create($row);
            $input->jobsetinputalts->create({altnr => 0, value => $data->{value}});
        }
@@ -81,6 +107,7 @@ sub updateDeclarativeJobset {

sub handleDeclarativeJobsetJson {
    my ($db, $project, $declSpec) = @_;
+    my $config = getHydraConfig();
    $db->txn_do(sub {
        my @kept = keys %$declSpec;
        push @kept, ".jobsets";
@@ -88,7 +115,7 @@ sub handleDeclarativeJobsetJson {
        foreach my $jobsetName (keys %$declSpec) {
            my $spec = $declSpec->{$jobsetName};
            eval {
-                updateDeclarativeJobset($db, $project, $jobsetName, $spec);
+                updateDeclarativeJobset($config, $db, $project, $jobsetName, $spec);
                1;
            } or do {
                print STDERR "ERROR: failed to process declarative jobset ", $project->name, ":${jobsetName}, ", $@, "\n";
@@ -537,7 +537,7 @@ sub getStoreUri {
sub readNixFile {
    my ($path) = @_;
    return grab(cmd => ["nix", "--experimental-features", "nix-command",
-        "cat-store", "--store", getStoreUri(), "$path"]);
+        "store", "cat", "--store", getStoreUri(), "$path"]);
}
@@ -261,7 +261,7 @@ sub getCommits {

    my $clonePath = getSCMCacheDir . "/git/" . sha256_hex($uri);

-    my $out = grab(cmd => ["git", "log", "--pretty=format:%H%x09%an%x09%ae%x09%at", "$rev1..$rev2"], dir => $clonePath);
+    my $out = grab(cmd => ["git", "--git-dir=.git", "log", "--pretty=format:%H%x09%an%x09%ae%x09%at", "$rev1..$rev2"], dir => $clonePath);

    my $res = [];
    foreach my $line (split /\n/, $out) {
@@ -30,7 +30,7 @@ sub _iterate {
        $pulls->{$pull->{number}} = $pull;
    }
    # TODO Make Link header parsing more robust!!!
-    my @links = split ',', $res->header("Link");
+    my @links = split ',', ($res->header("Link") // "");
    my $next = "";
    foreach my $link (@links) {
        my ($url, $rel) = split ";", $link;
@@ -1,89 +0,0 @@
-package Hydra::Plugin::HipChatNotification;
-
-use strict;
-use warnings;
-use parent 'Hydra::Plugin';
-use LWP::UserAgent;
-use Hydra::Helper::CatalystUtils;
-
-sub isEnabled {
-    my ($self) = @_;
-    return defined $self->{config}->{hipchat};
-}
-
-sub buildFinished {
-    my ($self, $topbuild, $dependents) = @_;
-
-    my $cfg = $self->{config}->{hipchat};
-    my @config = defined $cfg ? ref $cfg eq "ARRAY" ? @$cfg : ($cfg) : ();
-
-    my $baseurl = $self->{config}->{'base_uri'} || "http://localhost:3000";
-
-    # Figure out to which rooms to send notification. For each email
-    # room, we send one aggregate message.
-    my %rooms;
-    foreach my $build ($topbuild, @{$dependents}) {
-        my $prevBuild = getPreviousBuild($build);
-        my $jobName = showJobName $build;
-
-        foreach my $room (@config) {
-            my $force = $room->{force};
-            next unless $jobName =~ /^$room->{jobs}$/;
-
-            # If build is cancelled or aborted, do not send email.
-            next if ! $force && ($build->buildstatus == 4 || $build->buildstatus == 3);
-
-            # If there is a previous (that is not cancelled or aborted) build
-            # with same buildstatus, do not send email.
-            next if ! $force && defined $prevBuild && ($build->buildstatus == $prevBuild->buildstatus);
-
-            $rooms{$room->{room}} //= { room => $room, builds => [] };
-            push @{$rooms{$room->{room}}->{builds}}, $build;
-        }
-    }
-
-    return if scalar keys %rooms == 0;
-
-    my ($authors, $nrCommits) = getResponsibleAuthors($topbuild, $self->{plugins});
-
-    # Send a message to each room.
-    foreach my $roomId (keys %rooms) {
-        my $room = $rooms{$roomId};
-        my @deps = grep { $_->id != $topbuild->id } @{$room->{builds}};
-
-        my $img =
-            $topbuild->buildstatus == 0 ? "$baseurl/static/images/checkmark_16.png" :
-            $topbuild->buildstatus == 2 ? "$baseurl/static/images/dependency_16.png" :
-            $topbuild->buildstatus == 4 ? "$baseurl/static/images/cancelled_16.png" :
-            "$baseurl/static/images/error_16.png";
-
-        my $msg = "";
-        $msg .= "<img src='$img'/> ";
-        $msg .= "Job <a href='$baseurl/job/${\$topbuild->jobset->get_column('project')}/${\$topbuild->jobset->get_column('name')}/${\$topbuild->get_column('job')}'>${\showJobName($topbuild)}</a>";
-        $msg .= " (and ${\scalar @deps} others)" if scalar @deps > 0;
-        $msg .= ": <a href='$baseurl/build/${\$topbuild->id}'>" . showStatus($topbuild) . "</a>";
-
-        if (scalar keys %{$authors} > 0) {
-            # FIXME: HTML escaping
-            my @x = map { "<a href='mailto:$authors->{$_}'>$_</a>" } (sort keys %{$authors});
-            $msg .= ", likely due to ";
-            $msg .= "$nrCommits commits by " if $nrCommits > 1;
-            $msg .= join(" or ", scalar @x > 1 ? join(", ", @x[0..scalar @x - 2]) : (), $x[-1]);
-        }
-
-        print STDERR "sending hipchat notification to room $roomId: $msg\n";
-
-        my $ua = LWP::UserAgent->new();
-        my $resp = $ua->post('https://api.hipchat.com/v1/rooms/message?format=json&auth_token=' . $room->{room}->{token}, {
-            room_id => $roomId,
-            from => 'Hydra',
-            message => $msg,
-            message_format => 'html',
-            notify => $room->{room}->{notify} || 0,
-            color => $topbuild->buildstatus == 0 ? 'green' : 'red' });
-
-        print STDERR $resp->status_line, ": ", $resp->decoded_content,"\n" if !$resp->is_success;
-    }
-}
-
-1;
@@ -12,7 +12,74 @@ use Try::Tiny;

sub isEnabled {
    my ($self) = @_;
-    return defined $self->{config}->{runcommand};
+
+    return areStaticCommandsEnabled($self->{config}) || areDynamicCommandsEnabled($self->{config});
+}
+
+sub areStaticCommandsEnabled {
+    my ($config) = @_;
+
+    if (defined $config->{runcommand}) {
+        return 1;
+    }
+
+    return 0;
+}
+
+sub areDynamicCommandsEnabled {
+    my ($config) = @_;
+
+    if ((defined $config->{dynamicruncommand})
+        && $config->{dynamicruncommand}->{enable}) {
+        return 1;
+    }
+
+    return 0;
+}
+
+sub isBuildEligibleForDynamicRunCommand {
+    my ($build) = @_;
+
+    if ($build->get_column("buildstatus") != 0) {
+        return 0;
+    }
+
+    if ($build->get_column("job") =~ "^runCommandHook\..+") {
+        my $out = $build->buildoutputs->find({name => "out"});
+        if (!defined $out) {
+            warn "DynamicRunCommand hook on " . $build->job . " (" . $build->id . ") rejected: no output named 'out'.";
+            return 0;
+        }
+
+        my $path = $out->path;
+        if (-l $path) {
+            $path = readlink($path);
+        }
+
+        if (! -e $path) {
+            warn "DynamicRunCommand hook on " . $build->job . " (" . $build->id . ") rejected: The 'out' output doesn't exist locally. This is a bug.";
+            return 0;
+        }
+
+        if (! -x $path) {
+            warn "DynamicRunCommand hook on " . $build->job . " (" . $build->id . ") rejected: The 'out' output is not executable.";
+            return 0;
+        }
+
+        if (! -f $path) {
+            warn "DynamicRunCommand hook on " . $build->job . " (" . $build->id . ") rejected: The 'out' output is not a regular file or symlink.";
+            return 0;
+        }
+
+        if (! $build->jobset->supportsDynamicRunCommand()) {
+            warn "DynamicRunCommand hook on " . $build->job . " (" . $build->id . ") rejected: The project or jobset don't have dynamic runcommand enabled.";
+            return 0;
+        }
+
+        return 1;
+    }
+
+    return 0;
}

sub configSectionMatches {
@@ -43,10 +110,11 @@ sub eventMatches {
}

sub fanoutToCommands {
-    my ($config, $event, $project, $jobset, $job) = @_;
+    my ($config, $event, $build) = @_;

    my @commands;

+    # Calculate all the statically defined commands to execute
    my $cfg = $config->{runcommand};
    my @config = defined $cfg ? ref $cfg eq "ARRAY" ? @$cfg : ($cfg) : ();

@@ -55,9 +123,10 @@ sub fanoutToCommands {
        next unless eventMatches($conf, $event);
        next unless configSectionMatches(
            $matcher,
-            $project,
-            $jobset,
-            $job);
+            $build->jobset->get_column('project'),
+            $build->jobset->get_column('name'),
+            $build->get_column('job')
+        );

        if (!defined($conf->{command})) {
            warn "<runcommand> section for '$matcher' lacks a 'command' option";
@@ -70,6 +139,18 @@ sub fanoutToCommands {
        })
    }

+    # Calculate all dynamically defined commands to execute
+    if (areDynamicCommandsEnabled($config)) {
+        if (isBuildEligibleForDynamicRunCommand($build)) {
+            my $job = $build->get_column('job');
+            my $out = $build->buildoutputs->find({name => "out"});
+            push(@commands, {
+                matcher => "DynamicRunCommand($job)",
+                command => $out->path
+            })
+        }
+    }
+
    return \@commands;
}

@@ -138,9 +219,7 @@ sub buildFinished {
    my $commandsToRun = fanoutToCommands(
        $self->{config},
        $event,
-        $build->project->get_column('name'),
-        $build->jobset->get_column('name'),
-        $build->get_column('job')
+        $build
    );

    if (@$commandsToRun == 0) {
@@ -155,6 +155,12 @@ __PACKAGE__->table("jobsets");
  data_type: 'text'
  is_nullable: 1

+=head2 enable_dynamic_run_command
+
+  data_type: 'boolean'
+  default_value: false
+  is_nullable: 0
+
=cut

__PACKAGE__->add_columns(
@@ -207,6 +213,8 @@ __PACKAGE__->add_columns(
  { data_type => "integer", default_value => 0, is_nullable => 0 },
  "flake",
  { data_type => "text", is_nullable => 1 },
+  "enable_dynamic_run_command",
+  { data_type => "boolean", default_value => \"false", is_nullable => 0 },
);

=head1 PRIMARY KEY
@@ -354,8 +362,8 @@ __PACKAGE__->has_many(
);


-# Created by DBIx::Class::Schema::Loader v0.07049 @ 2022-01-08 22:24:10
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:cQOnMitrWGMoJX6kZGNW+w
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2022-01-24 14:17:33
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:7wPE5ebeVTkenMCWG9Sgcg

use JSON::MaybeXS;

@@ -378,6 +386,13 @@ __PACKAGE__->add_column(
    "+id" => { retrieve_on_insert => 1 }
);

+sub supportsDynamicRunCommand {
+    my ($self) = @_;
+
+    return $self->get_column('enable_dynamic_run_command') == 1
+        && $self->project->supportsDynamicRunCommand();
+}
+
sub as_json {
    my $self = shift;

@@ -406,6 +421,7 @@ sub as_json {

        # boolean_columns
        "enableemail" => $self->get_column("enableemail") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
+        "enable_dynamic_run_command" => $self->get_column("enable_dynamic_run_command") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
        "visible" => $self->get_column("hidden") ? JSON::MaybeXS::false : JSON::MaybeXS::true,

        "inputs" => { map { $_->name => $_ } $self->jobsetinputs }
@@ -88,6 +88,12 @@ __PACKAGE__->table("projects");
  data_type: 'text'
  is_nullable: 1

+=head2 enable_dynamic_run_command
+
+  data_type: 'boolean'
+  default_value: false
+  is_nullable: 0
+
=cut

__PACKAGE__->add_columns(
@@ -111,6 +117,8 @@ __PACKAGE__->add_columns(
  { data_type => "text", is_nullable => 1 },
  "declvalue",
  { data_type => "text", is_nullable => 1 },
+  "enable_dynamic_run_command",
+  { data_type => "boolean", default_value => \"false", is_nullable => 0 },
);

=head1 PRIMARY KEY
@@ -228,8 +236,8 @@ Composing rels: L</projectmembers> -> username
__PACKAGE__->many_to_many("usernames", "projectmembers", "username");


-# Created by DBIx::Class::Schema::Loader v0.07049 @ 2022-01-08 22:24:10
-# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:r/wbX3FAm5/OFrrwOQL5fA
+# Created by DBIx::Class::Schema::Loader v0.07049 @ 2022-01-24 14:20:32
+# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:PtXDyT8Pc7LYhhdEG39EKQ

use JSON::MaybeXS;

@@ -238,6 +246,12 @@ sub builds {
    return $self->jobsets->related_resultset('builds');
};

+sub supportsDynamicRunCommand {
+    my ($self) = @_;
+
+    return $self->get_column('enable_dynamic_run_command') == 1;
+}
+
sub as_json {
    my $self = shift;

@@ -251,6 +265,7 @@ sub as_json {

        # boolean_columns
        "enabled" => $self->get_column("enabled") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
+        "enable_dynamic_run_command" => $self->get_column("enable_dynamic_run_command") ? JSON::MaybeXS::true : JSON::MaybeXS::false,
        "hidden" => $self->get_column("hidden") ? JSON::MaybeXS::true : JSON::MaybeXS::false,

        "jobsets" => [ map { $_->name } $self->jobsets ]
@@ -18,7 +18,7 @@ struct Connection : pqxx::connection
    std::string upper_prefix = "DBI:Pg:";

    if (hasPrefix(s, lower_prefix) || hasPrefix(s, upper_prefix)) {
-        return concatStringsSep(" ", tokenizeString<Strings>(string(s, lower_prefix.size()), ";"));
+        return concatStringsSep(" ", tokenizeString<Strings>(std::string(s, lower_prefix.size()), ";"));
    }

    throw Error("$HYDRA_DBI does not denote a PostgreSQL database");
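
The Connection helper above turns a Perl-style $HYDRA_DBI value such as dbi:Pg:dbname=hydra;host=... into the space-separated keyword form that libpqxx expects, by stripping the prefix and re-joining the ;-separated tokens. A standalone sketch of the same transformation (the example DBI string is made up):

    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    int main() {
        // Made-up example of a Perl DBI connection string.
        std::string s = "dbi:Pg:dbname=hydra;host=db.example.org;user=hydra";
        const std::string prefix = "dbi:Pg:";

        std::string rest = s.substr(prefix.size());
        std::vector<std::string> tokens;
        std::stringstream ss(rest);
        for (std::string tok; std::getline(ss, tok, ';');)
            tokens.push_back(tok);

        std::string out;
        for (size_t i = 0; i < tokens.size(); ++i)
            out += (i ? " " : "") + tokens[i];

        std::cout << out << std::endl;   // dbname=hydra host=db.example.org user=hydra
    }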
@@ -17,7 +17,7 @@ struct HydraConfig
    if (hydraConfigFile && pathExists(*hydraConfigFile)) {

        for (auto line : tokenizeString<Strings>(readFile(*hydraConfigFile), "\n")) {
-            line = trim(string(line, 0, line.find('#')));
+            line = trim(std::string(line, 0, line.find('#')));

            auto eq = line.find('=');
            if (eq == std::string::npos) continue;
@@ -4,6 +4,6 @@

<div class="dep-tree">
  <ul class="tree">
-    [% INCLUDE renderNode node=buildTimeGraph %]
+    [% INCLUDE renderNode node=buildTimeGraph isRoot=1 %]
  </ul>
</div>
@@ -149,7 +149,7 @@ END;
    [% IF build.dependents %]<li class="nav-item"><a class="nav-link" href="#tabs-usedby" data-toggle="tab">Used By</a></li>[% END%]
    [% IF drvAvailable %]<li class="nav-item"><a class="nav-link" href="#tabs-build-deps" data-toggle="tab">Build Dependencies</a></li>[% END %]
    [% IF localStore && available %]<li class="nav-item"><a class="nav-link" href="#tabs-runtime-deps" data-toggle="tab">Runtime Dependencies</a></li>[% END %]
-    [% IF runcommandlogs.size() > 0 %]<li class="nav-item"><a class="nav-link" href="#tabs-runcommandlogs" data-toggle="tab">RunCommand Logs</a></li>[% END %]
+    [% IF runcommandlogProblem || runcommandlogs.size() > 0 %]<li class="nav-item"><a class="nav-link" href="#tabs-runcommandlogs" data-toggle="tab">RunCommand Logs[% IF runcommandlogProblem %] <span class="badge badge-warning">Disabled</span>[% END %]</a></li>[% END %]
  </ul>

  <div id="generic-tabs" class="tab-content">
@@ -481,14 +481,27 @@ END;
  [% END %]

  [% IF drvAvailable %]
-    [% INCLUDE makeLazyTab tabName="tabs-build-deps" uri=c.uri_for('/build' build.id 'build-deps') %]
+    [% INCLUDE makeLazyTab tabName="tabs-build-deps" uri=c.uri_for('/build' build.id 'build-deps') callback="makeTreeCollapsible" %]
  [% END %]

  [% IF available %]
-    [% INCLUDE makeLazyTab tabName="tabs-runtime-deps" uri=c.uri_for('/build' build.id 'runtime-deps') %]
+    [% INCLUDE makeLazyTab tabName="tabs-runtime-deps" uri=c.uri_for('/build' build.id 'runtime-deps') callback="makeTreeCollapsible" %]
  [% END %]

  <div id="tabs-runcommandlogs" class="tab-pane">
+    [% IF runcommandlogProblem %]
+      <div class="alert alert-warning" role="alert">
+        [% IF runcommandlogProblem == "disabled-server" %]
+          This server does not enable Dynamic RunCommand support.
+        [% ELSIF runcommandlogProblem == "disabled-project" %]
+          This project does not enable Dynamic RunCommand support.
+        [% ELSIF runcommandlogProblem == "disabled-jobset" %]
+          This jobset does not enable Dynamic RunCommand support.
+        [% ELSE %]
+          Dynamic RunCommand is not enabled: [% runcommandlogProblem %].
+        [% END %]
+      </div>
+    [% END %]
    <div class="d-flex flex-column">
      [% FOREACH runcommandlog IN runcommandlogs %]
        <div class="p-2 border-bottom">
@@ -520,7 +520,11 @@ BLOCK makeLazyTab %]
    <center><span class="spinner-border spinner-border-sm"/></center>
  </div>
  <script>
-    $(function() { makeLazyTab("[% tabName %]", "[% uri %]"); });
+    [% IF callback.defined %]
+      $(function() { makeLazyTab("[% tabName %]", "[% uri %]", [% callback %] ); });
+    [% ELSE %]
+      $(function() { makeLazyTab("[% tabName %]", "[% uri %]", null ); });
+    [% END %]
  </script>
[% END;
@@ -19,9 +19,16 @@
      <tt>[% node.name %]</tt> (<em>no info</em>)
    [% END %]
  </span></span>
+  [% IF isRoot %]
+    <span class="dep-tree-buttons">
+      (<a href="#" class="tree-collapse-all">collapse all</a>
+      –
+      <a href="#" class="tree-expand-all">expand all</a>)
+    </span>
+  [% END %]
  [% IF node.refs.size > 0 %]
    <ul class="subtree">
-      [% FOREACH ref IN node.refs; INCLUDE renderNode node=ref; END %]
+      [% FOREACH ref IN node.refs; INCLUDE renderNode node=ref isRoot=0; END %]
    </ul>
  [% END %]
[% END %]
@@ -157,6 +157,21 @@
  </div>
</div>

+<div class="form-group row">
+  <label class="col-sm-3" for="editjobsetenable_dynamic_run_command">Enable Dynamic RunCommand Hooks</label>
+  <div class="col-sm-9">
+    <input type="checkbox" id="editjobsetenable_dynamic_run_command" name="enable_dynamic_run_command"
+      [% IF !c.config.dynamicruncommand.enable %]
+        title="The server has not enabled dynamic RunCommands" disabled
+      [% ELSIF !project.enable_dynamic_run_command %]
+        title="The parent project has not enabled dynamic RunCommands" disabled
+      [% ELSIF jobset.enable_dynamic_run_command %]
+        checked
+      [% END %]
+    />
+  </div>
+</div>
+
<div class="form-group row">
  <label class="col-sm-3" for="editjobsetenableemail">Email notification</label>
  <div class="col-sm-9">
@@ -52,6 +52,20 @@
  </div>
</div>


+<div class="form-group row">
+  <label class="col-sm-3" for="editprojectenable_dynamic_run_command">Enable Dynamic RunCommand Hooks for Jobsets</label>
+  <div class="col-sm-9">
+    <input type="checkbox" id="editprojectenable_dynamic_run_command" name="enable_dynamic_run_command"
+      [% IF !c.config.dynamicruncommand.enable %]
+        title="The server has not enabled dynamic RunCommands" disabled
+      [% ELSIF project.enable_dynamic_run_command %]
+        checked
+      [% END %]
+    />
+  </div>
+</div>
+
<div class="form-group row">
  <label class="col-sm-3" for="editprojectdeclfile">
    Declarative spec file
@@ -160,6 +160,10 @@
  <th>Scheduling shares:</th>
  <td>[% jobset.schedulingshares %] [% IF totalShares %] ([% f = format("%.2f"); f(jobset.schedulingshares / totalShares * 100) %]% out of [% totalShares %] shares)[% END %]</td>
</tr>
+<tr>
+  <th>Enable Dynamic RunCommand Hooks:</th>
+  <td>[% c.config.dynamicruncommand.enable ? project.enable_dynamic_run_command ? jobset.enable_dynamic_run_command ? "Yes" : "No (not enabled by jobset)" : "No (not enabled by project)" : "No (not enabled by server)" %]</td>
+</tr>
[% IF emailNotification %]
<tr>
  <th>Enable email notification:</th>
||||
|
@ -93,7 +93,7 @@
<footer class="navbar">
<hr />
<small>
<em><a href="http://nixos.org/hydra" target="_blank">Hydra</a> [% HTML.escape(version) %] (using [% HTML.escape(nixVersion) %]).</em>
<em><a href="http://nixos.org/hydra" target="_blank" class="squiggle">Hydra</a> [% HTML.escape(version) %] (using [% HTML.escape(nixVersion) %]).</em>
[% IF c.user_exists %]
You are signed in as <tt>[% HTML.escape(c.user.username) %]</tt>
[%- IF c.user.type == 'google' %] via Google[% END %].

@ -92,6 +92,10 @@
<th>Enabled:</th>
<td>[% project.enabled ? "Yes" : "No" %]</td>
</tr>
<tr>
<th>Enable Dynamic RunCommand Hooks:</th>
<td>[% c.config.dynamicruncommand.enable ? project.enable_dynamic_run_command ? "Yes" : "No (not enabled by project)" : "No (not enabled by server)" %]</td>
</tr>
</table>
</div>

@ -1,5 +1,5 @@
div.skip-topbar {
padding-top: 40px;
padding-top: 20px;
margin-bottom: 1.5em;
}

@ -33,6 +33,11 @@ span:target > span.dep-tree-line {
font-weight: bold;
}

span.dep-tree-buttons {
font-style: italic;
padding-left: 10px;
}

span.disabled-project, span.disabled-jobset, span.disabled-job {
text-decoration: line-through;
}

@ -146,6 +151,36 @@ td.step-status span.warn {
padding-top: 1.5rem;
}

.container {
max-width: 80%;
}

.tab-content {
margin-right: 0 !important;
}

body {
line-height: 1;
}

.navbar-nav {
line-height: 1.5;
}

.dropdown-item {
line-height: 1.5;
}

a.squiggle:hover {
background-image: url("data:image/svg+xml;charset=utf8,%3Csvg id='squiggle-link' xmlns='http://www.w3.org/2000/svg' xmlns:xlink='http://www.w3.org/1999/xlink' xmlns:ev='http://www.w3.org/2001/xml-events' viewBox='0 0 10 18'%3E%3Cstyle type='text/css'%3E.squiggle{animation:shift .5s linear infinite;}@keyframes shift {from {transform:translateX(-10px);}to {transform:translateX(0);}}%3C/style%3E%3Cpath fill='none' stroke='%230056b3' stroke-width='0.65' class='squiggle' d='M0,17.5 c 2.5,0,2.5,-1.5,5,-1.5 s 2.5,1.5,5,1.5 c 2.5,0,2.5,-1.5,5,-1.5 s 2.5,1.5,5,1.5' /%3E%3C/svg%3E");
background-position: 0 100%;
background-size: auto 24px;
background-repeat: repeat;
text-decoration: none;
border-bottom: none;
padding-bottom: 1px;
}

@media (prefers-color-scheme: dark) {
/* Prevent some flickering */
html {

@ -9,6 +9,7 @@ ul.tree, ul.subtree {
ul.subtree > li {
position: relative;
padding-left: 2.0em;
line-height: 140%;
border-left: 0.1em solid #6185a0;
}

@ -1,10 +1,9 @@
$(document).ready(function() {

function makeTreeCollapsible(tab) {
/*** Tree toggles in logfiles. ***/

/* Set the appearance of the toggle depending on whether the
corresponding subtree is initially shown or hidden. */
$(".tree-toggle").map(function() {
tab.find(".tree-toggle").map(function() {
if ($(this).siblings("ul:hidden").length == 0) {
$(this).text("-");
} else {

@ -13,7 +12,7 @@ $(document).ready(function() {
});

/* When a toggle is clicked, show or hide the subtree. */
$(".tree-toggle").click(function() {
tab.find(".tree-toggle").click(function() {
if ($(this).siblings("ul:hidden").length != 0) {
$(this).siblings("ul").show();
$(this).text("-");

@ -24,21 +23,23 @@ $(document).ready(function() {
});

/* Implementation of the expand all link. */
$(".tree-expand-all").click(function() {
$(".tree-toggle", $(this).parent().siblings(".tree")).map(function() {
tab.find(".tree-expand-all").click(function() {
tab.find(".tree-toggle", $(this).parent().siblings(".tree")).map(function() {
$(this).siblings("ul").show();
$(this).text("-");
});
});

/* Implementation of the collapse all link. */
$(".tree-collapse-all").click(function() {
$(".tree-toggle", $(this).parent().siblings(".tree")).map(function() {
tab.find(".tree-collapse-all").click(function() {
tab.find(".tree-toggle", $(this).parent().siblings(".tree")).map(function() {
$(this).siblings("ul").hide();
$(this).text("+");
});
});
}

$(document).ready(function() {
$("table.clickable-rows").click(function(event) {
if ($(event.target).closest("a").length) return;
link = $(event.target).parents("tr").find("a.row-link");
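
The makeTreeCollapsible(tab) refactor above scopes every tree-toggle binding to a given tab element instead of binding once at document-ready, so a dep tree that arrives in a lazily loaded tab still gets working toggles. A minimal sketch of the intended call, using a hypothetical tab id and URI not taken from this change:

    // Load a fragment into a tab, then wire up its dep-tree controls.
    var tab = $("#tabs-build-deps");                   // hypothetical tab id
    tab.load("/build/1234/build-deps", function () {   // hypothetical URI
        makeTreeCollapsible(tab);  // binds toggle/expand/collapse inside this tab only
    });
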
@ -132,7 +133,7 @@ $(document).ready(function() {

var tabsLoaded = {};

function makeLazyTab(tabName, uri) {
function makeLazyTab(tabName, uri, callback) {
$('.nav-tabs').bind('show.bs.tab', function(e) {
var pattern = /#.+/gi;
var id = e.target.toString().match(pattern)[0];

@ -140,11 +141,15 @@ function makeLazyTab(tabName, uri) {
tabsLoaded[id] = 1;
$('#' + tabName).load(uri, function(response, status, xhr) {
var lazy = xhr.getResponseHeader("X-Hydra-Lazy") === "Yes";
var tab = $('#' + tabName);
if (status == "error" && !lazy) {
$('#' + tabName).html("<div class='alert alert-error'>Error loading tab: " + xhr.status + " " + xhr.statusText + "</div>");
tab.html("<div class='alert alert-error'>Error loading tab: " + xhr.status + " " + xhr.statusText + "</div>");
}
else {
$('#' + tabName).html(response);
tab.html(response);
if (callback) {
callback(tab);
}
}
});
}
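
With the new optional third argument, a caller can run per-tab setup once the lazy content is in the DOM; the template block earlier in this change passes either [% callback %] or null. A sketch of both call forms, using hypothetical tab names and URIs:

    // Re-attach dep-tree handlers once the fragment has been inserted.
    makeLazyTab("tabs-build-deps", "/build/1234/build-deps", makeTreeCollapsible);

    // Callers that need no post-load hook pass null, as the template fallback does.
    makeLazyTab("tabs-summary", "/build/1234/summary", null);
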
@ -619,7 +619,7 @@ sub checkJobsetWrapped {
} else {
# Update the jobset with the spec's inputs, and then continue
# evaluating the .jobsets jobset.
updateDeclarativeJobset($db, $project, ".jobsets", $declSpec);
updateDeclarativeJobset($config, $db, $project, ".jobsets", $declSpec);
$jobset->discard_changes;
$inputInfo->{"declInput"} = [ $declInput ];
$inputInfo->{"projectName"} = [ fetchInput($plugins, $db, $project, $jobset, "projectName", "string", $project->name, 0) ];

@ -640,8 +640,8 @@ sub checkJobsetWrapped {
my $flakeRef = $jobset->flake;
if (defined $flakeRef) {
(my $res, my $json, my $stderr) = captureStdoutStderr(
600, "nix", "flake", "info", "--tarball-ttl", 0, "--json", "--", $flakeRef);
die "'nix flake info' returned " . ($res & 127 ? "signal $res" : "exit code " . ($res >> 8))
600, "nix", "flake", "metadata", "--refresh", "--json", "--", $flakeRef);
die "'nix flake metadata' returned " . ($res & 127 ? "signal $res" : "exit code " . ($res >> 8))
. ":\n" . ($stderr ? decode("utf-8", $stderr) : "(no output)\n")
if $res;
$flakeRef = decode_json($json)->{'url'};

@ -49,6 +49,7 @@ create table Projects (
declfile text, -- File containing declarative jobset specification
decltype text, -- Type of the input containing declarative jobset specification
declvalue text, -- Value of the input containing declarative jobset specification
enable_dynamic_run_command boolean not null default false,
foreign key (owner) references Users(userName) on update cascade
);

@ -88,6 +89,7 @@ create table Jobsets (
startTime integer, -- if jobset is currently running
type integer not null default 0, -- 0 == legacy, 1 == flake
flake text,
enable_dynamic_run_command boolean not null default false,
constraint jobsets_schedulingshares_nonzero_check check (schedulingShares > 0),
constraint jobsets_type_known_check check (type = 0 or type = 1),
-- If the type is 0, then nixExprInput and nixExprPath should be non-null and other type-specific fields should be null

src/sql/upgrade-82.sql (new file, 4 lines)
@ -0,0 +1,4 @@
ALTER TABLE Jobsets
ADD COLUMN enable_dynamic_run_command boolean not null default false;
ALTER TABLE Projects
ADD COLUMN enable_dynamic_run_command boolean not null default false;