6 Commits

Author SHA1 Message Date
Martin Weinelt
7123dd8981 flake.nix: update to nixos-25.11
Also squashes eval warnings from accessing pkgs.hostPlatform.
2025-12-05 15:46:05 -05:00
Martin Weinelt
39b2c7c0da package.nix: update postgresql to 17
NixOS 25.11 does not ship with PostgreSQL 13 any more.
2025-12-05 15:46:05 -05:00
John Ericson
20a0857660 Revert "Deduplicate protocol code more with ServeProto::BasicClientConnection"
This reverts commit 58846b0a1c.
2025-12-05 15:46:05 -05:00
Joshua Leivenzon
23c6a292be GithubRefs: Allow arbitrary ref types
GitHub's reference list API does not actually restrict the specified type, so don't artificially restrict it.

The API does not actually make a distinction between the "type" and "prefix" at all, but this is maintained for backwards compatibility. The two are simply concatenated.
2025-12-05 15:46:05 -05:00
John Ericson
9ebc15e709 Upgrade Nix to 2.32
2025-10-20 21:20:16 -04:00
John Ericson
dbae951443 Deduplicate protocol code more with ServeProto::BasicClientConnection
I did this in Nix for this purpose, but didn't get around to actually
taking advantage of it here, until now.
2025-10-20 21:20:16 -04:00
6 changed files with 114 additions and 98 deletions
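
The GithubRefs commit above ("Allow arbitrary ref types") rests on the observation that GitHub's ref-listing API only ever sees a single ref-name prefix, so the configured type and prefix end up as one concatenated string anyway. Below is a minimal, hypothetical C++ sketch of that idea, assuming the "list matching references" endpoint; the plugin itself is Perl and may construct its request differently.

#include <iostream>
#include <string>

// Hypothetical helper, not the plugin's actual code: builds a URL for GitHub's
// "list matching references" endpoint. The API matches refs by name prefix, so
// "heads/", "tags/", "pull/" or anything else is just the leading part of that
// one string; there is no separate "type" parameter to validate against.
std::string matchingRefsUrl(const std::string & owner, const std::string & repo,
                            const std::string & type, const std::string & prefix)
{
    return "https://api.github.com/repos/" + owner + "/" + repo
         + "/git/matching-refs/" + type + "/" + prefix;
}

int main()
{
    // "heads" + "release/" and "tags" + "v1" are handled identically:
    // type and prefix are simply concatenated into one ref-name prefix.
    std::cout << matchingRefsUrl("nixos", "hydra", "heads", "release/") << "\n";
    std::cout << matchingRefsUrl("nixos", "hydra", "tags", "v1") << "\n";
}

This is also why the [type] field in the plugin's configuration no longer needs to be restricted to heads or tags, as the documentation change at the bottom of this diff reflects.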

flake.lock (generated, 24 changed lines)

@@ -3,16 +3,16 @@
"nix": {
"flake": false,
"locked": {
"lastModified": 1759956402,
"narHash": "sha256-CM27YK+KMi3HLRXqjPaJwkTabmKW+CDXOE3kMMtXH3s=",
"lastModified": 1760573252,
"narHash": "sha256-mcvNeNdJP5R7huOc8Neg0qZESx/0DMg8Fq6lsdx0x8U=",
"owner": "NixOS",
"repo": "nix",
"rev": "3019db2c87006817b6201113ad4ceee0c53c3b62",
"rev": "3c39583e5512729f9c5a44c3b03b6467a2acd963",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "2.31-maintenance",
"ref": "2.32-maintenance",
"repo": "nix",
"type": "github"
}
@@ -20,32 +20,32 @@
"nix-eval-jobs": {
"flake": false,
"locked": {
"lastModified": 1757626891,
"narHash": "sha256-VrHPtHxVIboqgnw+tlCQepgtBOhBvU5hxbMHsPo8LAc=",
"lastModified": 1760478325,
"narHash": "sha256-hA+NOH8KDcsuvH7vJqSwk74PyZP3MtvI/l+CggZcnTc=",
"owner": "nix-community",
"repo": "nix-eval-jobs",
"rev": "c975efc5b2bec0c1ff93c67de4a03306af258ff7",
"rev": "daa42f9e9c84aeff1e325dd50fda321f53dfd02c",
"type": "github"
},
"original": {
"owner": "nix-community",
"ref": "v2.31.0",
"ref": "v2.32.1",
"repo": "nix-eval-jobs",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1759652726,
"narHash": "sha256-2VjnimOYDRb3DZHyQ2WH2KCouFqYm9h0Rr007Al/WSA=",
"lastModified": 1764020296,
"narHash": "sha256-6zddwDs2n+n01l+1TG6PlyokDdXzu/oBmEejcH5L5+A=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "06b2985f0cc9eb4318bf607168f4b15af1e5e81d",
"rev": "a320ce8e6e2cc6b4397eef214d202a50a4583829",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-25.05-small",
"ref": "nixos-25.11-small",
"repo": "nixpkgs",
"type": "github"
}

flake.nix

@@ -1,16 +1,16 @@
{
description = "A Nix-based continuous build system";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.11-small";
inputs.nix = {
url = "github:NixOS/nix/2.31-maintenance";
url = "github:NixOS/nix/2.32-maintenance";
# We want to control the deps precisely
flake = false;
};
inputs.nix-eval-jobs = {
url = "github:nix-community/nix-eval-jobs/v2.31.0";
url = "github:nix-community/nix-eval-jobs/v2.32.1";
# We want to control the deps precisely
flake = false;
};
@@ -59,7 +59,7 @@
manual = forEachSystem (system: let
pkgs = nixpkgs.legacyPackages.${system};
hydra = self.packages.${pkgs.hostPlatform.system}.hydra;
hydra = self.packages.${pkgs.stdenv.hostPlatform.system}.hydra;
in
pkgs.runCommand "hydra-manual-${hydra.version}" { }
''

nixos-modules/default.nix

@@ -4,7 +4,7 @@
hydra = { pkgs, lib,... }: {
_file = ./default.nix;
imports = [ ./hydra.nix ];
services.hydra-dev.package = lib.mkDefault self.packages.${pkgs.hostPlatform.system}.hydra;
services.hydra-dev.package = lib.mkDefault self.packages.${pkgs.stdenv.hostPlatform.system}.hydra;
};
hydraTest = { pkgs, ... }: {

package.nix

@@ -31,7 +31,7 @@
, perl
, pixz
, boost
, postgresql_13
, postgresql_17
, nlohmann_json
, prometheus-cpp
@@ -192,7 +192,7 @@ stdenv.mkDerivation (finalAttrs: {
subversion
breezy
openldap
postgresql_13
postgresql_17
pixz
nix-eval-jobs
];

src/hydra-queue-runner/build-remote.cc

@@ -14,6 +14,7 @@
#include <nix/util/current-process.hh>
#include <nix/util/processes.hh>
#include <nix/util/util.hh>
#include <nix/store/export-import.hh>
#include <nix/store/serve-protocol.hh>
#include <nix/store/serve-protocol-impl.hh>
#include <nix/store/ssh.hh>
@@ -104,7 +105,7 @@ static void copyClosureTo(
std::chrono::seconds(600));
conn.to << ServeProto::Command::ImportPaths;
destStore.exportPaths(missing, conn.to);
exportPaths(destStore, missing, conn.to);
conn.to.flush();
if (readInt(conn.from) != 1)
@@ -262,16 +263,18 @@ static BuildResult performBuild(
// Since this a `BasicDerivation`, `staticOutputHashes` will not
// do any real work.
auto outputHashes = staticOutputHashes(localStore, drv);
for (auto & [outputName, output] : drvOutputs) {
auto outputPath = output.second;
// We've just asserted that the output paths of the derivation
// were known
assert(outputPath);
auto outputHash = outputHashes.at(outputName);
auto drvOutput = DrvOutput { outputHash, outputName };
result.builtOutputs.insert_or_assign(
std::move(outputName),
Realisation { drvOutput, *outputPath });
if (auto * successP = result.tryGetSuccess()) {
for (auto & [outputName, output] : drvOutputs) {
auto outputPath = output.second;
// We've just asserted that the output paths of the derivation
// were known
assert(outputPath);
auto outputHash = outputHashes.at(outputName);
auto drvOutput = DrvOutput { outputHash, outputName };
successP->builtOutputs.insert_or_assign(
std::move(outputName),
Realisation { drvOutput, *outputPath });
}
}
}
@@ -336,54 +339,68 @@ void RemoteResult::updateWithBuildResult(const nix::BuildResult & buildResult)
startTime = buildResult.startTime;
stopTime = buildResult.stopTime;
timesBuilt = buildResult.timesBuilt;
errorMsg = buildResult.errorMsg;
isNonDeterministic = buildResult.isNonDeterministic;
switch ((BuildResult::Status) buildResult.status) {
case BuildResult::Built:
std::visit(overloaded{
[&](const BuildResult::Success & success) {
stepStatus = bsSuccess;
break;
case BuildResult::Substituted:
case BuildResult::AlreadyValid:
stepStatus = bsSuccess;
isCached = true;
break;
case BuildResult::PermanentFailure:
stepStatus = bsFailed;
canCache = true;
errorMsg = "";
break;
case BuildResult::InputRejected:
case BuildResult::OutputRejected:
stepStatus = bsFailed;
canCache = true;
break;
case BuildResult::TransientFailure:
stepStatus = bsFailed;
canRetry = true;
errorMsg = "";
break;
case BuildResult::TimedOut:
stepStatus = bsTimedOut;
errorMsg = "";
break;
case BuildResult::MiscFailure:
stepStatus = bsAborted;
canRetry = true;
break;
case BuildResult::LogLimitExceeded:
stepStatus = bsLogLimitExceeded;
break;
case BuildResult::NotDeterministic:
stepStatus = bsNotDeterministic;
canRetry = false;
canCache = true;
break;
default:
stepStatus = bsAborted;
break;
}
switch (success.status) {
case BuildResult::Success::Built:
break;
case BuildResult::Success::Substituted:
case BuildResult::Success::AlreadyValid:
case BuildResult::Success::ResolvesToAlreadyValid:
isCached = true;
break;
default:
assert(false);
}
},
[&](const BuildResult::Failure & failure) {
errorMsg = failure.errorMsg;
isNonDeterministic = failure.isNonDeterministic;
switch (failure.status) {
case BuildResult::Failure::PermanentFailure:
stepStatus = bsFailed;
canCache = true;
errorMsg = "";
break;
case BuildResult::Failure::InputRejected:
case BuildResult::Failure::OutputRejected:
stepStatus = bsFailed;
canCache = true;
break;
case BuildResult::Failure::TransientFailure:
stepStatus = bsFailed;
canRetry = true;
errorMsg = "";
break;
case BuildResult::Failure::TimedOut:
stepStatus = bsTimedOut;
errorMsg = "";
break;
case BuildResult::Failure::MiscFailure:
stepStatus = bsAborted;
canRetry = true;
break;
case BuildResult::Failure::LogLimitExceeded:
stepStatus = bsLogLimitExceeded;
break;
case BuildResult::Failure::NotDeterministic:
stepStatus = bsNotDeterministic;
canRetry = false;
canCache = true;
break;
case BuildResult::Failure::CachedFailure:
case BuildResult::Failure::DependencyFailed:
case BuildResult::Failure::NoSubstituters:
case BuildResult::Failure::HashMismatch:
stepStatus = bsAborted;
break;
default:
assert(false);
}
},
}, buildResult.inner);
}
/* Utility guard object to auto-release a semaphore on destruction. */
@@ -405,7 +422,7 @@ void State::buildRemote(ref<Store> destStore,
std::function<void(StepState)> updateStep,
NarMemberDatas & narMembers)
{
assert(BuildResult::TimedOut == 8);
assert(BuildResult::Failure::TimedOut == 8);
auto [logFile, logFD] = build_remote::openLogFile(logDir, step->drvPath);
AutoDelete logFileDel(logFile, false);
@@ -514,7 +531,7 @@ void State::buildRemote(ref<Store> destStore,
updateStep(ssBuilding);
BuildResult buildResult = build_remote::performBuild(
auto buildResult = build_remote::performBuild(
conn,
*localStore,
step->drvPath,
@@ -556,8 +573,9 @@ void State::buildRemote(ref<Store> destStore,
wakeDispatcher();
StorePathSet outputs;
for (auto & [_, realisation] : buildResult.builtOutputs)
outputs.insert(realisation.outPath);
if (auto * successP = buildResult.tryGetSuccess())
for (auto & [_, realisation] : successP->builtOutputs)
outputs.insert(realisation.outPath);
/* Copy the output paths. */
if (!machine->isLocalhost() || localStore != std::shared_ptr<Store>(destStore)) {
@@ -590,15 +608,17 @@ void State::buildRemote(ref<Store> destStore,
/* Register the outputs of the newly built drv */
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
auto outputHashes = staticOutputHashes(*localStore, *step->drv);
for (auto & [outputName, realisation] : buildResult.builtOutputs) {
// Register the resolved drv output
destStore->registerDrvOutput(realisation);
if (auto * successP = buildResult.tryGetSuccess()) {
for (auto & [outputName, realisation] : successP->builtOutputs) {
// Register the resolved drv output
destStore->registerDrvOutput(realisation);
// Also register the unresolved one
auto unresolvedRealisation = realisation;
unresolvedRealisation.signatures.clear();
unresolvedRealisation.id.drvHash = outputHashes.at(outputName);
destStore->registerDrvOutput(unresolvedRealisation);
// Also register the unresolved one
auto unresolvedRealisation = realisation;
unresolvedRealisation.signatures.clear();
unresolvedRealisation.id.drvHash = outputHashes.at(outputName);
destStore->registerDrvOutput(unresolvedRealisation);
}
}
}
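
The build-remote changes above track Nix 2.32's switch from a flat BuildResult status to a Success/Failure variant: the code now either visits both arms (std::visit over an overloaded set of lambdas) or peeks at the success arm via tryGetSuccess() before reading builtOutputs. A minimal self-contained sketch of that pattern, using simplified stand-in types rather than the real nix::BuildResult:

#include <iostream>
#include <string>
#include <variant>

// Simplified stand-ins for the real nix::BuildResult success/failure arms.
struct Success { bool cached = false; };
struct Failure { std::string errorMsg; };

struct Result {
    std::variant<Success, Failure> inner;

    // Analogue of tryGetSuccess(): non-null only if the build succeeded.
    Success * tryGetSuccess() { return std::get_if<Success>(&inner); }
};

// The usual "overloaded" helper: one callable assembled from several lambdas.
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

int main()
{
    Result r{Failure{"builder failed"}};

    // Exhaustive handling of both arms, as in RemoteResult::updateWithBuildResult.
    std::visit(overloaded{
        [](const Success & s) { std::cout << "success, cached=" << s.cached << "\n"; },
        [](const Failure & f) { std::cout << "failure: " << f.errorMsg << "\n"; },
    }, r.inner);

    // Success-only handling, as in the builtOutputs loops above.
    if (auto * successP = r.tryGetSuccess())
        std::cout << "would copy outputs here, cached=" << successP->cached << "\n";
}

The real types carry more state (status enums, realisations), but the control flow is the same: failure details only exist on the Failure arm, which is why errorMsg and isNonDeterministic are now read inside the failure lambda.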

src/lib/Hydra/Plugin/GithubRefs.pm

@@ -18,9 +18,8 @@ tags) from GitHub following a certain naming scheme
=head1 DESCRIPTION
This plugin reads the list of branches or tags using GitHub's REST API. The name
of the reference must follow a particular prefix. This list is stored in the
nix-store and used as an input to declarative jobsets.
This plugin reads the list of branches or tags using GitHub's REST API. This
list is stored in the nix-store and used as an input to declarative jobsets.
=head1 CONFIGURATION
@@ -34,7 +33,7 @@ The declarative project C<spec.json> file must contains an input such as
"pulls": {
"type": "github_refs",
"value": "[owner] [repo] heads|tags - [prefix]",
"value": "[owner] [repo] [type] - [prefix]",
"emailresponsible": false
}
@@ -42,12 +41,11 @@ In the above snippet, C<[owner]> is the repository owner and C<[repo]> is the
repository name. Also note a literal C<->, which is placed there for the future
use.
C<heads|tags> denotes that one of these two is allowed, that is, the third
position should hold either the C<heads> or the C<tags> keyword. In case of the former, the plugin
will fetch all branches, while in case of the latter, it will fetch the tags.
C<[type]> is the type of ref to list. Typical values are "heads", "tags", and
"pull". "." will include all types.
C<prefix> denotes the prefix the reference name must start with, in order to be
included.
included. "." will include all references.
For example, C<"value": "nixos hydra heads - release/"> refers to
L<https://github.com/nixos/hydra> repository, and will fetch all branches that
@@ -102,8 +100,6 @@ sub fetchInput {
return undef if $input_type ne "github_refs";
my ($owner, $repo, $type, $fut, $prefix) = split ' ', $value;
die "type field is neither 'heads' nor 'tags', but '$type'"
unless $type eq 'heads' or $type eq 'tags';
my $auth = $self->{config}->{github_authorization}->{$owner};
my $githubEndpoint = $self->{config}->{github_endpoint} // "https://api.github.com";