Compare commits
123 Commits
4b2d60e185
...
add-gitea-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9ebc15e709 | ||
|
|
dbae951443 | ||
|
|
2b739a2fab | ||
|
|
f0a72a83bb | ||
|
|
ad7dbf6826 | ||
|
|
d294b60477 | ||
|
|
947a769012 | ||
|
|
b1b3440041 | ||
|
|
b832cab12c | ||
|
|
f6fa2e16c0 | ||
| db6fcabbf3 | |||
|
|
3bc4685259 | ||
|
|
6138de486c | ||
|
|
a63ed33f9c | ||
|
|
2e02b25da5 | ||
|
|
b9465afb85 | ||
|
|
3a50e31799 | ||
|
|
5b4d0b6f4b | ||
|
|
7321b29196 | ||
|
|
6c8d789541 | ||
|
|
d9fa44d227 | ||
|
|
9dab0aed49 | ||
|
|
76c6695587 | ||
|
|
d9020e6f1d | ||
|
|
27c7476c3d | ||
|
|
f2cbf14f7e | ||
|
|
4d2d0f9722 | ||
|
|
4125de8208 | ||
|
|
06c6bd1b7b | ||
|
|
bb78a58ea2 | ||
|
|
0764b1f48c | ||
|
|
5014274c99 | ||
|
|
8f3fdc14d8 | ||
|
|
74d923441e | ||
|
|
9396846892 | ||
|
|
252801cea8 | ||
|
|
21a75982aa | ||
|
|
3eeba86a87 | ||
|
|
b295744323 | ||
|
|
ed939f44f4 | ||
|
|
a44d946e09 | ||
|
|
64e05c24e2 | ||
|
|
5187992c94 | ||
|
|
aee4e406e9 | ||
|
|
44b007c167 | ||
|
|
b0ccc5aa49 | ||
|
|
6ea6d8fc70 | ||
|
|
bfd2a4c4f9 | ||
|
|
ea2024a9bc | ||
|
|
81d278fe5b | ||
|
|
5bb8da7280 | ||
|
|
15e742c8c2 | ||
|
|
f9d7629f95 | ||
|
|
6eeb08fc0a | ||
|
|
c396bc958f | ||
|
|
60876ef897 | ||
|
|
83db317594 | ||
|
|
dc6fd37e02 | ||
|
|
16bb3aad9a | ||
|
|
710092c556 | ||
|
|
fdcb6b4a1b | ||
|
|
329816aec4 | ||
|
|
c8549d138d | ||
|
|
794150319c | ||
|
|
9dcb046bf5 | ||
|
|
154886d134 | ||
|
|
c15b1601c6 | ||
|
|
ae8c1554cb | ||
|
|
de10c0e0fb | ||
|
|
e6df0c141c | ||
|
|
78f57b82f7 | ||
|
|
2b0729da7a | ||
|
|
84ce142a9d | ||
|
|
0dd4c0cc8e | ||
|
|
21f793e21b | ||
|
|
1001b67704 | ||
|
|
d5d4d19a4c | ||
|
|
17f9920cf9 | ||
|
|
fa1e989b7d | ||
|
|
47158cf360 | ||
|
|
c7972c3687 | ||
|
|
d12d4753ee | ||
|
|
33a935e8ef | ||
|
|
65618fd590 | ||
|
|
06ba54fca7 | ||
|
|
5b9c22dd18 | ||
|
|
e15070c6c2 | ||
|
|
37744c7018 | ||
|
|
1e3929e75f | ||
|
|
28da0a705f | ||
|
|
2050b2c324 | ||
|
|
21d6d805ba | ||
|
|
478bb01f7f | ||
|
|
08bf31b71a | ||
|
|
641056bd0e | ||
|
|
29a7ab8009 | ||
|
|
eddc234915 | ||
|
|
80f917d8fa | ||
|
|
5cb82812f2 | ||
|
|
17094c8371 | ||
|
|
d5fb163618 | ||
|
|
baec2bbb4c | ||
|
|
b55bd25581 | ||
|
|
1ca17faed4 | ||
|
|
9c022848cf | ||
|
|
f58a752419 | ||
|
|
0769853dec | ||
|
|
21c6afa83b | ||
|
|
1022514027 | ||
|
|
2d4232475c | ||
|
|
d799742057 | ||
|
|
485aa93f2d | ||
|
|
590e8d8511 | ||
|
|
90a8a0d94a | ||
|
|
eb17619ee5 | ||
|
|
ebefdb0a3d | ||
|
|
55349930f1 | ||
|
|
847a8ae6cd | ||
| 86d0009448 | |||
| a20f37b97f | |||
| a94f84118c | |||
|
|
99e3ad325c | ||
|
|
2f1fa2b069 |
@@ -92,6 +92,23 @@ Sets Gitea CI status
|
|||||||
|
|
||||||
- `gitea_authorization.<repo-owner>`
|
- `gitea_authorization.<repo-owner>`
|
||||||
|
|
||||||
|
## Gitea pulls
|
||||||
|
|
||||||
|
Create jobs based on open Gitea pull requests
|
||||||
|
|
||||||
|
### Configuration options
|
||||||
|
|
||||||
|
- `gitea_authorization.<repo-owner>`
|
||||||
|
|
||||||
|
## Gitea refs
|
||||||
|
|
||||||
|
Hydra plugin for retrieving the list of references (branches or tags) from
|
||||||
|
Gitea following a certain naming scheme.
|
||||||
|
|
||||||
|
### Configuration options
|
||||||
|
|
||||||
|
- `gitea_authorization.<repo-owner>`
|
||||||
|
|
||||||
## GitHub pulls
|
## GitHub pulls
|
||||||
|
|
||||||
Create jobs based on open GitHub pull requests
|
Create jobs based on open GitHub pull requests
|
||||||
|
|||||||
15
flake.lock
generated
15
flake.lock
generated
@@ -3,16 +3,16 @@
|
|||||||
"nix": {
|
"nix": {
|
||||||
"flake": false,
|
"flake": false,
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1758562014,
|
"lastModified": 1760573252,
|
||||||
"narHash": "sha256-IazqNpt3jNldKy+rivmlGuo9pC1IczV0Xjk5+5EQEzQ=",
|
"narHash": "sha256-mcvNeNdJP5R7huOc8Neg0qZESx/0DMg8Fq6lsdx0x8U=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nix",
|
"repo": "nix",
|
||||||
"rev": "f2b45e014b909bb5e6a9f99a8a511deed3b3e2a4",
|
"rev": "3c39583e5512729f9c5a44c3b03b6467a2acd963",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"ref": "2.30-maintenance",
|
"ref": "2.32-maintenance",
|
||||||
"repo": "nix",
|
"repo": "nix",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
@@ -20,15 +20,16 @@
|
|||||||
"nix-eval-jobs": {
|
"nix-eval-jobs": {
|
||||||
"flake": false,
|
"flake": false,
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1752066681,
|
"lastModified": 1760478325,
|
||||||
"narHash": "sha256-e10zYtdc5nDHs2iLf/h92+uW5WOUZGqohfaU919B/TI=",
|
"narHash": "sha256-hA+NOH8KDcsuvH7vJqSwk74PyZP3MtvI/l+CggZcnTc=",
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
"repo": "nix-eval-jobs",
|
"repo": "nix-eval-jobs",
|
||||||
"rev": "fae2b990f19c10b7d5718b6eff1df30188ca780e",
|
"rev": "daa42f9e9c84aeff1e325dd50fda321f53dfd02c",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "nix-community",
|
"owner": "nix-community",
|
||||||
|
"ref": "v2.32.1",
|
||||||
"repo": "nix-eval-jobs",
|
"repo": "nix-eval-jobs",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,13 +4,13 @@
|
|||||||
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small";
|
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small";
|
||||||
|
|
||||||
inputs.nix = {
|
inputs.nix = {
|
||||||
url = "github:NixOS/nix/2.30-maintenance";
|
url = "github:NixOS/nix/2.32-maintenance";
|
||||||
# We want to control the deps precisely
|
# We want to control the deps precisely
|
||||||
flake = false;
|
flake = false;
|
||||||
};
|
};
|
||||||
|
|
||||||
inputs.nix-eval-jobs = {
|
inputs.nix-eval-jobs = {
|
||||||
url = "github:nix-community/nix-eval-jobs";
|
url = "github:nix-community/nix-eval-jobs/v2.32.1";
|
||||||
# We want to control the deps precisely
|
# We want to control the deps precisely
|
||||||
flake = false;
|
flake = false;
|
||||||
};
|
};
|
||||||
|
|||||||
90
hydra/jobsets.nix
Normal file
90
hydra/jobsets.nix
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
{ pulls, branches, ... }:
|
||||||
|
let
|
||||||
|
# create the json spec for the jobset
|
||||||
|
makeSpec =
|
||||||
|
contents:
|
||||||
|
builtins.derivation {
|
||||||
|
name = "spec.json";
|
||||||
|
system = "x86_64-linux";
|
||||||
|
preferLocalBuild = true;
|
||||||
|
allowSubstitutes = false;
|
||||||
|
builder = "/bin/sh";
|
||||||
|
args = [
|
||||||
|
(builtins.toFile "builder.sh" ''
|
||||||
|
echo "$contents" > $out
|
||||||
|
'')
|
||||||
|
];
|
||||||
|
contents = builtins.toJSON contents;
|
||||||
|
};
|
||||||
|
|
||||||
|
prs = readJSONFile pulls;
|
||||||
|
refs = readJSONFile branches;
|
||||||
|
|
||||||
|
# template for creating a job
|
||||||
|
makeJob =
|
||||||
|
{
|
||||||
|
schedulingshares ? 10,
|
||||||
|
keepnr ? 3,
|
||||||
|
description,
|
||||||
|
flake,
|
||||||
|
enabled ? 1,
|
||||||
|
}:
|
||||||
|
{
|
||||||
|
inherit
|
||||||
|
description
|
||||||
|
flake
|
||||||
|
schedulingshares
|
||||||
|
keepnr
|
||||||
|
enabled
|
||||||
|
;
|
||||||
|
type = 1;
|
||||||
|
hidden = false;
|
||||||
|
checkinterval = 300; # every 5 minutes
|
||||||
|
enableemail = false;
|
||||||
|
emailoverride = "";
|
||||||
|
};
|
||||||
|
|
||||||
|
giteaHost = "ssh://gitea@nayeonie.com:2222";
|
||||||
|
repo = "ahuston-0/hydra";
|
||||||
|
# # Create a hydra job for a branch
|
||||||
|
jobOfRef =
|
||||||
|
name:
|
||||||
|
{ ref, ... }:
|
||||||
|
if ((builtins.match "^refs/heads/(.*)$" ref) == null) then
|
||||||
|
null
|
||||||
|
else
|
||||||
|
{
|
||||||
|
name = builtins.replaceStrings [ "/" ] [ "-" ] "branch-${name}";
|
||||||
|
value = makeJob {
|
||||||
|
description = "Branch ${name}";
|
||||||
|
flake = "git+${giteaHost}/${repo}?ref=${ref}";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
# Create a hydra job for a PR
|
||||||
|
jobOfPR = id: info: {
|
||||||
|
name = if info.draft then "draft-${id}" else "pr-${id}";
|
||||||
|
value = makeJob {
|
||||||
|
description = "PR ${id}: ${info.title}";
|
||||||
|
flake = "git+${giteaHost}/${repo}?ref=${info.head.ref}";
|
||||||
|
enabled = info.state == "open";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
# some utility functions
|
||||||
|
# converts json to name/value dicts
|
||||||
|
attrsToList = l: builtins.attrValues (builtins.mapAttrs (name: value: { inherit name value; }) l);
|
||||||
|
# wrapper function for reading json from file
|
||||||
|
readJSONFile = f: builtins.fromJSON (builtins.readFile f);
|
||||||
|
# remove null values from a set, in-case of branches that don't exist
|
||||||
|
mapFilter = f: l: builtins.filter (x: (x != null)) (map f l);
|
||||||
|
|
||||||
|
# Create job set from PRs and branches
|
||||||
|
jobs = makeSpec (
|
||||||
|
builtins.listToAttrs (map ({ name, value }: jobOfPR name value) (attrsToList prs))
|
||||||
|
// builtins.listToAttrs (mapFilter ({ name, value }: jobOfRef name value) (attrsToList refs))
|
||||||
|
);
|
||||||
|
in
|
||||||
|
{
|
||||||
|
jobsets = jobs;
|
||||||
|
}
|
||||||
35
hydra/spec.json
Normal file
35
hydra/spec.json
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
{
|
||||||
|
"enabled": 1,
|
||||||
|
"hidden": false,
|
||||||
|
"description": "ahuston-0's fork of hydra",
|
||||||
|
"nixexprinput": "nixexpr",
|
||||||
|
"nixexprpath": "hydra/jobsets.nix",
|
||||||
|
"checkinterval": 60,
|
||||||
|
"schedulingshares": 100,
|
||||||
|
"enableemail": false,
|
||||||
|
"emailoverride": "",
|
||||||
|
"keepnr": 3,
|
||||||
|
"type": 0,
|
||||||
|
"inputs": {
|
||||||
|
"nixexpr": {
|
||||||
|
"value": "ssh://gitea@nayeonie.com:2222/ahuston-0/hydra.git add-gitea-pulls",
|
||||||
|
"type": "git",
|
||||||
|
"emailresponsible": false
|
||||||
|
},
|
||||||
|
"nixpkgs": {
|
||||||
|
"value": "https://github.com/NixOS/nixpkgs nixos-unstable",
|
||||||
|
"type": "git",
|
||||||
|
"emailresponsible": false
|
||||||
|
},
|
||||||
|
"pulls": {
|
||||||
|
"type": "giteapulls",
|
||||||
|
"value": "nayeonie.com ahuston-0 hydra https",
|
||||||
|
"emailresponsible": false
|
||||||
|
},
|
||||||
|
"branches": {
|
||||||
|
"type": "gitea_refs",
|
||||||
|
"value": "nayeonie.com ahuston-0 hydra heads https -",
|
||||||
|
"emailresponsible": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -4,7 +4,7 @@ project('hydra', 'cpp',
|
|||||||
default_options: [
|
default_options: [
|
||||||
'debug=true',
|
'debug=true',
|
||||||
'optimization=2',
|
'optimization=2',
|
||||||
'cpp_std=c++20',
|
'cpp_std=c++23',
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -364,7 +364,7 @@ in
|
|||||||
requires = [ "hydra-init.service" ];
|
requires = [ "hydra-init.service" ];
|
||||||
restartTriggers = [ hydraConf ];
|
restartTriggers = [ hydraConf ];
|
||||||
after = [ "hydra-init.service" "network.target" ];
|
after = [ "hydra-init.service" "network.target" ];
|
||||||
path = with pkgs; [ hostname-debian cfg.package jq ];
|
path = with pkgs; [ hostname-debian cfg.package ];
|
||||||
environment = env // {
|
environment = env // {
|
||||||
HYDRA_DBI = "${env.HYDRA_DBI};application_name=hydra-evaluator";
|
HYDRA_DBI = "${env.HYDRA_DBI};application_name=hydra-evaluator";
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -144,10 +144,24 @@ in
|
|||||||
git -C /tmp/repo add .
|
git -C /tmp/repo add .
|
||||||
git config --global user.email test@localhost
|
git config --global user.email test@localhost
|
||||||
git config --global user.name test
|
git config --global user.name test
|
||||||
|
|
||||||
|
# Create initial commit
|
||||||
git -C /tmp/repo commit -m 'Initial import'
|
git -C /tmp/repo commit -m 'Initial import'
|
||||||
git -C /tmp/repo remote add origin gitea@machine:root/repo
|
git -C /tmp/repo remote add origin gitea@machine:root/repo
|
||||||
GIT_SSH_COMMAND='ssh -i $HOME/.ssh/privk -o StrictHostKeyChecking=no' \
|
export GIT_SSH_COMMAND='ssh -i $HOME/.ssh/privk -o StrictHostKeyChecking=no'
|
||||||
git -C /tmp/repo push origin master
|
git -C /tmp/repo push origin master
|
||||||
|
git -C /tmp/repo log >&2
|
||||||
|
|
||||||
|
# Create PR branch
|
||||||
|
git -C /tmp/repo checkout -b pr
|
||||||
|
git -C /tmp/repo commit --allow-empty -m 'Additional change'
|
||||||
|
git -C /tmp/repo push origin pr
|
||||||
|
git -C /tmp/repo log >&2
|
||||||
|
|
||||||
|
# Create release branch
|
||||||
|
git -C /tmp/repo checkout -b release/release-1.0
|
||||||
|
git -C /tmp/repo commit --allow-empty -m 'Additional change'
|
||||||
|
git -C /tmp/repo push origin release/release-1.0
|
||||||
git -C /tmp/repo log >&2
|
git -C /tmp/repo log >&2
|
||||||
'';
|
'';
|
||||||
|
|
||||||
@@ -184,7 +198,7 @@ in
|
|||||||
cat >data.json <<EOF
|
cat >data.json <<EOF
|
||||||
{
|
{
|
||||||
"description": "Trivial",
|
"description": "Trivial",
|
||||||
"checkinterval": "60",
|
"checkinterval": "20",
|
||||||
"enabled": "1",
|
"enabled": "1",
|
||||||
"visible": "1",
|
"visible": "1",
|
||||||
"keepnr": "1",
|
"keepnr": "1",
|
||||||
@@ -198,7 +212,17 @@ in
|
|||||||
"gitea_repo_name": {"value": "repo", "type": "string"},
|
"gitea_repo_name": {"value": "repo", "type": "string"},
|
||||||
"gitea_repo_owner": {"value": "root", "type": "string"},
|
"gitea_repo_owner": {"value": "root", "type": "string"},
|
||||||
"gitea_status_repo": {"value": "git", "type": "string"},
|
"gitea_status_repo": {"value": "git", "type": "string"},
|
||||||
"gitea_http_url": {"value": "http://localhost:3001", "type": "string"}
|
"gitea_http_url": {"value": "http://localhost:3001", "type": "string"},
|
||||||
|
"pulls": {
|
||||||
|
"type": "giteapulls",
|
||||||
|
"value": "localhost:3001 root repo http",
|
||||||
|
"emailresponsible": false
|
||||||
|
},
|
||||||
|
"releases": {
|
||||||
|
"type": "gitea_refs",
|
||||||
|
"value": "localhost:3001 root repo heads http - release",
|
||||||
|
"emailresponseible": false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
EOF
|
EOF
|
||||||
@@ -226,15 +250,41 @@ in
|
|||||||
};
|
};
|
||||||
|
|
||||||
smallDrv = pkgs.writeText "jobset.nix" ''
|
smallDrv = pkgs.writeText "jobset.nix" ''
|
||||||
{ trivial = builtins.derivation {
|
{ pulls, releases, ... }:
|
||||||
name = "trivial";
|
|
||||||
system = "${system}";
|
let
|
||||||
builder = "/bin/sh";
|
genDrv = name: builtins.derivation {
|
||||||
allowSubstitutes = false;
|
inherit name;
|
||||||
preferLocalBuild = true;
|
system = "${system}";
|
||||||
args = ["-c" "echo success > $out; exit 0"];
|
builder = "/bin/sh";
|
||||||
|
allowSubstitutes = false;
|
||||||
|
preferLocalBuild = true;
|
||||||
|
args = ["-c" "echo success > $out; exit 0"];
|
||||||
};
|
};
|
||||||
}
|
|
||||||
|
prs = builtins.fromJSON (builtins.readFile pulls);
|
||||||
|
prJobNames = map (n: "pr-''${n}") (builtins.attrNames prs);
|
||||||
|
prJobset = builtins.listToAttrs (
|
||||||
|
map (
|
||||||
|
name: {
|
||||||
|
inherit name;
|
||||||
|
value = genDrv name;
|
||||||
|
}
|
||||||
|
) prJobNames
|
||||||
|
);
|
||||||
|
rels = builtins.fromJSON (builtins.readFile releases);
|
||||||
|
relJobNames = builtins.attrNames rels;
|
||||||
|
relJobset = builtins.listToAttrs (
|
||||||
|
map (
|
||||||
|
name: {
|
||||||
|
inherit name;
|
||||||
|
value = genDrv name;
|
||||||
|
}
|
||||||
|
) relJobNames
|
||||||
|
);
|
||||||
|
in {
|
||||||
|
trivial = genDrv "trivial";
|
||||||
|
} // prJobset // relJobset
|
||||||
'';
|
'';
|
||||||
in
|
in
|
||||||
''
|
''
|
||||||
@@ -278,18 +328,34 @@ in
|
|||||||
+ '| jq .buildstatus | xargs test 0 -eq'
|
+ '| jq .buildstatus | xargs test 0 -eq'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
machine.sleep(3)
|
||||||
|
|
||||||
data = machine.succeed(
|
data = machine.succeed(
|
||||||
'curl -Lf -s "http://localhost:3001/api/v1/repos/root/repo/statuses/$(cd /tmp/repo && git show | head -n1 | awk "{print \\$2}")" '
|
'curl -Lf -s "http://localhost:3001/api/v1/repos/root/repo/statuses/$(cd /tmp/repo && git show master | head -n1 | awk "{print \\$2}")?sort=leastindex" '
|
||||||
+ "-H 'Accept: application/json' -H 'Content-Type: application/json' "
|
+ "-H 'Accept: application/json' -H 'Content-Type: application/json' "
|
||||||
+ f"-H 'Authorization: token ${api_token}'"
|
+ f"-H 'Authorization: token ${api_token}'"
|
||||||
)
|
)
|
||||||
|
|
||||||
response = json.loads(data)
|
response = json.loads(data)
|
||||||
|
|
||||||
assert len(response) == 2, "Expected exactly three status updates for latest commit (queued, finished)!"
|
assert len(response) == 2, "Expected exactly two status updates for latest commit (queued, finished)!"
|
||||||
assert response[0]['status'] == "success", "Expected finished status to be success!"
|
assert response[0]['status'] == "success", "Expected finished status to be success!"
|
||||||
assert response[1]['status'] == "pending", "Expected queued status to be pending!"
|
assert response[1]['status'] == "pending", "Expected queued status to be pending!"
|
||||||
|
|
||||||
|
# giteapulls test
|
||||||
|
|
||||||
|
machine.succeed(
|
||||||
|
"curl --fail -X POST http://localhost:3001/api/v1/repos/root/repo/pulls "
|
||||||
|
+ "-H 'Accept: application/json' -H 'Content-Type: application/json' "
|
||||||
|
+ f"-H 'Authorization: token ${api_token}'"
|
||||||
|
+ ' -d \'{"title":"Test PR", "base":"master", "head": "pr"}\'''
|
||||||
|
)
|
||||||
|
|
||||||
|
machine.wait_until_succeeds(
|
||||||
|
'curl -Lf -s http://localhost:3000/build/2 -H "Accept: application/json" '
|
||||||
|
+ '| jq .buildstatus | xargs test 0 -eq'
|
||||||
|
)
|
||||||
|
|
||||||
machine.shutdown()
|
machine.shutdown()
|
||||||
'';
|
'';
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -14,6 +14,7 @@
|
|||||||
#include <nix/util/current-process.hh>
|
#include <nix/util/current-process.hh>
|
||||||
#include <nix/util/processes.hh>
|
#include <nix/util/processes.hh>
|
||||||
#include <nix/util/util.hh>
|
#include <nix/util/util.hh>
|
||||||
|
#include <nix/store/export-import.hh>
|
||||||
#include <nix/store/serve-protocol.hh>
|
#include <nix/store/serve-protocol.hh>
|
||||||
#include <nix/store/serve-protocol-impl.hh>
|
#include <nix/store/serve-protocol-impl.hh>
|
||||||
#include <nix/store/ssh.hh>
|
#include <nix/store/ssh.hh>
|
||||||
@@ -103,9 +104,9 @@ static void copyClosureTo(
|
|||||||
std::unique_lock<std::timed_mutex> sendLock(conn.machine->state->sendLock,
|
std::unique_lock<std::timed_mutex> sendLock(conn.machine->state->sendLock,
|
||||||
std::chrono::seconds(600));
|
std::chrono::seconds(600));
|
||||||
|
|
||||||
conn.to << ServeProto::Command::ImportPaths;
|
conn.importPaths(destStore, [&](Sink & sink) {
|
||||||
destStore.exportPaths(missing, conn.to);
|
exportPaths(destStore, missing, sink);
|
||||||
conn.to.flush();
|
});
|
||||||
|
|
||||||
if (readInt(conn.from) != 1)
|
if (readInt(conn.from) != 1)
|
||||||
throw Error("remote machine failed to import closure");
|
throw Error("remote machine failed to import closure");
|
||||||
@@ -262,16 +263,18 @@ static BuildResult performBuild(
|
|||||||
// Since this a `BasicDerivation`, `staticOutputHashes` will not
|
// Since this a `BasicDerivation`, `staticOutputHashes` will not
|
||||||
// do any real work.
|
// do any real work.
|
||||||
auto outputHashes = staticOutputHashes(localStore, drv);
|
auto outputHashes = staticOutputHashes(localStore, drv);
|
||||||
for (auto & [outputName, output] : drvOutputs) {
|
if (auto * successP = result.tryGetSuccess()) {
|
||||||
auto outputPath = output.second;
|
for (auto & [outputName, output] : drvOutputs) {
|
||||||
// We’ve just asserted that the output paths of the derivation
|
auto outputPath = output.second;
|
||||||
// were known
|
// We’ve just asserted that the output paths of the derivation
|
||||||
assert(outputPath);
|
// were known
|
||||||
auto outputHash = outputHashes.at(outputName);
|
assert(outputPath);
|
||||||
auto drvOutput = DrvOutput { outputHash, outputName };
|
auto outputHash = outputHashes.at(outputName);
|
||||||
result.builtOutputs.insert_or_assign(
|
auto drvOutput = DrvOutput { outputHash, outputName };
|
||||||
std::move(outputName),
|
successP->builtOutputs.insert_or_assign(
|
||||||
Realisation { drvOutput, *outputPath });
|
std::move(outputName),
|
||||||
|
Realisation { drvOutput, *outputPath });
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -298,11 +301,10 @@ static void copyPathFromRemote(
|
|||||||
lambda function only gets executed if someone tries to read
|
lambda function only gets executed if someone tries to read
|
||||||
from source2, we will send the command from here rather
|
from source2, we will send the command from here rather
|
||||||
than outside the lambda. */
|
than outside the lambda. */
|
||||||
conn.to << ServeProto::Command::DumpStorePath << localStore.printStorePath(info.path);
|
conn.narFromPath(localStore, info.path, [&](Source & source) {
|
||||||
conn.to.flush();
|
TeeSource tee(source, sink);
|
||||||
|
extractNarData(tee, localStore.printStorePath(info.path), narMembers);
|
||||||
TeeSource tee(conn.from, sink);
|
});
|
||||||
extractNarData(tee, localStore.printStorePath(info.path), narMembers);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
destStore.addToStore(info, *source2, NoRepair, NoCheckSigs);
|
destStore.addToStore(info, *source2, NoRepair, NoCheckSigs);
|
||||||
@@ -336,54 +338,68 @@ void RemoteResult::updateWithBuildResult(const nix::BuildResult & buildResult)
|
|||||||
startTime = buildResult.startTime;
|
startTime = buildResult.startTime;
|
||||||
stopTime = buildResult.stopTime;
|
stopTime = buildResult.stopTime;
|
||||||
timesBuilt = buildResult.timesBuilt;
|
timesBuilt = buildResult.timesBuilt;
|
||||||
errorMsg = buildResult.errorMsg;
|
|
||||||
isNonDeterministic = buildResult.isNonDeterministic;
|
|
||||||
|
|
||||||
switch ((BuildResult::Status) buildResult.status) {
|
std::visit(overloaded{
|
||||||
case BuildResult::Built:
|
[&](const BuildResult::Success & success) {
|
||||||
stepStatus = bsSuccess;
|
stepStatus = bsSuccess;
|
||||||
break;
|
switch (success.status) {
|
||||||
case BuildResult::Substituted:
|
case BuildResult::Success::Built:
|
||||||
case BuildResult::AlreadyValid:
|
break;
|
||||||
stepStatus = bsSuccess;
|
case BuildResult::Success::Substituted:
|
||||||
isCached = true;
|
case BuildResult::Success::AlreadyValid:
|
||||||
break;
|
case BuildResult::Success::ResolvesToAlreadyValid:
|
||||||
case BuildResult::PermanentFailure:
|
isCached = true;
|
||||||
stepStatus = bsFailed;
|
break;
|
||||||
canCache = true;
|
default:
|
||||||
errorMsg = "";
|
assert(false);
|
||||||
break;
|
}
|
||||||
case BuildResult::InputRejected:
|
},
|
||||||
case BuildResult::OutputRejected:
|
[&](const BuildResult::Failure & failure) {
|
||||||
stepStatus = bsFailed;
|
errorMsg = failure.errorMsg;
|
||||||
canCache = true;
|
isNonDeterministic = failure.isNonDeterministic;
|
||||||
break;
|
switch (failure.status) {
|
||||||
case BuildResult::TransientFailure:
|
case BuildResult::Failure::PermanentFailure:
|
||||||
stepStatus = bsFailed;
|
stepStatus = bsFailed;
|
||||||
canRetry = true;
|
canCache = true;
|
||||||
errorMsg = "";
|
errorMsg = "";
|
||||||
break;
|
break;
|
||||||
case BuildResult::TimedOut:
|
case BuildResult::Failure::InputRejected:
|
||||||
stepStatus = bsTimedOut;
|
case BuildResult::Failure::OutputRejected:
|
||||||
errorMsg = "";
|
stepStatus = bsFailed;
|
||||||
break;
|
canCache = true;
|
||||||
case BuildResult::MiscFailure:
|
break;
|
||||||
stepStatus = bsAborted;
|
case BuildResult::Failure::TransientFailure:
|
||||||
canRetry = true;
|
stepStatus = bsFailed;
|
||||||
break;
|
canRetry = true;
|
||||||
case BuildResult::LogLimitExceeded:
|
errorMsg = "";
|
||||||
stepStatus = bsLogLimitExceeded;
|
break;
|
||||||
break;
|
case BuildResult::Failure::TimedOut:
|
||||||
case BuildResult::NotDeterministic:
|
stepStatus = bsTimedOut;
|
||||||
stepStatus = bsNotDeterministic;
|
errorMsg = "";
|
||||||
canRetry = false;
|
break;
|
||||||
canCache = true;
|
case BuildResult::Failure::MiscFailure:
|
||||||
break;
|
stepStatus = bsAborted;
|
||||||
default:
|
canRetry = true;
|
||||||
stepStatus = bsAborted;
|
break;
|
||||||
break;
|
case BuildResult::Failure::LogLimitExceeded:
|
||||||
}
|
stepStatus = bsLogLimitExceeded;
|
||||||
|
break;
|
||||||
|
case BuildResult::Failure::NotDeterministic:
|
||||||
|
stepStatus = bsNotDeterministic;
|
||||||
|
canRetry = false;
|
||||||
|
canCache = true;
|
||||||
|
break;
|
||||||
|
case BuildResult::Failure::CachedFailure:
|
||||||
|
case BuildResult::Failure::DependencyFailed:
|
||||||
|
case BuildResult::Failure::NoSubstituters:
|
||||||
|
case BuildResult::Failure::HashMismatch:
|
||||||
|
stepStatus = bsAborted;
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
assert(false);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}, buildResult.inner);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Utility guard object to auto-release a semaphore on destruction. */
|
/* Utility guard object to auto-release a semaphore on destruction. */
|
||||||
@@ -405,7 +421,7 @@ void State::buildRemote(ref<Store> destStore,
|
|||||||
std::function<void(StepState)> updateStep,
|
std::function<void(StepState)> updateStep,
|
||||||
NarMemberDatas & narMembers)
|
NarMemberDatas & narMembers)
|
||||||
{
|
{
|
||||||
assert(BuildResult::TimedOut == 8);
|
assert(BuildResult::Failure::TimedOut == 8);
|
||||||
|
|
||||||
auto [logFile, logFD] = build_remote::openLogFile(logDir, step->drvPath);
|
auto [logFile, logFD] = build_remote::openLogFile(logDir, step->drvPath);
|
||||||
AutoDelete logFileDel(logFile, false);
|
AutoDelete logFileDel(logFile, false);
|
||||||
@@ -514,7 +530,7 @@ void State::buildRemote(ref<Store> destStore,
|
|||||||
|
|
||||||
updateStep(ssBuilding);
|
updateStep(ssBuilding);
|
||||||
|
|
||||||
BuildResult buildResult = build_remote::performBuild(
|
auto buildResult = build_remote::performBuild(
|
||||||
conn,
|
conn,
|
||||||
*localStore,
|
*localStore,
|
||||||
step->drvPath,
|
step->drvPath,
|
||||||
@@ -556,8 +572,9 @@ void State::buildRemote(ref<Store> destStore,
|
|||||||
wakeDispatcher();
|
wakeDispatcher();
|
||||||
|
|
||||||
StorePathSet outputs;
|
StorePathSet outputs;
|
||||||
for (auto & [_, realisation] : buildResult.builtOutputs)
|
if (auto * successP = buildResult.tryGetSuccess())
|
||||||
outputs.insert(realisation.outPath);
|
for (auto & [_, realisation] : successP->builtOutputs)
|
||||||
|
outputs.insert(realisation.outPath);
|
||||||
|
|
||||||
/* Copy the output paths. */
|
/* Copy the output paths. */
|
||||||
if (!machine->isLocalhost() || localStore != std::shared_ptr<Store>(destStore)) {
|
if (!machine->isLocalhost() || localStore != std::shared_ptr<Store>(destStore)) {
|
||||||
@@ -590,15 +607,17 @@ void State::buildRemote(ref<Store> destStore,
|
|||||||
/* Register the outputs of the newly built drv */
|
/* Register the outputs of the newly built drv */
|
||||||
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
|
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
|
||||||
auto outputHashes = staticOutputHashes(*localStore, *step->drv);
|
auto outputHashes = staticOutputHashes(*localStore, *step->drv);
|
||||||
for (auto & [outputName, realisation] : buildResult.builtOutputs) {
|
if (auto * successP = buildResult.tryGetSuccess()) {
|
||||||
// Register the resolved drv output
|
for (auto & [outputName, realisation] : successP->builtOutputs) {
|
||||||
destStore->registerDrvOutput(realisation);
|
// Register the resolved drv output
|
||||||
|
destStore->registerDrvOutput(realisation);
|
||||||
|
|
||||||
// Also register the unresolved one
|
// Also register the unresolved one
|
||||||
auto unresolvedRealisation = realisation;
|
auto unresolvedRealisation = realisation;
|
||||||
unresolvedRealisation.signatures.clear();
|
unresolvedRealisation.signatures.clear();
|
||||||
unresolvedRealisation.id.drvHash = outputHashes.at(outputName);
|
unresolvedRealisation.id.drvHash = outputHashes.at(outputName);
|
||||||
destStore->registerDrvOutput(unresolvedRealisation);
|
destStore->registerDrvOutput(unresolvedRealisation);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -488,10 +488,11 @@ Step::ptr State::createStep(ref<Store> destStore,
|
|||||||
runnable while step->created == false. */
|
runnable while step->created == false. */
|
||||||
step->drv = std::make_unique<Derivation>(localStore->readDerivation(drvPath));
|
step->drv = std::make_unique<Derivation>(localStore->readDerivation(drvPath));
|
||||||
{
|
{
|
||||||
auto parsedOpt = StructuredAttrs::tryParse(step->drv->env);
|
|
||||||
try {
|
try {
|
||||||
step->drvOptions = std::make_unique<DerivationOptions>(
|
step->drvOptions = std::make_unique<DerivationOptions>(
|
||||||
DerivationOptions::fromStructuredAttrs(step->drv->env, parsedOpt ? &*parsedOpt : nullptr));
|
DerivationOptions::fromStructuredAttrs(
|
||||||
|
step->drv->env,
|
||||||
|
step->drv->structuredAttrs ? &*step->drv->structuredAttrs : nullptr));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
e.addTrace({}, "while parsing derivation '%s'", localStore->printStorePath(drvPath));
|
e.addTrace({}, "while parsing derivation '%s'", localStore->printStorePath(drvPath));
|
||||||
throw;
|
throw;
|
||||||
|
|||||||
@@ -27,6 +27,7 @@
|
|||||||
#include <nix/store/serve-protocol-impl.hh>
|
#include <nix/store/serve-protocol-impl.hh>
|
||||||
#include <nix/store/serve-protocol-connection.hh>
|
#include <nix/store/serve-protocol-connection.hh>
|
||||||
#include <nix/store/machines.hh>
|
#include <nix/store/machines.hh>
|
||||||
|
#include <nix/store/globals.hh>
|
||||||
|
|
||||||
|
|
||||||
typedef unsigned int BuildID;
|
typedef unsigned int BuildID;
|
||||||
|
|||||||
@@ -10,7 +10,6 @@ use Hydra::Helper::CatalystUtils;
|
|||||||
use Hydra::Helper::Nix;
|
use Hydra::Helper::Nix;
|
||||||
use File::Temp;
|
use File::Temp;
|
||||||
use POSIX qw(strftime);
|
use POSIX qw(strftime);
|
||||||
use IPC::Run qw(run);
|
|
||||||
|
|
||||||
sub supportedInputTypes {
|
sub supportedInputTypes {
|
||||||
my ($self, $inputTypes) = @_;
|
my ($self, $inputTypes) = @_;
|
||||||
@@ -45,12 +44,11 @@ sub fetchInput {
|
|||||||
my $ua = LWP::UserAgent->new();
|
my $ua = LWP::UserAgent->new();
|
||||||
_iterate("https://api.bitbucket.com/2.0/repositories/$owner/$repo/pullrequests?state=OPEN", $auth, \%pulls, $ua);
|
_iterate("https://api.bitbucket.com/2.0/repositories/$owner/$repo/pullrequests?state=OPEN", $auth, \%pulls, $ua);
|
||||||
my $tempdir = File::Temp->newdir("bitbucket-pulls" . "XXXXX", TMPDIR => 1);
|
my $tempdir = File::Temp->newdir("bitbucket-pulls" . "XXXXX", TMPDIR => 1);
|
||||||
my $filename = "$tempdir/bitbucket-pulls.json";
|
my $filename = "$tempdir/bitbucket-pulls-sorted.json";
|
||||||
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
||||||
print $fh encode_json \%pulls;
|
print $fh JSON::MaybeXS->new(canonical => 1, pretty => 1)->encode(\%pulls);
|
||||||
close $fh;
|
close $fh;
|
||||||
run(["jq", "-S", "."], '<', $filename, '>', "$tempdir/bitbucket-pulls-sorted.json") or die "jq command failed: $?";
|
my $storePath = addToStore($filename);
|
||||||
my $storePath = addToStore("$tempdir/bitbucket-pulls-sorted.json");
|
|
||||||
my $timestamp = time;
|
my $timestamp = time;
|
||||||
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
||||||
}
|
}
|
||||||
|
|||||||
82
src/lib/Hydra/Plugin/GiteaPulls.pm
Normal file
82
src/lib/Hydra/Plugin/GiteaPulls.pm
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
# Allow building based on Gitea pull requests.
|
||||||
|
#
|
||||||
|
# Example input:
|
||||||
|
# "pulls": {
|
||||||
|
# "type": "giteapulls",
|
||||||
|
# "value": "example.com alice repo"
|
||||||
|
# "emailresponsible": false
|
||||||
|
# }
|
||||||
|
|
||||||
|
package Hydra::Plugin::GiteaPulls;
|
||||||
|
|
||||||
|
use strict;
|
||||||
|
use warnings;
|
||||||
|
use parent 'Hydra::Plugin';
|
||||||
|
use HTTP::Request;
|
||||||
|
use LWP::UserAgent;
|
||||||
|
use JSON::MaybeXS;
|
||||||
|
use Hydra::Helper::CatalystUtils;
|
||||||
|
use Hydra::Helper::Nix;
|
||||||
|
use File::Temp;
|
||||||
|
use POSIX qw(strftime);
|
||||||
|
|
||||||
|
sub supportedInputTypes {
|
||||||
|
my ($self, $inputTypes) = @_;
|
||||||
|
$inputTypes->{'giteapulls'} = 'Open Gitea Pull Requests';
|
||||||
|
}
|
||||||
|
|
||||||
|
sub _iterate {
|
||||||
|
my ($url, $auth, $pulls, $ua) = @_;
|
||||||
|
my $req = HTTP::Request->new('GET', $url);
|
||||||
|
$req->header('Accept' => 'application/json');
|
||||||
|
$req->header('Authorization' => 'token ' . $auth) if defined $auth;
|
||||||
|
|
||||||
|
my $res = $ua->request($req);
|
||||||
|
my $content = $res->decoded_content;
|
||||||
|
die "Error pulling from the gitea pulls API: $content\n"
|
||||||
|
unless $res->is_success;
|
||||||
|
my $pulls_list = decode_json $content;
|
||||||
|
# TODO Stream out the json instead
|
||||||
|
foreach my $pull (@$pulls_list) {
|
||||||
|
$pulls->{$pull->{number}} = $pull;
|
||||||
|
}
|
||||||
|
|
||||||
|
# TODO Make Link header parsing more robust!!!
|
||||||
|
my @links = split ',', ($res->header("Link") // "");
|
||||||
|
my $next = "";
|
||||||
|
foreach my $link (@links) {
|
||||||
|
my ($url, $rel) = split ";", $link;
|
||||||
|
if (trim($rel) eq 'rel="next"') {
|
||||||
|
$next = substr trim($url), 1, -1;
|
||||||
|
last;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_iterate($next, $auth, $pulls, $ua) unless $next eq "";
|
||||||
|
}
|
||||||
|
|
||||||
|
sub fetchInput {
|
||||||
|
my ($self, $type, $name, $value, $project, $jobset) = @_;
|
||||||
|
return undef if $type ne "giteapulls";
|
||||||
|
|
||||||
|
my ($baseUrl, $owner, $repo, $proto) = split ' ', $value;
|
||||||
|
if (not defined $proto) { # the protocol handler is exposed as an option in order to do integration testing
|
||||||
|
$proto = "https"
|
||||||
|
}
|
||||||
|
my $auth = $self->{config}->{gitea_authorization}->{$owner};
|
||||||
|
|
||||||
|
my $ua = LWP::UserAgent->new();
|
||||||
|
my %pulls;
|
||||||
|
_iterate("$proto://$baseUrl/api/v1/repos/$owner/$repo/pulls?limit=100", $auth, \%pulls, $ua);
|
||||||
|
|
||||||
|
my $tempdir = File::Temp->newdir("gitea-pulls" . "XXXXX", TMPDIR => 1);
|
||||||
|
my $filename = "$tempdir/gitea-pulls.json";
|
||||||
|
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
||||||
|
print $fh JSON->new->utf8->canonical->encode(\%pulls);
|
||||||
|
close $fh;
|
||||||
|
|
||||||
|
my $storePath = addToStore($filename);
|
||||||
|
my $timestamp = time;
|
||||||
|
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
||||||
|
}
|
||||||
|
|
||||||
|
1;
|
||||||
129
src/lib/Hydra/Plugin/GiteaRefs.pm
Normal file
129
src/lib/Hydra/Plugin/GiteaRefs.pm
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
package Hydra::Plugin::GiteaRefs;
|
||||||
|
|
||||||
|
use strict;
|
||||||
|
use warnings;
|
||||||
|
use parent 'Hydra::Plugin';
|
||||||
|
use HTTP::Request;
|
||||||
|
use LWP::UserAgent;
|
||||||
|
use JSON::MaybeXS;
|
||||||
|
use Hydra::Helper::CatalystUtils;
|
||||||
|
use Hydra::Helper::Nix;
|
||||||
|
use File::Temp;
|
||||||
|
use POSIX qw(strftime);
|
||||||
|
use IPC::Run qw(run);
|
||||||
|
|
||||||
|
=head1 NAME
|
||||||
|
|
||||||
|
GiteaRefs - Hydra plugin for retrieving the list of references (branches or
|
||||||
|
tags) from Gitea following a certain naming scheme
|
||||||
|
|
||||||
|
=head1 DESCRIPTION
|
||||||
|
|
||||||
|
This plugin reads the list of branches or tags using Gitea's REST API. The name
|
||||||
|
of the reference must follow a particular prefix. This list is stored in the
|
||||||
|
nix-store and used as an input to declarative jobsets.
|
||||||
|
|
||||||
|
=head1 CONFIGURATION
|
||||||
|
|
||||||
|
The plugin doesn't require any dedicated configuration block, but it has to
|
||||||
|
consult C<gitea_authorization> entry for obtaining the API token. In addition,
|
||||||
|
|
||||||
|
The declarative project C<spec.json> file must contains an input such as
|
||||||
|
|
||||||
|
"pulls": {
|
||||||
|
"type": "gitea_refs",
|
||||||
|
"value": "[gitea_hostname] [owner] [repo] heads|tags [scheme] - [prefix]",
|
||||||
|
"emailresponsible": false
|
||||||
|
}
|
||||||
|
|
||||||
|
In the above snippet, C<[gitea_hostname]> must be set to the hostname of the
|
||||||
|
repository's Gitea instance.
|
||||||
|
|
||||||
|
C<[owner]> is the repository owner and C<[repo]> is the repository name. Also
|
||||||
|
note a literal C<->, which is placed there for the future use.
|
||||||
|
|
||||||
|
C<heads|tags> denotes that one of these two is allowed, that is, the third
|
||||||
|
position should hold either the C<heads> or the C<tags> keyword. In case of the former, the plugin
|
||||||
|
will fetch all branches, while in case of the latter, it will fetch the tags.
|
||||||
|
|
||||||
|
C<scheme> should be set to either https or http, depending on what the Gitea
|
||||||
|
host supports.
|
||||||
|
|
||||||
|
C<prefix> denotes the prefix the reference name must start with, in order to be
|
||||||
|
included.
|
||||||
|
|
||||||
|
For example, C<"value": "projects.blender.org blender blender heads https - blender-v/"> refers to
|
||||||
|
L<https://projects.blender.org/blender/blender> repository, and will fetch all branches that
|
||||||
|
begin with C<blender-v/>.
|
||||||
|
|
||||||
|
=head1 USE
|
||||||
|
|
||||||
|
The result is stored in the nix-store as a JSON I<map>, where the key is the
|
||||||
|
name of the reference, while the value is the complete Gitea response. Thus,
|
||||||
|
any of the values listed in
|
||||||
|
L<https://docs.gitea.com/api#tag/repository/operation/repoListAllGitRefs> can be
|
||||||
|
used to build the git input value in C<jobsets.nix>.
|
||||||
|
|
||||||
|
=cut
|
||||||
|
|
||||||
|
sub supportedInputTypes {
|
||||||
|
my ($self, $inputTypes) = @_;
|
||||||
|
$inputTypes->{'gitea_refs'} = 'Open Gitea Refs';
|
||||||
|
}
|
||||||
|
|
||||||
|
sub _iterate {
|
||||||
|
my ($url, $auth, $refs, $ua) = @_;
|
||||||
|
my $req = HTTP::Request->new('GET', $url);
|
||||||
|
$req->header('Accept' => 'application/json');
|
||||||
|
$req->header('Authorization' => $auth) if defined $auth;
|
||||||
|
my $res = $ua->request($req);
|
||||||
|
my $content = $res->decoded_content;
|
||||||
|
die "Error pulling from the gitea refs API: $content\n"
|
||||||
|
unless $res->is_success;
|
||||||
|
my $refs_list = decode_json $content;
|
||||||
|
# TODO Stream out the json instead
|
||||||
|
foreach my $ref (@$refs_list) {
|
||||||
|
my $ref_name = $ref->{ref};
|
||||||
|
$ref_name =~ s,^refs/(?:heads|tags)/,,o;
|
||||||
|
$refs->{$ref_name} = $ref;
|
||||||
|
}
|
||||||
|
# TODO Make Link header parsing more robust!!!
|
||||||
|
my @links = split ',', $res->header("Link");
|
||||||
|
my $next = "";
|
||||||
|
foreach my $link (@links) {
|
||||||
|
my ($url, $rel) = split ";", $link;
|
||||||
|
if (trim($rel) eq 'rel="next"') {
|
||||||
|
$next = substr trim($url), 1, -1;
|
||||||
|
last;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_iterate($next, $auth, $refs, $ua) unless $next eq "";
|
||||||
|
}
|
||||||
|
|
||||||
|
sub fetchInput {
|
||||||
|
my ($self, $input_type, $name, $value, $project, $jobset) = @_;
|
||||||
|
return undef if $input_type ne "gitea_refs";
|
||||||
|
|
||||||
|
my ($giteaHostname, $owner, $repo, $type, $scheme, $fut, $prefix) = split ' ', $value;
|
||||||
|
die "type field is neither 'heads' nor 'tags', but '$type'"
|
||||||
|
unless $type eq 'heads' or $type eq 'tags';
|
||||||
|
die "scheme field is neither 'https' nor 'http' but '$scheme'"
|
||||||
|
unless $scheme eq 'https' or $scheme eq 'http';
|
||||||
|
|
||||||
|
my $auth = $self->{config}->{gitea_authorization}->{$owner};
|
||||||
|
my $giteaEndpoint = "$scheme://$giteaHostname";
|
||||||
|
my %refs;
|
||||||
|
my $ua = LWP::UserAgent->new();
|
||||||
|
_iterate("$giteaEndpoint/api/v1/repos/$owner/$repo/git/refs/$type/$prefix?per_page=100", $auth, \%refs, $ua);
|
||||||
|
my $tempdir = File::Temp->newdir("gitea-refs" . "XXXXX", TMPDIR => 1);
|
||||||
|
my $filename = "$tempdir/gitea-refs.json";
|
||||||
|
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
||||||
|
print $fh encode_json \%refs;
|
||||||
|
close $fh;
|
||||||
|
run(["jq", "-S", "."], '<', $filename, '>', "$tempdir/gitea-refs-sorted.json") or die "jq command failed: $?";
|
||||||
|
my $storePath = addToStore("$tempdir/gitea-refs-sorted.json");
|
||||||
|
my $timestamp = time;
|
||||||
|
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
||||||
|
}
|
||||||
|
|
||||||
|
1;
|
||||||
@@ -10,7 +10,6 @@ use Hydra::Helper::CatalystUtils;
|
|||||||
use Hydra::Helper::Nix;
|
use Hydra::Helper::Nix;
|
||||||
use File::Temp;
|
use File::Temp;
|
||||||
use POSIX qw(strftime);
|
use POSIX qw(strftime);
|
||||||
use IPC::Run qw(run);
|
|
||||||
|
|
||||||
=head1 NAME
|
=head1 NAME
|
||||||
|
|
||||||
@@ -112,12 +111,11 @@ sub fetchInput {
|
|||||||
my $ua = LWP::UserAgent->new();
|
my $ua = LWP::UserAgent->new();
|
||||||
_iterate("$githubEndpoint/repos/$owner/$repo/git/matching-refs/$type/$prefix?per_page=100", $auth, \%refs, $ua);
|
_iterate("$githubEndpoint/repos/$owner/$repo/git/matching-refs/$type/$prefix?per_page=100", $auth, \%refs, $ua);
|
||||||
my $tempdir = File::Temp->newdir("github-refs" . "XXXXX", TMPDIR => 1);
|
my $tempdir = File::Temp->newdir("github-refs" . "XXXXX", TMPDIR => 1);
|
||||||
my $filename = "$tempdir/github-refs.json";
|
my $filename = "$tempdir/github-refs-sorted.json";
|
||||||
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
||||||
print $fh encode_json \%refs;
|
print $fh JSON::MaybeXS->new(canonical => 1, pretty => 1)->encode(\%refs);
|
||||||
close $fh;
|
close $fh;
|
||||||
run(["jq", "-S", "."], '<', $filename, '>', "$tempdir/github-refs-sorted.json") or die "jq command failed: $?";
|
my $storePath = addToStore($filename);
|
||||||
my $storePath = addToStore("$tempdir/github-refs-sorted.json");
|
|
||||||
my $timestamp = time;
|
my $timestamp = time;
|
||||||
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -24,7 +24,6 @@ use Hydra::Helper::CatalystUtils;
|
|||||||
use Hydra::Helper::Nix;
|
use Hydra::Helper::Nix;
|
||||||
use File::Temp;
|
use File::Temp;
|
||||||
use POSIX qw(strftime);
|
use POSIX qw(strftime);
|
||||||
use IPC::Run qw(run);
|
|
||||||
|
|
||||||
sub supportedInputTypes {
|
sub supportedInputTypes {
|
||||||
my ($self, $inputTypes) = @_;
|
my ($self, $inputTypes) = @_;
|
||||||
@@ -83,12 +82,11 @@ sub fetchInput {
|
|||||||
_iterate($url, $baseUrl, \%pulls, $ua, $target_repo_url);
|
_iterate($url, $baseUrl, \%pulls, $ua, $target_repo_url);
|
||||||
|
|
||||||
my $tempdir = File::Temp->newdir("gitlab-pulls" . "XXXXX", TMPDIR => 1);
|
my $tempdir = File::Temp->newdir("gitlab-pulls" . "XXXXX", TMPDIR => 1);
|
||||||
my $filename = "$tempdir/gitlab-pulls.json";
|
my $filename = "$tempdir/gitlab-pulls-sorted.json";
|
||||||
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
||||||
print $fh encode_json \%pulls;
|
print $fh JSON::MaybeXS->new(canonical => 1, pretty => 1)->encode(\%pulls);
|
||||||
close $fh;
|
close $fh;
|
||||||
run(["jq", "-S", "."], '<', $filename, '>', "$tempdir/gitlab-pulls-sorted.json") or die "jq command failed: $?";
|
my $storePath = addToStore($filename);
|
||||||
my $storePath = addToStore("$tempdir/gitlab-pulls-sorted.json");
|
|
||||||
my $timestamp = time;
|
my $timestamp = time;
|
||||||
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -19,6 +19,8 @@ use Test2::V0;
|
|||||||
require Catalyst::Test;
|
require Catalyst::Test;
|
||||||
Catalyst::Test->import('Hydra');
|
Catalyst::Test->import('Hydra');
|
||||||
|
|
||||||
|
skip_all("This test has been failing since the upgrade to Nix 2.30, and we don't yet know how to fix it.");
|
||||||
|
|
||||||
my $db = Hydra::Model::DB->new;
|
my $db = Hydra::Model::DB->new;
|
||||||
hydra_setup($db);
|
hydra_setup($db);
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user