Compare commits
add-gitea-... ... merge/upst...

21 Commits (SHA1):
fed0c517bf
082fb6d40c
230f71e7bb
a1a2d37290
ff5a37edc4
0f3fcf5e80
cad08f87d2
3fef32b364
ae18a7b3ae
b657bcdfb7
3b4c4972c2
b2fe3f5218
9911f0107f
feebb61897
4bcbed2f1b
987dad3371
d2db3c7446
97dcdae068
9a5bd39d4c
f1deb22c02
d22d030503
.github/workflows/test.yml (vendored, 21 changed lines)

@@ -1,27 +1,14 @@
 name: "Test"
 on:
   pull_request:
-  merge_group:
   push:
-    branches:
-      - master
 jobs:
   tests:
-    strategy:
-      matrix:
-        include:
-          - system: x86_64-linux
-            runner: ubuntu-latest
-          - system: aarch64-linux
-            runner: ubuntu-24.04-arm
-    runs-on: ${{ matrix.runner }}
+    runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@v3
       with:
         fetch-depth: 0
-    - uses: cachix/install-nix-action@v31
-      with:
-        extra_nix_config: |
-          extra-systems = ${{ matrix.system }}
-    - uses: DeterminateSystems/magic-nix-cache-action@main
-    - run: nix-build -A checks.${{ matrix.system }}.build -A checks.${{ matrix.system }}.validate-openapi
+    - uses: cachix/install-nix-action@v17
+    #- run: nix flake check
+    - run: nix-build -A checks.x86_64-linux.build -A checks.x86_64-linux.validate-openapi
.github/workflows/update-flakes.yml (vendored, 28 changed lines)

@@ -1,28 +0,0 @@
-name: "Update Flakes"
-on:
-  schedule:
-    # Run weekly on Monday at 00:00 UTC
-    - cron: '0 0 * * 1'
-  workflow_dispatch:
-jobs:
-  update-flakes:
-    runs-on: ubuntu-latest
-    permissions:
-      contents: write
-      pull-requests: write
-    steps:
-    - uses: actions/checkout@v3
-    - uses: cachix/install-nix-action@v31
-    - name: Update flake inputs
-      run: nix flake update
-    - name: Create Pull Request
-      uses: peter-evans/create-pull-request@v5
-      with:
-        commit-message: "flake.lock: Update"
-        title: "Update flake inputs"
-        body: |
-          Automated flake input updates.
-
-          This PR was automatically created by the update-flakes workflow.
-        branch: update-flakes
-        delete-branch: true
.gitignore (vendored, 4 changed lines)

@@ -1,9 +1,5 @@
 *~
-/.direnv/
 .test_info.*
-/src/root/static/bootstrap
-/src/root/static/fontawesome
-/src/root/static/js/flot
 /src/sql/hydra-postgresql.sql
 /src/sql/hydra-sqlite.sql
 /src/sql/tmp.sqlite
@@ -2,9 +2,3 @@ theme = community
 
 # 5 is the least complainy, 1 is the most complainy
 severity = 1
-
-# Disallow backticks - use IPC::Run3 instead for better security
-include = InputOutput::ProhibitBacktickOperators
-
-# Prohibit shell-invoking system() and exec() - use list form or IPC::Run3 instead
-include = Hydra::ProhibitShellInvokingSystemCalls
README.md (13 changed lines)

@@ -23,7 +23,7 @@ Running Hydra is currently only supported on NixOS. The [hydra module](https://g
 }
 ```
 ### Creating An Admin User
-Once the Hydra service has been configured as above and activated, you should already be able to access the UI interface at the specified URL. However some actions require an admin user which has to be created first:
+Once the Hydra service has been configured as above and activate you should already be able to access the UI interface at the specified URL. However some actions require an admin user which has to be created first:
 
 ```
 $ su - hydra
@@ -80,15 +80,10 @@ $ nix build
 You can use the provided shell.nix to get a working development environment:
 ```
 $ nix develop
-$ ln -svf ../../../build/src/bootstrap src/root/static/bootstrap
-$ ln -svf ../../../build/src/fontawesome src/root/static/fontawesome
-$ ln -svf ../../../../build/src/flot src/root/static/js/flot
-$ meson setup build
-$ ninja -C build
+$ mesonConfigurePhase
+$ ninja
 ```
 
-The development environment can also automatically be established using [nix-direnv](https://github.com/nix-community/nix-direnv).
-
 ### Executing Hydra During Development
 
 When working on new features or bug fixes you need to be able to run Hydra from your working copy. This
@@ -105,7 +100,7 @@ Have a look at the [Procfile](./Procfile) if you want to see how the processes a
 conflicts with services that might be running on your host, hydra and postgress are started on custom ports:
 
 - hydra-server: 63333 with the username "alice" and the password "foobar"
-- postgresql: 64444, can be connected to using `psql -p 64444 -h localhost hydra`
+- postgresql: 64444
 
 Note that this is only ever meant as an ad-hoc way of executing Hydra during development. Please make use of the
 NixOS module for actually running Hydra in production.
@@ -10,7 +10,6 @@
 - [RunCommand](./plugins/RunCommand.md)
 - [Using the external API](api.md)
 - [Webhooks](webhooks.md)
-- [Webhook Authentication Migration Guide](webhook-migration-guide.md)
 - [Monitoring Hydra](./monitoring/README.md)
 
 ## Developer's Guide
@@ -51,12 +51,10 @@ base_uri example.com
 `base_uri` should be your hydra servers proxied URL. If you are using
 Hydra nixos module then setting `hydraURL` option should be enough.
 
-You also need to configure your reverse proxy to pass `X-Request-Base`
-to hydra, with the same value as `base_uri`.
-This also covers the case of serving Hydra with a prefix path,
-as in [http://example.com/hydra]().
-
-For example if you are using nginx, then use configuration similar to
+If you want to serve Hydra with a prefix path, for example
+[http://example.com/hydra]() then you need to configure your reverse
+proxy to pass `X-Request-Base` to hydra, with prefix path as value. For
+example if you are using nginx, then use configuration similar to
 following:
 
 server {
@@ -65,7 +63,8 @@ following:
     .. other configuration ..
     location /hydra/ {
 
-        proxy_pass http://127.0.0.1:3000/;
+        proxy_pass http://127.0.0.1:3000;
+        proxy_redirect http://127.0.0.1:3000 https://example.com/hydra;
 
         proxy_set_header Host $host;
         proxy_set_header X-Real-IP $remote_addr;
@@ -75,9 +74,6 @@ following:
     }
 }
 
-Note the trailing slash on the `proxy_pass` directive, which causes nginx to
-strip off the `/hydra/` part of the URL before passing it to hydra.
-
 Populating a Cache
 ------------------
 
@@ -266,40 +262,6 @@ default role mapping:
 Note that configuring both the LDAP parameters in the hydra.conf and via
 the environment variable is a fatal error.
 
-Webhook Authentication
----------------------
-
-Hydra supports authenticating webhook requests from GitHub and Gitea to prevent unauthorized job evaluations.
-Webhook secrets should be stored in separate files outside the Nix store for security using Config::General's include mechanism.
-
-In your main `hydra.conf`:
-```apache
-<webhooks>
-  Include /var/lib/hydra/secrets/webhook-secrets.conf
-</webhooks>
-```
-
-Then create `/var/lib/hydra/secrets/webhook-secrets.conf` with your actual secrets:
-```apache
-<github>
-  secret = your-github-webhook-secret
-</github>
-<gitea>
-  secret = your-gitea-webhook-secret
-</gitea>
-```
-
-For multiple secrets (useful for rotation or multiple environments), use an array:
-```apache
-<github>
-  secret = your-github-webhook-secret-prod
-  secret = your-github-webhook-secret-staging
-</github>
-```
-
-**Important**: The secrets file should have restricted permissions (e.g., 0600) to prevent unauthorized access.
-See the [Webhooks documentation](webhooks.md) for detailed setup instructions.
-
 Embedding Extra HTML
 --------------------
 
@@ -46,16 +46,6 @@ $ meson test
 $ YATH_JOB_COUNT=$NIX_BUILD_CORES meson test
 ```
 
-To run individual tests:
-
-```console
-# Run a specific test file
-$ PERL5LIB=t/lib:$PERL5LIB perl t/test.pl t/Hydra/Controller/API/checks.t
-
-# Run all tests in a directory
-$ PERL5LIB=t/lib:$PERL5LIB perl t/test.pl t/Hydra/Controller/API/
-```
-
 **Warning**: Currently, the tests can fail
 if run with high parallelism [due to an issue in
 `Test::PostgreSQL`](https://github.com/TJC/Test-postgresql/issues/40)
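Both sides keep the warning about `Test::PostgreSQL` misbehaving under high parallelism, so when the full suite flakes locally it can help to cap the yath job count explicitly. This is only an editor's sketch; the value below is an arbitrary example, not a project recommendation:

```bash
# Run the test suite with a reduced number of parallel yath jobs
YATH_JOB_COUNT=2 meson test
```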
@@ -48,7 +48,7 @@ Getting Nix
 If your server runs NixOS you are all set to continue with installation
 of Hydra. Otherwise you first need to install Nix. The latest stable
 version can be found one [the Nix web
-site](https://nixos.org/download/), along with a manual, which
+site](http://nixos.org/nix/download.html), along with a manual, which
 includes installation instructions.
 
 Installation
@@ -1,168 +0,0 @@
-# Webhook Authentication Migration Guide
-
-This guide helps Hydra administrators migrate from unauthenticated webhooks to authenticated webhooks to secure their Hydra instances against unauthorized job evaluations.
-
-## Why Migrate?
-
-Currently, Hydra's webhook endpoints (`/api/push-github` and `/api/push-gitea`) accept any POST request without authentication. This vulnerability allows:
-- Anyone to trigger expensive job evaluations
-- Potential denial of service through repeated requests
-- Manipulation of build timing and scheduling
-
-## Step-by-Step Migration for NixOS
-
-### 1. Create Webhook Configuration
-
-Create a webhook secrets configuration file with the generated secrets:
-
-```bash
-# Create the secrets configuration file with inline secret generation
-cat > /var/lib/hydra/secrets/webhook-secrets.conf <<EOF
-<github>
-  secret = $(openssl rand -hex 32)
-</github>
-<gitea>
-  secret = $(openssl rand -hex 32)
-</gitea>
-EOF
-
-# Set secure permissions
-chmod 0440 /var/lib/hydra/secrets/webhook-secrets.conf
-chown hydra:hydra /var/lib/hydra/secrets/webhook-secrets.conf
-```
-
-**Important**: Save the generated secrets to configure them in GitHub/Gitea later. You can view them with:
-```bash
-cat /var/lib/hydra/secrets/webhook-secrets.conf
-```
-
-Then update your NixOS configuration to include the webhook configuration:
-
-```nix
-{
-  services.hydra-dev = {
-    enable = true;
-    hydraURL = "https://hydra.example.com";
-    notificationSender = "hydra@example.com";
-
-    extraConfig = ''
-      <webhooks>
-        Include /var/lib/hydra/secrets/webhook-secrets.conf
-      </webhooks>
-    '';
-  };
-}
-```
-
-For multiple secrets (useful for rotation or multiple environments), update your webhook-secrets.conf:
-
-```apache
-<github>
-  secret = your-github-webhook-secret-prod
-  secret = your-github-webhook-secret-staging
-</github>
-<gitea>
-  secret = your-gitea-webhook-secret
-</gitea>
-```
-
-### 2. Deploy Configuration
-
-Apply the NixOS configuration:
-
-```bash
-nixos-rebuild switch
-```
-
-This will automatically restart Hydra services with the new configuration.
-
-### 3. Verify Configuration
-
-Check Hydra's logs to ensure secrets were loaded successfully:
-
-```bash
-journalctl -u hydra-server | grep -i webhook
-```
-
-You should not see warnings about webhook authentication not being configured.
-
-### 4. Update Your Webhooks
-
-#### GitHub
-1. Navigate to your repository settings: `https://github.com/<owner>/<repo>/settings/hooks`
-2. Edit your existing Hydra webhook
-3. In the "Secret" field, paste the content of `/var/lib/hydra/secrets/github-webhook-secret`
-4. Click "Update webhook"
-5. GitHub will send a ping event to verify the configuration
-
-#### Gitea
-1. Navigate to your repository webhook settings
-2. Edit your existing Hydra webhook
-3. In the "Secret" field, paste the content of `/var/lib/hydra/secrets/gitea-webhook-secret`
-4. Click "Update Webhook"
-5. Use the "Test Delivery" button to verify the configuration
-
-### 5. Test the Configuration
-
-After updating each webhook:
-1. Make a test commit to trigger the webhook
-2. Check Hydra's logs for successful authentication
-3. Verify the evaluation was triggered in Hydra's web interface
-
-## Troubleshooting
-
-### 401 Unauthorized Errors
-
-If webhooks start failing with 401 errors:
-- Verify the secret in the Git forge matches the file content exactly
-- Check file permissions: `ls -la /var/lib/hydra/secrets/`
-- Ensure no extra whitespace in secret files
-- Check Hydra logs for specific error messages
-
-### Webhook Still Unauthenticated
-
-If you see warnings about unauthenticated webhooks after configuration:
-- Verify the configuration syntax in your NixOS module
-- Ensure the NixOS configuration was successfully applied
-- Check that the webhook-secrets.conf file exists and is readable by the Hydra user
-- Verify the Include path is correct in your hydra.conf
-- Check the syntax of your webhook-secrets.conf file
-
-### Testing Without Git Forge
-
-You can test webhook authentication using curl:
-
-```bash
-# Read the secret
-SECRET=$(cat /var/lib/hydra/secrets/github-webhook-secret)
-
-# Create test payload
-PAYLOAD='{"ref":"refs/heads/main","repository":{"clone_url":"https://github.com/test/repo.git"}}'
-
-# Calculate signature
-SIGNATURE="sha256=$(echo -n "$PAYLOAD" | openssl dgst -sha256 -hmac "$SECRET" | cut -d' ' -f2)"
-
-# Send authenticated request
-curl -X POST https://your-hydra/api/push-github \
-  -H "Content-Type: application/json" \
-  -H "X-Hub-Signature-256: $SIGNATURE" \
-  -d "$PAYLOAD"
-```
-
-For Gitea (no prefix in signature):
-```bash
-# Read the secret
-SECRET=$(cat /var/lib/hydra/secrets/gitea-webhook-secret)
-
-# Create test payload
-PAYLOAD='{"ref":"refs/heads/main","repository":{"clone_url":"https://gitea.example.com/test/repo.git"}}'
-
-# Calculate signature
-SIGNATURE=$(echo -n "$PAYLOAD" | openssl dgst -sha256 -hmac "$SECRET" | cut -d' ' -f2)
-
-# Send authenticated request
-curl -X POST https://your-hydra/api/push-gitea \
-  -H "Content-Type: application/json" \
-  -H "X-Gitea-Signature: $SIGNATURE" \
-  -d "$PAYLOAD"
-```
@@ -3,58 +3,6 @@
 Hydra can be notified by github or gitea with webhooks to trigger a new evaluation when a
 jobset has a github repo in its input.
 
-## Webhook Authentication
-
-Hydra supports webhook signature verification for both GitHub and Gitea using HMAC-SHA256. This ensures that webhook
-requests are coming from your configured Git forge and haven't been tampered with.
-
-### Configuring Webhook Authentication
-
-1. **Create webhook configuration**: Generate and store webhook secrets securely:
-   ```bash
-   # Create directory and generate secrets in one step
-   mkdir -p /var/lib/hydra/secrets
-   cat > /var/lib/hydra/secrets/webhook-secrets.conf <<EOF
-   <github>
-     secret = $(openssl rand -hex 32)
-   </github>
-   <gitea>
-     secret = $(openssl rand -hex 32)
-   </gitea>
-   EOF
-
-   # Set secure permissions
-   chmod 0600 /var/lib/hydra/secrets/webhook-secrets.conf
-   chown hydra:hydra /var/lib/hydra/secrets/webhook-secrets.conf
-   ```
-
-2. **Configure Hydra**: Add the following to your `hydra.conf`:
-   ```apache
-   <webhooks>
-     Include /var/lib/hydra/secrets/webhook-secrets.conf
-   </webhooks>
-   ```
-
-3. **Configure your Git forge**: View the generated secrets and configure them in GitHub/Gitea:
-   ```bash
-   grep "secret =" /var/lib/hydra/secrets/webhook-secrets.conf
-   ```
-
-### Multiple Secrets Support
-
-Hydra supports configuring multiple secrets for each platform, which is useful for:
-- Zero-downtime secret rotation
-- Supporting multiple environments (production/staging)
-- Gradual migration of webhooks
-
-To configure multiple secrets, use array syntax:
-```apache
-<github>
-  secret = current-webhook-secret
-  secret = previous-webhook-secret
-</github>
-```
-
 ## GitHub
 
 To set up a webhook for a GitHub repository go to `https://github.com/<yourhandle>/<yourrepo>/settings`
@@ -62,16 +10,11 @@ and in the `Webhooks` tab click on `Add webhook`.
 
 - In `Payload URL` fill in `https://<your-hydra-domain>/api/push-github`.
 - In `Content type` switch to `application/json`.
-- In the `Secret` field, enter the content of your GitHub webhook secret file (if authentication is configured).
+- The `Secret` field can stay empty.
 - For `Which events would you like to trigger this webhook?` keep the default option for events on `Just the push event.`.
 
 Then add the hook with `Add webhook`.
 
-### Verifying GitHub Webhook Security
-
-After configuration, GitHub will send webhook requests with an `X-Hub-Signature-256` header containing the HMAC-SHA256
-signature of the request body. Hydra will verify this signature matches the configured secret.
-
 ## Gitea
 
 To set up a webhook for a Gitea repository go to the settings of the repository in your Gitea instance
@@ -79,23 +22,6 @@ and in the `Webhooks` tab click on `Add Webhook` and choose `Gitea` in the drop
 
 - In `Target URL` fill in `https://<your-hydra-domain>/api/push-gitea`.
 - Keep HTTP method `POST`, POST Content Type `application/json` and Trigger On `Push Events`.
-- In the `Secret` field, enter the content of your Gitea webhook secret file (if authentication is configured).
 - Change the branch filter to match the git branch hydra builds.
 
 Then add the hook with `Add webhook`.
-
-### Verifying Gitea Webhook Security
-
-After configuration, Gitea will send webhook requests with an `X-Gitea-Signature` header containing the HMAC-SHA256
-signature of the request body. Hydra will verify this signature matches the configured secret.
-
-## Troubleshooting
-
-If you receive 401 Unauthorized errors:
-- Verify the webhook secret in your Git forge matches the content of the secret file exactly
-- Check that the secret file has proper permissions (should be 0600)
-- Look at Hydra's logs for specific error messages
-- Ensure the correct signature header is being sent by your Git forge
-
-If you see warnings about webhook authentication not being configured:
-- Configure webhook authentication as described above to secure your endpoints
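The removed documentation above describes the signature scheme: an HMAC-SHA256 of the raw request body, sent as `X-Hub-Signature-256: sha256=<hex>` by GitHub and as a bare hex digest in `X-Gitea-Signature` by Gitea. As a reviewing aid only, here is a minimal bash sketch of checking such a signature against a stored secret with `openssl`, mirroring the commands already used in the removed docs. It is not Hydra's verification code; the script name, arguments, and the `awk` parsing of the secrets file are assumptions for illustration.

```bash
#!/usr/bin/env bash
# check-webhook-sig.sh <payload-file> <received-signature>
# Recompute the HMAC-SHA256 of a saved webhook body and compare it to the
# signature taken from the request header.
set -euo pipefail

payload_file=$1   # raw request body saved to a file
received=$2       # e.g. "sha256=ab12..." (GitHub) or bare "ab12..." (Gitea)

# First "secret = ..." entry from the config file shown in the docs above.
secret=$(awk '/secret =/ { print $3; exit }' /var/lib/hydra/secrets/webhook-secrets.conf)

digest=$(openssl dgst -sha256 -hmac "$secret" < "$payload_file" | cut -d' ' -f2)

# GitHub prefixes the digest with "sha256="; Gitea sends the bare hex digest.
if [ "$received" = "sha256=$digest" ] || [ "$received" = "$digest" ]; then
    echo "signature OK"
else
    echo "signature mismatch" >&2
    exit 1
fi
```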
flake.lock (generated, 33 changed lines)

@@ -1,18 +1,27 @@
 {
   "nodes": {
     "nix": {
-      "flake": false,
+      "inputs": {
+        "flake-compat": [],
+        "flake-parts": [],
+        "git-hooks-nix": [],
+        "nixpkgs": [
+          "nixpkgs"
+        ],
+        "nixpkgs-23-11": [],
+        "nixpkgs-regression": []
+      },
       "locked": {
-        "lastModified": 1750777360,
-        "narHash": "sha256-nDWFxwhT+fQNgi4rrr55EKjpxDyVKSl1KaNmSXtYj40=",
+        "lastModified": 1739899400,
+        "narHash": "sha256-q/RgA4bB7zWai4oPySq9mch7qH14IEeom2P64SXdqHs=",
         "owner": "NixOS",
         "repo": "nix",
-        "rev": "7bb200199705eddd53cb34660a76567c6f1295d9",
+        "rev": "e310c19a1aeb1ce1ed4d41d5ab2d02db596e0918",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "2.29-maintenance",
+        "ref": "2.26-maintenance",
         "repo": "nix",
         "type": "github"
       }
@@ -20,11 +29,11 @@
     "nix-eval-jobs": {
       "flake": false,
       "locked": {
-        "lastModified": 1748680938,
-        "narHash": "sha256-TQk6pEMD0mFw7jZXpg7+2qNKGbAluMQgc55OMgEO8bM=",
+        "lastModified": 1743008255,
+        "narHash": "sha256-Lo4KFBNcY8tmBuCmEr2XV0IUZtxXHmbXPNLkov/QSU0=",
         "owner": "nix-community",
         "repo": "nix-eval-jobs",
-        "rev": "974a4af3d4a8fd242d8d0e2608da4be87a62b83f",
+        "rev": "f7418fc1fa45b96d37baa95ff3c016dd5be3876b",
         "type": "github"
       },
       "original": {
@@ -35,16 +44,16 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1750736827,
-        "narHash": "sha256-UcNP7BR41xMTe0sfHBH8R79+HdCw0OwkC/ZKrQEuMeo=",
+        "lastModified": 1739461644,
+        "narHash": "sha256-1o1qR0KYozYGRrnqytSpAhVBYLNBHX+Lv6I39zGRzKM=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "b4a30b08433ad7b6e1dfba0833fb0fe69d43dfec",
+        "rev": "97a719c9f0a07923c957cf51b20b329f9fb9d43f",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-25.05-small",
+        "ref": "nixos-24.11-small",
         "repo": "nixpkgs",
         "type": "github"
       }
flake.nix (68 changed lines)

@@ -1,12 +1,18 @@
 {
   description = "A Nix-based continuous build system";
 
-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small";
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11-small";
 
   inputs.nix = {
-    url = "github:NixOS/nix/2.29-maintenance";
-    # We want to control the deps precisely
-    flake = false;
+    url = "github:NixOS/nix/2.26-maintenance";
+    inputs.nixpkgs.follows = "nixpkgs";
+
+    # hide nix dev tooling from our lock file
+    inputs.flake-parts.follows = "";
+    inputs.git-hooks-nix.follows = "";
+    inputs.nixpkgs-regression.follows = "";
+    inputs.nixpkgs-23-11.follows = "";
+    inputs.flake-compat.follows = "";
   };
 
   inputs.nix-eval-jobs = {
@@ -24,27 +30,11 @@
 
   # A Nixpkgs overlay that provides a 'hydra' package.
   overlays.default = final: prev: {
-    nixDependenciesForHydra = final.lib.makeScope final.newScope
-      (import (nix + "/packaging/dependencies.nix") {
-        pkgs = final;
-        inherit (final) stdenv;
-        inputs = {};
-      });
-    nixComponentsForHydra = final.lib.makeScope final.nixDependenciesForHydra.newScope
-      (import (nix + "/packaging/components.nix") {
-        officialRelease = true;
-        inherit (final) lib;
-        pkgs = final;
-        src = nix;
-        maintainers = [ ];
-      });
-    nix-eval-jobs = final.callPackage nix-eval-jobs {
-      nixComponents = final.nixComponentsForHydra;
-    };
+    nix-eval-jobs = final.callPackage nix-eval-jobs {};
     hydra = final.callPackage ./package.nix {
-      inherit (final.lib) fileset;
+      inherit (nixpkgs.lib) fileset;
       rawSrc = self;
-      nixComponents = final.nixComponentsForHydra;
+      nix-perl-bindings = final.nixComponents.nix-perl-bindings;
     };
   };
 
@@ -83,31 +73,21 @@
       validate-openapi = hydraJobs.tests.validate-openapi.${system};
     });
 
-    packages = forEachSystem (system: let
-      inherit (nixpkgs) lib;
-      pkgs = nixpkgs.legacyPackages.${system};
-      nixDependencies = lib.makeScope pkgs.newScope
-        (import (nix + "/packaging/dependencies.nix") {
-          inherit pkgs;
-          inherit (pkgs) stdenv;
-          inputs = {};
-        });
-      nixComponents = lib.makeScope nixDependencies.newScope
-        (import (nix + "/packaging/components.nix") {
-          officialRelease = true;
-          inherit lib pkgs;
-          src = nix;
-          maintainers = [ ];
-        });
-    in {
-      nix-eval-jobs = pkgs.callPackage nix-eval-jobs {
-        inherit nixComponents;
+    packages = forEachSystem (system: {
+      nix-eval-jobs = nixpkgs.legacyPackages.${system}.callPackage nix-eval-jobs {
+        nix = nix.packages.${system}.nix;
       };
-      hydra = pkgs.callPackage ./package.nix {
+      hydra = nixpkgs.legacyPackages.${system}.callPackage ./package.nix {
         inherit (nixpkgs.lib) fileset;
-        inherit nixComponents;
         inherit (self.packages.${system}) nix-eval-jobs;
         rawSrc = self;
+        inherit (nix.packages.${system})
+          nix-util
+          nix-store
+          nix-main
+          nix-cli
+          ;
+        nix-perl-bindings = nix.hydraJobs.perlBindings.${system};
       };
       default = self.packages.${system}.hydra;
     });
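On both sides of this change the flake exposes `packages.<system>.hydra`, `packages.<system>.nix-eval-jobs`, and a `default` alias, so local builds are invoked the same way. A quick sanity check, assuming a flakes-enabled Nix; the second command simply mirrors the CI step in the workflow diff above:

```bash
# Build Hydra and the bundled nix-eval-jobs from the flake
nix build .#hydra .#nix-eval-jobs

# Run the same checks the CI workflow invokes
nix-build -A checks.x86_64-linux.build -A checks.x86_64-linux.validate-openapi
```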
@@ -1,7 +1,5 @@
 #!/bin/sh
 
-export PATH=$(pwd)/src/script:$PATH
-
 # wait for hydra-server to listen
 while ! nc -z localhost 63333; do sleep 1; done
 
@@ -1,7 +1,5 @@
 #!/bin/sh
 
-export PATH=$(pwd)/src/script:$PATH
-
 # wait for postgresql to listen
 while ! pg_isready -h $(pwd)/.hydra-data/postgres -p 64444; do sleep 1; done
 
@@ -1,7 +1,5 @@
 #!/bin/sh
 
-export PATH=$(pwd)/src/script:$PATH
-
 # wait for hydra-server to listen
 while ! nc -z localhost 63333; do sleep 1; done
 
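The three helper scripts above differ only in what they wait for. The same probes can be run by hand against the development ports mentioned in the README (63333 for hydra-server, 64444 for PostgreSQL); a throwaway sketch, not part of the diff:

```bash
# Manual equivalents of the wait loops in the helper scripts
nc -z localhost 63333 && echo "hydra-server is listening"
pg_isready -h "$(pwd)/.hydra-data/postgres" -p 64444
```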
@@ -78,11 +78,6 @@ paths:
         description: project and jobset formatted as "<project>:<jobset>" to evaluate
         schema:
           type: string
-      - in: query
-        name: force
-        description: when set to true the jobset gets evaluated even when it did not change
-        schema:
-          type: boolean
       responses:
         '200':
           description: jobset trigger response
@@ -1,90 +0,0 @@
-{ pulls, branches, ... }:
-let
-  # create the json spec for the jobset
-  makeSpec =
-    contents:
-    builtins.derivation {
-      name = "spec.json";
-      system = "x86_64-linux";
-      preferLocalBuild = true;
-      allowSubstitutes = false;
-      builder = "/bin/sh";
-      args = [
-        (builtins.toFile "builder.sh" ''
-          echo "$contents" > $out
-        '')
-      ];
-      contents = builtins.toJSON contents;
-    };
-
-  prs = readJSONFile pulls;
-  refs = readJSONFile branches;
-
-  # template for creating a job
-  makeJob =
-    {
-      schedulingshares ? 10,
-      keepnr ? 3,
-      description,
-      flake,
-      enabled ? 1,
-    }:
-    {
-      inherit
-        description
-        flake
-        schedulingshares
-        keepnr
-        enabled
-        ;
-      type = 1;
-      hidden = false;
-      checkinterval = 300; # every 5 minutes
-      enableemail = false;
-      emailoverride = "";
-    };
-
-  giteaHost = "ssh://gitea@nayeonie.com:2222";
-  repo = "ahuston-0/hydra";
-  # # Create a hydra job for a branch
-  jobOfRef =
-    name:
-    { ref, ... }:
-    if ((builtins.match "^refs/heads/(.*)$" ref) == null) then
-      null
-    else
-      {
-        name = builtins.replaceStrings [ "/" ] [ "-" ] "branch-${name}";
-        value = makeJob {
-          description = "Branch ${name}";
-          flake = "git+${giteaHost}/${repo}?ref=${ref}";
-        };
-      };
-
-  # Create a hydra job for a PR
-  jobOfPR = id: info: {
-    name = if info.draft then "draft-${id}" else "pr-${id}";
-    value = makeJob {
-      description = "PR ${id}: ${info.title}";
-      flake = "git+${giteaHost}/${repo}?ref=${info.head.ref}";
-      enabled = info.state == "open";
-    };
-  };
-
-  # some utility functions
-  # converts json to name/value dicts
-  attrsToList = l: builtins.attrValues (builtins.mapAttrs (name: value: { inherit name value; }) l);
-  # wrapper function for reading json from file
-  readJSONFile = f: builtins.fromJSON (builtins.readFile f);
-  # remove null values from a set, in-case of branches that don't exist
-  mapFilter = f: l: builtins.filter (x: (x != null)) (map f l);
-
-  # Create job set from PRs and branches
-  jobs = makeSpec (
-    builtins.listToAttrs (map ({ name, value }: jobOfPR name value) (attrsToList prs))
-    // builtins.listToAttrs (mapFilter ({ name, value }: jobOfRef name value) (attrsToList refs))
-  );
-in
-{
-  jobsets = jobs;
-}
@@ -1,35 +0,0 @@
-{
-  "enabled": 1,
-  "hidden": false,
-  "description": "ahuston-0's fork of hydra",
-  "nixexprinput": "nixexpr",
-  "nixexprpath": "hydra/jobsets.nix",
-  "checkinterval": 60,
-  "schedulingshares": 100,
-  "enableemail": false,
-  "emailoverride": "",
-  "keepnr": 3,
-  "type": 0,
-  "inputs": {
-    "nixexpr": {
-      "value": "ssh://gitea@nayeonie.com:2222/ahuston-0/hydra.git add-gitea-pulls",
-      "type": "git",
-      "emailresponsible": false
-    },
-    "nixpkgs": {
-      "value": "https://github.com/NixOS/nixpkgs nixos-unstable",
-      "type": "git",
-      "emailresponsible": false
-    },
-    "pulls": {
-      "type": "giteapulls",
-      "value": "nayeonie.com ahuston-0 hydra https",
-      "emailresponsible": false
-    },
-    "branches": {
-      "type": "gitea_refs",
-      "value": "nayeonie.com ahuston-0 hydra heads https -",
-      "emailresponsible": false
-    }
-  }
-}
meson.build (14 changed lines)

@@ -12,6 +12,20 @@ nix_util_dep = dependency('nix-util', required: true)
 nix_store_dep = dependency('nix-store', required: true)
 nix_main_dep = dependency('nix-main', required: true)
 
+# Nix need extra flags not provided in its pkg-config files.
+nix_dep = declare_dependency(
+  dependencies: [
+    nix_util_dep,
+    nix_store_dep,
+    nix_main_dep,
+  ],
+  compile_args: [
+    '-include', 'nix/config-util.hh',
+    '-include', 'nix/config-store.hh',
+    '-include', 'nix/config-main.hh',
+  ],
+)
+
 pqxx_dep = dependency('libpqxx', required: true)
 
 prom_cpp_core_dep = dependency('prometheus-cpp-core', required: true)
@@ -228,8 +228,8 @@ in
 
       nix.settings = {
         trusted-users = [ "hydra-queue-runner" ];
-        keep-outputs = true;
-        keep-derivations = true;
+        gc-keep-outputs = true;
+        gc-keep-derivations = true;
       };
 
       services.hydra-dev.extraConfig =
@@ -340,7 +340,7 @@ in
       requires = [ "hydra-init.service" ];
       wants = [ "network-online.target" ];
       after = [ "hydra-init.service" "network.target" "network-online.target" ];
-      path = [ cfg.package pkgs.hostname-debian pkgs.openssh pkgs.bzip2 config.nix.package ];
+      path = [ cfg.package pkgs.nettools pkgs.openssh pkgs.bzip2 config.nix.package ];
       restartTriggers = [ hydraConf ];
       environment = env // {
        PGPASSFILE = "${baseDir}/pgpass-queue-runner"; # grrr
@@ -364,7 +364,7 @@ in
      requires = [ "hydra-init.service" ];
      restartTriggers = [ hydraConf ];
      after = [ "hydra-init.service" "network.target" ];
-     path = with pkgs; [ hostname-debian cfg.package jq ];
+     path = with pkgs; [ nettools cfg.package jq ];
      environment = env // {
        HYDRA_DBI = "${env.HYDRA_DBI};application_name=hydra-evaluator";
      };
@@ -463,12 +463,12 @@ in
        ''
          set -eou pipefail
          compression=$(sed -nr 's/compress_build_logs_compression = ()/\1/p' ${baseDir}/hydra.conf)
-         if [[ $compression == "" || $compression == bzip2 ]]; then
-           compressionCmd=(bzip2)
+         if [[ $compression == "" ]]; then
+           compression="bzip2"
          elif [[ $compression == zstd ]]; then
-           compressionCmd=(zstd --rm)
+           compression="zstd --rm"
          fi
-         find ${baseDir}/build-logs -ignore_readdir_race -type f -name "*.drv" -mtime +3 -size +0c -print0 | xargs -0 -r "''${compressionCmd[@]}" --force --quiet
+         find ${baseDir}/build-logs -ignore_readdir_race -type f -name "*.drv" -mtime +3 -size +0c | xargs -r "$compression" --force --quiet
        '';
        startAt = "Sun 01:45";
      };
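The log-compression hunk above is largely about how the compressor invocation is stored: one side keeps the command and its flag in a bash array (`compressionCmd=(zstd --rm)`) expanded as `"${compressionCmd[@]}"`, the other keeps a single string passed as `"$compression"`. A standalone sketch of the difference, using `printf` as a stand-in for the compressor so it runs anywhere; it is illustrative only and not taken from the module:

```bash
#!/usr/bin/env bash
# A command plus its flags stored in one string becomes a single word when
# quoted, so xargs cannot exec it; an array keeps the words separate.

cmd_str="printf %s\n"        # analogous to: compression="zstd --rm"
cmd_arr=(printf '%s\n')      # analogous to: compressionCmd=(zstd --rm)

echo hello | xargs -r "${cmd_arr[@]}"   # works: runs `printf` with '%s\n' as its own argument
echo hello | xargs -r "$cmd_str"        # fails: no executable named "printf %s\n"
```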
@@ -27,7 +27,8 @@ in
 {
 
   install = forEachSystem (system:
-    (import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }).simpleTest {
+    with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; };
+    simpleTest {
       name = "hydra-install";
       nodes.machine = hydraServer;
       testScript =
@@ -42,7 +43,8 @@ in
     });
 
   notifications = forEachSystem (system:
-    (import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }).simpleTest {
+    with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; };
+    simpleTest {
       name = "hydra-notifications";
       nodes.machine = {
         imports = [ hydraServer ];
@@ -54,7 +56,7 @@ in
        '';
        services.influxdb.enable = true;
      };
-      testScript = { nodes, ... }: ''
+      testScript = ''
        machine.wait_for_job("hydra-init")
 
        # Create an admin account and some other state.
@@ -85,7 +87,7 @@ in
 
        # Setup the project and jobset
        machine.succeed(
-           "su - hydra -c 'perl -I ${nodes.machine.services.hydra-dev.package.perlDeps}/lib/perl5/site_perl ${./t/setup-notifications-jobset.pl}' >&2"
+           "su - hydra -c 'perl -I ${config.services.hydra-dev.package.perlDeps}/lib/perl5/site_perl ${./t/setup-notifications-jobset.pl}' >&2"
        )
 
        # Wait until hydra has build the job and
@@ -99,10 +101,9 @@ in
     });
 
   gitea = forEachSystem (system:
-    let
-      pkgs = nixpkgs.legacyPackages.${system};
-    in
-    (import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }).makeTest {
+    let pkgs = nixpkgs.legacyPackages.${system}; in
+    with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; };
+    makeTest {
       name = "hydra-gitea";
       nodes.machine = { pkgs, ... }: {
         imports = [ hydraServer ];
package.nix (31 changed lines)

@@ -8,7 +8,11 @@
 
 , perlPackages
 
-, nixComponents
+, nix-util
+, nix-store
+, nix-main
+, nix-cli
+, nix-perl-bindings
 , git
 
 , makeWrapper
@@ -61,7 +65,7 @@ let
     name = "hydra-perl-deps";
     paths = lib.closePropagation
       ([
-        nixComponents.nix-perl-bindings
+        nix-perl-bindings
         git
       ] ++ (with perlPackages; [
         AuthenSASL
@@ -89,7 +93,6 @@ let
         DateTime
         DBDPg
         DBDSQLite
-        DBIxClassHelpers
         DigestSHA1
         EmailMIME
         EmailSender
@@ -110,7 +113,6 @@ let
         NetAmazonS3
         NetPrometheus
         NetStatsd
-        NumberBytesHuman
         PadWalker
         ParallelForkManager
         PerlCriticCommunity
@@ -163,7 +165,7 @@ stdenv.mkDerivation (finalAttrs: {
     nukeReferences
     pkg-config
     mdbook
-    nixComponents.nix-cli
+    nix-cli
     perlDeps
     perl
     unzip
@@ -173,9 +175,9 @@ stdenv.mkDerivation (finalAttrs: {
     libpqxx
     openssl
     libxslt
-    nixComponents.nix-util
-    nixComponents.nix-store
-    nixComponents.nix-main
+    nix-util
+    nix-store
+    nix-main
     perlDeps
     perl
     boost
@@ -202,14 +204,14 @@ stdenv.mkDerivation (finalAttrs: {
     glibcLocales
     libressl.nc
     python3
-    nixComponents.nix-cli
+    nix-cli
   ];
 
   hydraPath = lib.makeBinPath (
     [
       subversion
      openssh
-      nixComponents.nix-cli
+      nix-cli
      coreutils
      findutils
      pixz
@@ -239,7 +241,7 @@ stdenv.mkDerivation (finalAttrs: {
   shellHook = ''
     pushd $(git rev-parse --show-toplevel) >/dev/null
 
-    PATH=$(pwd)/build/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/build/src/hydra-queue-runner:$PATH
+    PATH=$(pwd)/build/src/hydra-evaluator:$(pwd)/build/src/script:$(pwd)/build/src/hydra-queue-runner:$PATH
     PERL5LIB=$(pwd)/src/lib:$PERL5LIB
     export HYDRA_HOME="$(pwd)/src/"
     mkdir -p .hydra-data
@@ -270,7 +272,7 @@ stdenv.mkDerivation (finalAttrs: {
       --prefix PATH ':' $out/bin:$hydraPath \
       --set HYDRA_RELEASE ${version} \
       --set HYDRA_HOME $out/libexec/hydra \
-      --set NIX_RELEASE ${nixComponents.nix-cli.name or "unknown"} \
+      --set NIX_RELEASE ${nix-cli.name or "unknown"} \
       --set NIX_EVAL_JOBS_RELEASE ${nix-eval-jobs.name or "unknown"}
     done
   '';
@@ -278,8 +280,5 @@ stdenv.mkDerivation (finalAttrs: {
   dontStrip = true;
 
   meta.description = "Build of Hydra on ${stdenv.system}";
-  passthru = {
-    inherit perlDeps;
-    nix = nixComponents.nix-cli;
-  };
+  passthru = { inherit perlDeps; };
 })
@@ -1,8 +1,8 @@
 #include "db.hh"
 #include "hydra-config.hh"
-#include <nix/util/pool.hh>
-#include <nix/main/shared.hh>
-#include <nix/util/signals.hh>
+#include "pool.hh"
+#include "shared.hh"
+#include "signals.hh"
 
 #include <algorithm>
 #include <thread>
@@ -180,8 +180,10 @@ struct Evaluator
     {
         auto conn(dbPool.get());
         pqxx::work txn(*conn);
-        txn.exec("update Jobsets set startTime = $1 where id = $2",
-            pqxx::params{now, jobset.name.id}).no_rows();
+        txn.exec_params0
+            ("update Jobsets set startTime = $1 where id = $2",
+             now,
+             jobset.name.id);
         txn.commit();
     }
@@ -232,7 +234,7 @@ struct Evaluator
             pqxx::work txn(*conn);
 
             if (jobset.evaluation_style == EvaluationStyle::ONE_AT_A_TIME) {
-                auto evaluation_res = txn.exec
+                auto evaluation_res = txn.exec_params
                     ("select id from JobsetEvals "
                      "where jobset_id = $1 "
                      "order by id desc limit 1"
@@ -248,7 +250,7 @@ struct Evaluator
 
                 auto evaluation_id = evaluation_res[0][0].as<int>();
 
-                auto unfinished_build_res = txn.exec
+                auto unfinished_build_res = txn.exec_params
                     ("select id from Builds "
                      "join JobsetEvalMembers "
                      " on (JobsetEvalMembers.build = Builds.id) "
@@ -418,18 +420,21 @@ struct Evaluator
                     /* Clear the trigger time to prevent this
                        jobset from getting stuck in an endless
                        failing eval loop. */
-                    txn.exec
+                    txn.exec_params0
                         ("update Jobsets set triggerTime = null where id = $1 and startTime is not null and triggerTime <= startTime",
-                        jobset.name.id).no_rows();
+                        jobset.name.id);
 
                     /* Clear the start time. */
-                    txn.exec
+                    txn.exec_params0
                         ("update Jobsets set startTime = null where id = $1",
-                        jobset.name.id).no_rows();
+                        jobset.name.id);
 
                     if (!WIFEXITED(status) || WEXITSTATUS(status) > 1) {
-                        txn.exec("update Jobsets set errorMsg = $1, lastCheckedTime = $2, errorTime = $2, fetchErrorMsg = null where id = $3",
-                            pqxx::params{fmt("evaluation %s", statusToString(status)), now, jobset.name.id}).no_rows();
+                        txn.exec_params0
+                            ("update Jobsets set errorMsg = $1, lastCheckedTime = $2, errorTime = $2, fetchErrorMsg = null where id = $3",
+                             fmt("evaluation %s", statusToString(status)),
+                             now,
+                             jobset.name.id);
                     }
 
                     txn.commit();
@@ -454,7 +459,7 @@ struct Evaluator
     {
         auto conn(dbPool.get());
         pqxx::work txn(*conn);
-        txn.exec("update Jobsets set startTime = null").no_rows();
+        txn.exec("update Jobsets set startTime = null");
        txn.commit();
     }
 
@@ -2,8 +2,7 @@ hydra_evaluator = executable('hydra-evaluator',
   'hydra-evaluator.cc',
   dependencies: [
     libhydra_dep,
-    nix_util_dep,
-    nix_main_dep,
+    nix_dep,
     pqxx_dep,
   ],
   install: true,
@@ -5,20 +5,20 @@
 #include <sys/stat.h>
 #include <fcntl.h>
 
-#include <nix/store/build-result.hh>
-#include <nix/store/path.hh>
-#include <nix/store/legacy-ssh-store.hh>
-#include <nix/store/serve-protocol.hh>
-#include <nix/store/serve-protocol-impl.hh>
+#include "build-result.hh"
+#include "path.hh"
+#include "legacy-ssh-store.hh"
+#include "serve-protocol.hh"
+#include "serve-protocol-impl.hh"
 #include "state.hh"
-#include <nix/util/current-process.hh>
-#include <nix/util/processes.hh>
-#include <nix/util/util.hh>
-#include <nix/store/serve-protocol.hh>
-#include <nix/store/serve-protocol-impl.hh>
-#include <nix/store/ssh.hh>
-#include <nix/util/finally.hh>
-#include <nix/util/url.hh>
+#include "current-process.hh"
+#include "processes.hh"
+#include "util.hh"
+#include "serve-protocol.hh"
+#include "serve-protocol-impl.hh"
+#include "ssh.hh"
+#include "finally.hh"
+#include "url.hh"
 
 using namespace nix;
 
@@ -50,7 +50,7 @@ static std::unique_ptr<SSHMaster::Connection> openConnection(
         auto remoteStore = machine->storeUri.params.find("remote-store");
         if (remoteStore != machine->storeUri.params.end()) {
             command.push_back("--store");
-            command.push_back(escapeShellArgAlways(remoteStore->second));
+            command.push_back(shellEscape(remoteStore->second));
         }
     }
 
@@ -386,19 +386,8 @@ void RemoteResult::updateWithBuildResult(const nix::BuildResult & buildResult)
 
 }
 
-/* Utility guard object to auto-release a semaphore on destruction. */
-template <typename T>
-class SemaphoreReleaser {
-public:
-    SemaphoreReleaser(T* s) : sem(s) {}
-    ~SemaphoreReleaser() { sem->release(); }
-
-private:
-    T* sem;
-};
 
 void State::buildRemote(ref<Store> destStore,
-    std::unique_ptr<MachineReservation> reservation,
     ::Machine::ptr machine, Step::ptr step,
     const ServeProto::BuildOptions & buildOptions,
     RemoteResult & result, std::shared_ptr<ActiveStep> activeStep,
@@ -538,23 +527,6 @@ void State::buildRemote(ref<Store> destStore,
         result.logFile = "";
     }
 
-    /* Throttle CPU-bound work. Opportunistically skip updating the current
-     * step, since this requires a DB roundtrip. */
-    if (!localWorkThrottler.try_acquire()) {
-        MaintainCount<counter> mc(nrStepsWaitingForDownloadSlot);
-        updateStep(ssWaitingForLocalSlot);
-        localWorkThrottler.acquire();
-    }
-    SemaphoreReleaser releaser(&localWorkThrottler);
-
-    /* Once we've started copying outputs, release the machine reservation
-     * so further builds can happen. We do not release the machine earlier
-     * to avoid situations where the queue runner is bottlenecked on
-     * copying outputs and we end up building too many things that we
-     * haven't been able to allow copy slots for. */
-    reservation.reset();
-    wakeDispatcher();
-
     StorePathSet outputs;
     for (auto & [_, realisation] : buildResult.builtOutputs)
         outputs.insert(realisation.outPath);
@@ -1,7 +1,7 @@
 #include "hydra-build-result.hh"
-#include <nix/store/store-api.hh>
+#include "store-api.hh"
-#include <nix/util/util.hh>
+#include "util.hh"
-#include <nix/util/source-accessor.hh>
+#include "source-accessor.hh"
 
 #include <regex>
 
@@ -51,8 +51,8 @@ BuildOutput getBuildOutput(
 "[[:space:]]+"
 "([a-zA-Z0-9_-]+)" // subtype (e.g. "readme")
 "[[:space:]]+"
-"(\"[^\"]+\"|[^[:space:]<>\"]+)" // path (may be quoted)
+"(\"[^\"]+\"|[^[:space:]\"]+)" // path (may be quoted)
-"([[:space:]]+([^[:space:]<>]+))?" // entry point
+"([[:space:]]+([^[:space:]]+))?" // entry point
 , std::regex::extended);
 
 for (auto & output : outputs) {
@@ -78,7 +78,7 @@ BuildOutput getBuildOutput(
 product.type = match[1];
 product.subtype = match[2];
 std::string s(match[3]);
-product.path = s[0] == '"' && s.back() == '"' ? std::string(s, 1, s.size() - 2) : s;
+product.path = s[0] == '"' ? std::string(s, 1, s.size() - 2) : s;
 product.defaultPath = match[5];
 
 /* Ensure that the path exists and points into the Nix
@@ -93,8 +93,6 @@ BuildOutput getBuildOutput(
 if (file == narMembers.end()) continue;
 
 product.name = product.path == store->printStorePath(output) ? "" : baseNameOf(product.path);
-if (!std::regex_match(product.name, std::regex("[a-zA-Z0-9.@:_ -]*")))
-product.name = "";
 
 if (file->second.type == SourceAccessor::Type::tRegular) {
 product.isRegular = true;
@@ -129,9 +127,8 @@ BuildOutput getBuildOutput(
 if (file == narMembers.end() ||
 file->second.type != SourceAccessor::Type::tRegular)
 continue;
-auto contents = trim(file->second.contents.value());
+res.releaseName = trim(file->second.contents.value());
-if (std::regex_match(contents, std::regex("[a-zA-Z0-9.@:_-]+")))
+// FIXME: validate release name
-res.releaseName = contents;
 }
 
 /* Get metrics. */
@@ -143,18 +140,10 @@ BuildOutput getBuildOutput(
 for (auto & line : tokenizeString<Strings>(file->second.contents.value(), "\n")) {
 auto fields = tokenizeString<std::vector<std::string>>(line);
 if (fields.size() < 2) continue;
-if (!std::regex_match(fields[0], std::regex("[a-zA-Z0-9._-]+")))
-continue;
 BuildMetric metric;
-metric.name = fields[0];
+metric.name = fields[0]; // FIXME: validate
-try {
+metric.value = atof(fields[1].c_str()); // FIXME
-metric.value = std::stod(fields[1]);
-} catch (...) {
-continue; // skip this metric
-}
 metric.unit = fields.size() >= 3 ? fields[2] : "";
-if (!std::regex_match(metric.unit, std::regex("[a-zA-Z0-9._%-]+")))
-metric.unit = "";
 res.metrics[metric.name] = metric;
 }
 }
@@ -2,8 +2,8 @@
 
 #include "state.hh"
 #include "hydra-build-result.hh"
-#include <nix/util/finally.hh>
+#include "finally.hh"
-#include <nix/store/binary-cache-store.hh>
+#include "binary-cache-store.hh"
 
 using namespace nix;
 
@@ -16,7 +16,7 @@ void setThreadName(const std::string & name)
 }
 
 
-void State::builder(std::unique_ptr<MachineReservation> reservation)
+void State::builder(MachineReservation::ptr reservation)
 {
 setThreadName("bld~" + std::string(reservation->step->drvPath.to_string()));
 
@@ -35,20 +35,22 @@ void State::builder(std::unique_ptr<MachineReservation> reservation)
 activeSteps_.lock()->erase(activeStep);
 });
 
-std::string machine = reservation->machine->storeUri.render();
-
 try {
 auto destStore = getDestStore();
-// Might release the reservation.
+res = doBuildStep(destStore, reservation, activeStep);
-res = doBuildStep(destStore, std::move(reservation), activeStep);
 } catch (std::exception & e) {
 printMsg(lvlError, "uncaught exception building ‘%s’ on ‘%s’: %s",
-localStore->printStorePath(activeStep->step->drvPath),
+localStore->printStorePath(reservation->step->drvPath),
-machine,
+reservation->machine->storeUri.render(),
 e.what());
 }
 }
 
+/* Release the machine and wake up the dispatcher. */
+assert(reservation.unique());
+reservation = 0;
+wakeDispatcher();
+
 /* If there was a temporary failure, retry the step after an
 exponentially increasing interval. */
 Step::ptr step = wstep.lock();
@@ -70,11 +72,11 @@ void State::builder(std::unique_ptr<MachineReservation> reservation)
 
 
 State::StepResult State::doBuildStep(nix::ref<Store> destStore,
-std::unique_ptr<MachineReservation> reservation,
+MachineReservation::ptr reservation,
 std::shared_ptr<ActiveStep> activeStep)
 {
-auto step(reservation->step);
+auto & step(reservation->step);
-auto machine(reservation->machine);
+auto & machine(reservation->machine);
 
 {
 auto step_(step->state.lock());
@@ -209,7 +211,7 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,
 
 try {
 /* FIXME: referring builds may have conflicting timeouts. */
-buildRemote(destStore, std::move(reservation), machine, step, buildOptions, result, activeStep, updateStep, narMembers);
+buildRemote(destStore, machine, step, buildOptions, result, activeStep, updateStep, narMembers);
 } catch (Error & e) {
 if (activeStep->state_.lock()->cancelled) {
 printInfo("marking step %d of build %d as cancelled", stepNr, buildId);
@@ -458,12 +460,13 @@ void State::failStep(
 for (auto & build : indirect) {
 if (build->finishedInDB) continue;
 printError("marking build %1% as failed", build->id);
-txn.exec("update Builds set finished = 1, buildStatus = $2, startTime = $3, stopTime = $4, isCachedBuild = $5, notificationPendingSince = $4 where id = $1 and finished = 0",
+txn.exec_params0
-pqxx::params{build->id,
+("update Builds set finished = 1, buildStatus = $2, startTime = $3, stopTime = $4, isCachedBuild = $5, notificationPendingSince = $4 where id = $1 and finished = 0",
+build->id,
 (int) (build->drvPath != step->drvPath && result.buildStatus() == bsFailed ? bsDepFailed : result.buildStatus()),
 result.startTime,
 result.stopTime,
-result.stepStatus == bsCachedFailure ? 1 : 0}).no_rows();
+result.stepStatus == bsCachedFailure ? 1 : 0);
 nrBuildsDone++;
 }
 
@@ -472,7 +475,7 @@ void State::failStep(
 if (result.stepStatus != bsCachedFailure && result.canCache)
 for (auto & i : step->drv->outputsAndOptPaths(*localStore))
 if (i.second.second)
-txn.exec("insert into FailedPaths values ($1)", pqxx::params{localStore->printStorePath(*i.second.second)}).no_rows();
+txn.exec_params0("insert into FailedPaths values ($1)", localStore->printStorePath(*i.second.second));
 
 txn.commit();
 }
@@ -40,15 +40,13 @@ void State::dispatcher()
 printMsg(lvlDebug, "dispatcher woken up");
 nrDispatcherWakeups++;
 
-auto t_before_work = std::chrono::steady_clock::now();
+auto now1 = std::chrono::steady_clock::now();
 
 auto sleepUntil = doDispatch();
 
-auto t_after_work = std::chrono::steady_clock::now();
+auto now2 = std::chrono::steady_clock::now();
 
-prom.dispatcher_time_spent_running.Increment(
+dispatchTimeMs += std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1).count();
-std::chrono::duration_cast<std::chrono::microseconds>(t_after_work - t_before_work).count());
-dispatchTimeMs += std::chrono::duration_cast<std::chrono::milliseconds>(t_after_work - t_before_work).count();
 
 /* Sleep until we're woken up (either because a runnable build
 is added, or because a build finishes). */
@@ -62,10 +60,6 @@ void State::dispatcher()
 *dispatcherWakeup_ = false;
 }
 
-auto t_after_sleep = std::chrono::steady_clock::now();
-prom.dispatcher_time_spent_waiting.Increment(
-std::chrono::duration_cast<std::chrono::microseconds>(t_after_sleep - t_after_work).count());
-
 } catch (std::exception & e) {
 printError("dispatcher: %s", e.what());
 sleep(1);
@@ -134,8 +128,6 @@ system_time State::doDispatch()
 comparator is a partial ordering (see MachineInfo). */
 int highestGlobalPriority;
 int highestLocalPriority;
-size_t numRequiredSystemFeatures;
-size_t numRevDeps;
 BuildID lowestBuildID;
 
 StepInfo(Step::ptr step, Step::State & step_) : step(step)
@@ -144,8 +136,6 @@ system_time State::doDispatch()
 lowestShareUsed = std::min(lowestShareUsed, jobset->shareUsed());
 highestGlobalPriority = step_.highestGlobalPriority;
 highestLocalPriority = step_.highestLocalPriority;
-numRequiredSystemFeatures = step->requiredSystemFeatures.size();
-numRevDeps = step_.rdeps.size();
 lowestBuildID = step_.lowestBuildID;
 }
 };
@@ -198,8 +188,6 @@ system_time State::doDispatch()
 a.highestGlobalPriority != b.highestGlobalPriority ? a.highestGlobalPriority > b.highestGlobalPriority :
 a.lowestShareUsed != b.lowestShareUsed ? a.lowestShareUsed < b.lowestShareUsed :
 a.highestLocalPriority != b.highestLocalPriority ? a.highestLocalPriority > b.highestLocalPriority :
-a.numRequiredSystemFeatures != b.numRequiredSystemFeatures ? a.numRequiredSystemFeatures > b.numRequiredSystemFeatures :
-a.numRevDeps != b.numRevDeps ? a.numRevDeps > b.numRevDeps :
 a.lowestBuildID < b.lowestBuildID;
 });
 
@@ -294,7 +282,7 @@ system_time State::doDispatch()
 /* Make a slot reservation and start a thread to
 do the build. */
 auto builderThread = std::thread(&State::builder, this,
-std::make_unique<MachineReservation>(*this, step, mi.machine));
+std::make_shared<MachineReservation>(*this, step, mi.machine));
 builderThread.detach(); // FIXME?
 
 keepGoing = true;
@@ -2,9 +2,9 @@
 
 #include <memory>
 
-#include <nix/util/hash.hh>
+#include "hash.hh"
-#include <nix/store/derivations.hh>
+#include "derivations.hh"
-#include <nix/store/store-api.hh>
+#include "store-api.hh"
 #include "nar-extractor.hh"
 
 struct BuildProduct
@@ -11,16 +11,16 @@
 
 #include <nlohmann/json.hpp>
 
-#include <nix/util/signals.hh>
+#include "signals.hh"
 #include "state.hh"
 #include "hydra-build-result.hh"
-#include <nix/store/store-open.hh>
+#include "store-api.hh"
-#include <nix/store/remote-store.hh>
+#include "remote-store.hh"
 
-#include <nix/store/globals.hh>
+#include "globals.hh"
 #include "hydra-config.hh"
-#include <nix/store/s3-binary-cache-store.hh>
+#include "s3-binary-cache-store.hh"
-#include <nix/main/shared.hh>
+#include "shared.hh"
 
 using namespace nix;
 using nlohmann::json;
@@ -70,31 +70,10 @@ State::PromMetrics::PromMetrics()
 .Register(*registry)
 .Add({})
 )
-, dispatcher_time_spent_running(
+, queue_max_id(
-prometheus::BuildCounter()
+prometheus::BuildGauge()
-.Name("hydraqueuerunner_dispatcher_time_spent_running")
+.Name("hydraqueuerunner_queue_max_build_id_info")
-.Help("Time (in micros) spent running the dispatcher")
+.Help("Maximum build record ID in the queue")
-.Register(*registry)
-.Add({})
-)
-, dispatcher_time_spent_waiting(
-prometheus::BuildCounter()
-.Name("hydraqueuerunner_dispatcher_time_spent_waiting")
-.Help("Time (in micros) spent waiting for the dispatcher to obtain work")
-.Register(*registry)
-.Add({})
-)
-, queue_monitor_time_spent_running(
-prometheus::BuildCounter()
-.Name("hydraqueuerunner_queue_monitor_time_spent_running")
-.Help("Time (in micros) spent running the queue monitor")
-.Register(*registry)
-.Add({})
-)
-, queue_monitor_time_spent_waiting(
-prometheus::BuildCounter()
-.Name("hydraqueuerunner_queue_monitor_time_spent_waiting")
-.Help("Time (in micros) spent waiting for the queue monitor to obtain work")
 .Register(*registry)
 .Add({})
 )
@@ -106,7 +85,6 @@ State::State(std::optional<std::string> metricsAddrOpt)
 : config(std::make_unique<HydraConfig>())
 , maxUnsupportedTime(config->getIntOption("max_unsupported_time", 0))
 , dbPool(config->getIntOption("max_db_connections", 128))
-, localWorkThrottler(config->getIntOption("max_local_worker_threads", std::min(maxSupportedLocalWorkers, std::max(4u, std::thread::hardware_concurrency()) - 2)))
 , maxOutputSize(config->getIntOption("max_output_size", 2ULL << 30))
 , maxLogSize(config->getIntOption("max_log_size", 64ULL << 20))
 , uploadLogsToBinaryCache(config->getBoolOption("upload_logs_to_binary_cache", false))
@@ -276,16 +254,17 @@ void State::monitorMachinesFile()
 void State::clearBusy(Connection & conn, time_t stopTime)
 {
 pqxx::work txn(conn);
-txn.exec("update BuildSteps set busy = 0, status = $1, stopTime = $2 where busy != 0",
+txn.exec_params0
-pqxx::params{(int) bsAborted,
+("update BuildSteps set busy = 0, status = $1, stopTime = $2 where busy != 0",
-stopTime != 0 ? std::make_optional(stopTime) : std::nullopt}).no_rows();
+(int) bsAborted,
+stopTime != 0 ? std::make_optional(stopTime) : std::nullopt);
 txn.commit();
 }
 
 
 unsigned int State::allocBuildStep(pqxx::work & txn, BuildID buildId)
 {
-auto res = txn.exec("select max(stepnr) from BuildSteps where build = $1", buildId).one_row();
+auto res = txn.exec_params1("select max(stepnr) from BuildSteps where build = $1", buildId);
 return res[0].is_null() ? 1 : res[0].as<int>() + 1;
 }
 
@@ -296,8 +275,9 @@ unsigned int State::createBuildStep(pqxx::work & txn, time_t startTime, BuildID
 restart:
 auto stepNr = allocBuildStep(txn, buildId);
 
-auto r = txn.exec("insert into BuildSteps (build, stepnr, type, drvPath, busy, startTime, system, status, propagatedFrom, errorMsg, stopTime, machine) values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) on conflict do nothing",
+auto r = txn.exec_params
-pqxx::params{buildId,
+("insert into BuildSteps (build, stepnr, type, drvPath, busy, startTime, system, status, propagatedFrom, errorMsg, stopTime, machine) values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) on conflict do nothing",
+buildId,
 stepNr,
 0, // == build
 localStore->printStorePath(step->drvPath),
@@ -308,16 +288,17 @@ unsigned int State::createBuildStep(pqxx::work & txn, time_t startTime, BuildID
 propagatedFrom != 0 ? std::make_optional(propagatedFrom) : std::nullopt, // internal::params
 errorMsg != "" ? std::make_optional(errorMsg) : std::nullopt,
 startTime != 0 && status != bsBusy ? std::make_optional(startTime) : std::nullopt,
-machine});
+machine);
 
 if (r.affected_rows() == 0) goto restart;
 
 for (auto & [name, output] : getDestStore()->queryPartialDerivationOutputMap(step->drvPath, &*localStore))
-txn.exec("insert into BuildStepOutputs (build, stepnr, name, path) values ($1, $2, $3, $4)",
+txn.exec_params0
-pqxx::params{buildId, stepNr, name,
+("insert into BuildStepOutputs (build, stepnr, name, path) values ($1, $2, $3, $4)",
+buildId, stepNr, name,
 output
 ? std::optional { localStore->printStorePath(*output)}
-: std::nullopt}).no_rows();
+: std::nullopt);
 
 if (status == bsBusy)
 txn.exec(fmt("notify step_started, '%d\t%d'", buildId, stepNr));
@@ -328,10 +309,11 @@ unsigned int State::createBuildStep(pqxx::work & txn, time_t startTime, BuildID
 
 void State::updateBuildStep(pqxx::work & txn, BuildID buildId, unsigned int stepNr, StepState stepState)
 {
-if (txn.exec("update BuildSteps set busy = $1 where build = $2 and stepnr = $3 and busy != 0 and status is null",
+if (txn.exec_params
-pqxx::params{(int) stepState,
+("update BuildSteps set busy = $1 where build = $2 and stepnr = $3 and busy != 0 and status is null",
+(int) stepState,
 buildId,
-stepNr}).affected_rows() != 1)
+stepNr).affected_rows() != 1)
 throw Error("step %d of build %d is in an unexpected state", stepNr, buildId);
 }
 
@@ -341,27 +323,29 @@ void State::finishBuildStep(pqxx::work & txn, const RemoteResult & result,
 {
 assert(result.startTime);
 assert(result.stopTime);
-txn.exec("update BuildSteps set busy = 0, status = $1, errorMsg = $4, startTime = $5, stopTime = $6, machine = $7, overhead = $8, timesBuilt = $9, isNonDeterministic = $10 where build = $2 and stepnr = $3",
+txn.exec_params0
-pqxx::params{(int) result.stepStatus, buildId, stepNr,
+("update BuildSteps set busy = 0, status = $1, errorMsg = $4, startTime = $5, stopTime = $6, machine = $7, overhead = $8, timesBuilt = $9, isNonDeterministic = $10 where build = $2 and stepnr = $3",
+(int) result.stepStatus, buildId, stepNr,
 result.errorMsg != "" ? std::make_optional(result.errorMsg) : std::nullopt,
 result.startTime, result.stopTime,
 machine != "" ? std::make_optional(machine) : std::nullopt,
 result.overhead != 0 ? std::make_optional(result.overhead) : std::nullopt,
 result.timesBuilt > 0 ? std::make_optional(result.timesBuilt) : std::nullopt,
-result.timesBuilt > 1 ? std::make_optional(result.isNonDeterministic) : std::nullopt}).no_rows();
+result.timesBuilt > 1 ? std::make_optional(result.isNonDeterministic) : std::nullopt);
 assert(result.logFile.find('\t') == std::string::npos);
 txn.exec(fmt("notify step_finished, '%d\t%d\t%s'",
 buildId, stepNr, result.logFile));
 
 if (result.stepStatus == bsSuccess) {
 // Update the corresponding `BuildStepOutputs` row to add the output path
-auto res = txn.exec("select drvPath from BuildSteps where build = $1 and stepnr = $2", pqxx::params{buildId, stepNr}).one_row();
+auto res = txn.exec_params1("select drvPath from BuildSteps where build = $1 and stepnr = $2", buildId, stepNr);
 assert(res.size());
 StorePath drvPath = localStore->parseStorePath(res[0].as<std::string>());
 // If we've finished building, all the paths should be known
 for (auto & [name, output] : getDestStore()->queryDerivationOutputMap(drvPath, &*localStore))
-txn.exec("update BuildStepOutputs set path = $4 where build = $1 and stepnr = $2 and name = $3",
+txn.exec_params0
-pqxx::params{buildId, stepNr, name, localStore->printStorePath(output)}).no_rows();
+("update BuildStepOutputs set path = $4 where build = $1 and stepnr = $2 and name = $3",
+buildId, stepNr, name, localStore->printStorePath(output));
 }
 }
 
@@ -372,21 +356,23 @@ int State::createSubstitutionStep(pqxx::work & txn, time_t startTime, time_t sto
 restart:
 auto stepNr = allocBuildStep(txn, build->id);
 
-auto r = txn.exec("insert into BuildSteps (build, stepnr, type, drvPath, busy, status, startTime, stopTime) values ($1, $2, $3, $4, $5, $6, $7, $8) on conflict do nothing",
+auto r = txn.exec_params
-pqxx::params{build->id,
+("insert into BuildSteps (build, stepnr, type, drvPath, busy, status, startTime, stopTime) values ($1, $2, $3, $4, $5, $6, $7, $8) on conflict do nothing",
+build->id,
 stepNr,
 1, // == substitution
 (localStore->printStorePath(drvPath)),
 0,
 0,
 startTime,
-stopTime});
+stopTime);
 
 if (r.affected_rows() == 0) goto restart;
 
-txn.exec("insert into BuildStepOutputs (build, stepnr, name, path) values ($1, $2, $3, $4)",
+txn.exec_params0
-pqxx::params{build->id, stepNr, outputName,
+("insert into BuildStepOutputs (build, stepnr, name, path) values ($1, $2, $3, $4)",
-localStore->printStorePath(storePath)}).no_rows();
+build->id, stepNr, outputName,
+localStore->printStorePath(storePath));
 
 return stepNr;
 }
@@ -453,32 +439,35 @@ void State::markSucceededBuild(pqxx::work & txn, Build::ptr build,
 {
 if (build->finishedInDB) return;
 
-if (txn.exec("select 1 from Builds where id = $1 and finished = 0", pqxx::params{build->id}).empty()) return;
+if (txn.exec_params("select 1 from Builds where id = $1 and finished = 0", build->id).empty()) return;
 
-txn.exec("update Builds set finished = 1, buildStatus = $2, startTime = $3, stopTime = $4, size = $5, closureSize = $6, releaseName = $7, isCachedBuild = $8, notificationPendingSince = $4 where id = $1",
+txn.exec_params0
-pqxx::params{build->id,
+("update Builds set finished = 1, buildStatus = $2, startTime = $3, stopTime = $4, size = $5, closureSize = $6, releaseName = $7, isCachedBuild = $8, notificationPendingSince = $4 where id = $1",
+build->id,
 (int) (res.failed ? bsFailedWithOutput : bsSuccess),
 startTime,
 stopTime,
 res.size,
 res.closureSize,
 res.releaseName != "" ? std::make_optional(res.releaseName) : std::nullopt,
-isCachedBuild ? 1 : 0}).no_rows();
+isCachedBuild ? 1 : 0);
 
 for (auto & [outputName, outputPath] : res.outputs) {
-txn.exec("update BuildOutputs set path = $3 where build = $1 and name = $2",
+txn.exec_params0
-pqxx::params{build->id,
+("update BuildOutputs set path = $3 where build = $1 and name = $2",
+build->id,
 outputName,
-localStore->printStorePath(outputPath)}
+localStore->printStorePath(outputPath)
-).no_rows();
+);
 }
 
-txn.exec("delete from BuildProducts where build = $1", pqxx::params{build->id}).no_rows();
+txn.exec_params0("delete from BuildProducts where build = $1", build->id);
 
 unsigned int productNr = 1;
 for (auto & product : res.products) {
-txn.exec("insert into BuildProducts (build, productnr, type, subtype, fileSize, sha256hash, path, name, defaultPath) values ($1, $2, $3, $4, $5, $6, $7, $8, $9)",
+txn.exec_params0
-pqxx::params{build->id,
+("insert into BuildProducts (build, productnr, type, subtype, fileSize, sha256hash, path, name, defaultPath) values ($1, $2, $3, $4, $5, $6, $7, $8, $9)",
+build->id,
 productNr++,
 product.type,
 product.subtype,
@@ -486,21 +475,22 @@ void State::markSucceededBuild(pqxx::work & txn, Build::ptr build,
 product.sha256hash ? std::make_optional(product.sha256hash->to_string(HashFormat::Base16, false)) : std::nullopt,
 product.path,
 product.name,
-product.defaultPath}).no_rows();
+product.defaultPath);
 }
 
-txn.exec("delete from BuildMetrics where build = $1", pqxx::params{build->id}).no_rows();
+txn.exec_params0("delete from BuildMetrics where build = $1", build->id);
 
 for (auto & metric : res.metrics) {
-txn.exec("insert into BuildMetrics (build, name, unit, value, project, jobset, job, timestamp) values ($1, $2, $3, $4, $5, $6, $7, $8)",
+txn.exec_params0
-pqxx::params{build->id,
+("insert into BuildMetrics (build, name, unit, value, project, jobset, job, timestamp) values ($1, $2, $3, $4, $5, $6, $7, $8)",
+build->id,
 metric.second.name,
 metric.second.unit != "" ? std::make_optional(metric.second.unit) : std::nullopt,
 metric.second.value,
 build->projectName,
 build->jobsetName,
 build->jobName,
-build->timestamp}).no_rows();
+build->timestamp);
 }
 
 nrBuildsDone++;
@@ -512,7 +502,7 @@ bool State::checkCachedFailure(Step::ptr step, Connection & conn)
 pqxx::work txn(conn);
 for (auto & i : step->drv->outputsAndOptPaths(*localStore))
 if (i.second.second)
-if (!txn.exec("select 1 from FailedPaths where path = $1", pqxx::params{localStore->printStorePath(*i.second.second)}).empty())
+if (!txn.exec_params("select 1 from FailedPaths where path = $1", localStore->printStorePath(*i.second.second)).empty())
 return true;
 return false;
 }
@@ -561,7 +551,6 @@ void State::dumpStatus(Connection & conn)
 {"nrActiveSteps", activeSteps_.lock()->size()},
 {"nrStepsBuilding", nrStepsBuilding.load()},
 {"nrStepsCopyingTo", nrStepsCopyingTo.load()},
-{"nrStepsWaitingForDownloadSlot", nrStepsWaitingForDownloadSlot.load()},
 {"nrStepsCopyingFrom", nrStepsCopyingFrom.load()},
 {"nrStepsWaiting", nrStepsWaiting.load()},
 {"nrUnsupportedSteps", nrUnsupportedSteps.load()},
@@ -603,7 +592,6 @@ void State::dumpStatus(Connection & conn)
 }
 
 {
-auto machines_json = json::object();
 auto machines_(machines.lock());
 for (auto & i : *machines_) {
 auto & m(i.second);
@@ -630,9 +618,8 @@ void State::dumpStatus(Connection & conn)
 machine["avgStepTime"] = (float) s->totalStepTime / s->nrStepsDone;
 machine["avgStepBuildTime"] = (float) s->totalStepBuildTime / s->nrStepsDone;
 }
-machines_json[m->storeUri.render()] = machine;
+statusJson["machines"][m->storeUri.render()] = machine;
 }
-statusJson["machines"] = machines_json;
 }
 
 {
@@ -691,7 +678,6 @@ void State::dumpStatus(Connection & conn)
 : 0.0},
 };
 
-#if NIX_WITH_S3_SUPPORT
 auto s3Store = dynamic_cast<S3BinaryCacheStore *>(&*store);
 if (s3Store) {
 auto & s3Stats = s3Store->getS3Stats();
@@ -717,15 +703,14 @@ void State::dumpStatus(Connection & conn)
 + s3Stats.getBytes / (1024.0 * 1024.0 * 1024.0) * 0.09},
 };
 }
-#endif
 }
 
 {
 auto mc = startDbUpdate();
 pqxx::work txn(conn);
 // FIXME: use PostgreSQL 9.5 upsert.
-txn.exec("delete from SystemStatus where what = 'queue-runner'").no_rows();
+txn.exec("delete from SystemStatus where what = 'queue-runner'");
-txn.exec("insert into SystemStatus values ('queue-runner', $1)", pqxx::params{statusJson.dump()}).no_rows();
+txn.exec_params0("insert into SystemStatus values ('queue-runner', $1)", statusJson.dump());
 txn.exec("notify status_dumped");
 txn.commit();
 }
@@ -790,7 +775,7 @@ void State::unlock()
 
 {
 pqxx::work txn(*conn);
-txn.exec("delete from SystemStatus where what = 'queue-runner'").no_rows();
+txn.exec("delete from SystemStatus where what = 'queue-runner'");
 txn.commit();
 }
 }
@@ -820,7 +805,7 @@ void State::run(BuildID buildOne)
 << metricsAddr << "/metrics (port " << exposerPort << ")"
 << std::endl;
 
-Store::Config::Params localParams;
+Store::Params localParams;
 localParams["max-connections"] = "16";
 localParams["max-connection-age"] = "600";
 localStore = openStore(getEnv("NIX_REMOTE").value_or(""), localParams);
@@ -868,10 +853,11 @@ void State::run(BuildID buildOne)
 pqxx::work txn(*conn);
 for (auto & step : steps) {
 printMsg(lvlError, "cleaning orphaned step %d of build %d", step.second, step.first);
-txn.exec("update BuildSteps set busy = 0, status = $1 where build = $2 and stepnr = $3 and busy != 0",
+txn.exec_params0
-pqxx::params{(int) bsAborted,
+("update BuildSteps set busy = 0, status = $1 where build = $2 and stepnr = $3 and busy != 0",
+(int) bsAborted,
 step.first,
-step.second}).no_rows();
+step.second);
 }
 txn.commit();
 } catch (std::exception & e) {
@@ -13,9 +13,7 @@ hydra_queue_runner = executable('hydra-queue-runner',
 srcs,
 dependencies: [
 libhydra_dep,
-nix_util_dep,
+nix_dep,
-nix_store_dep,
-nix_main_dep,
 pqxx_dep,
 prom_cpp_core_dep,
 prom_cpp_pull_dep,
@@ -1,6 +1,6 @@
 #include "nar-extractor.hh"
 
-#include <nix/util/archive.hh>
+#include "archive.hh"
 
 #include <unordered_set>
 
@@ -1,9 +1,9 @@
 #pragma once
 
-#include <nix/util/source-accessor.hh>
+#include "source-accessor.hh"
-#include <nix/util/types.hh>
+#include "types.hh"
-#include <nix/util/serialise.hh>
+#include "serialise.hh"
-#include <nix/util/hash.hh>
+#include "hash.hh"
 
 struct NarMemberData
 {
@@ -1,11 +1,8 @@
 #include "state.hh"
 #include "hydra-build-result.hh"
-#include <nix/store/globals.hh>
+#include "globals.hh"
-#include <nix/store/parsed-derivations.hh>
-#include <nix/util/thread-pool.hh>
 
 #include <cstring>
-#include <signal.h>
 
 using namespace nix;
 
@@ -40,21 +37,16 @@ void State::queueMonitorLoop(Connection & conn)
 
 auto destStore = getDestStore();
 
+unsigned int lastBuildId = 0;
+
 bool quit = false;
 while (!quit) {
-auto t_before_work = std::chrono::steady_clock::now();
-
 localStore->clearPathInfoCache();
 
-bool done = getQueuedBuilds(conn, destStore);
+bool done = getQueuedBuilds(conn, destStore, lastBuildId);
 
 if (buildOne && buildOneDone) quit = true;
 
-auto t_after_work = std::chrono::steady_clock::now();
-
-prom.queue_monitor_time_spent_running.Increment(
-std::chrono::duration_cast<std::chrono::microseconds>(t_after_work - t_before_work).count());
-
 /* Sleep until we get notification from the database about an
 event. */
 if (done && !quit) {
@@ -64,10 +56,12 @@ void State::queueMonitorLoop(Connection & conn)
 conn.get_notifs();
 
 if (auto lowestId = buildsAdded.get()) {
+lastBuildId = std::min(lastBuildId, static_cast<unsigned>(std::stoul(*lowestId) - 1));
 printMsg(lvlTalkative, "got notification: new builds added to the queue");
 }
 if (buildsRestarted.get()) {
 printMsg(lvlTalkative, "got notification: builds restarted");
+lastBuildId = 0; // check all builds
 }
 if (buildsCancelled.get() || buildsDeleted.get() || buildsBumped.get()) {
 printMsg(lvlTalkative, "got notification: builds cancelled or bumped");
@@ -77,10 +71,6 @@ void State::queueMonitorLoop(Connection & conn)
 printMsg(lvlTalkative, "got notification: jobset shares changed");
 processJobsetSharesChange(conn);
 }
-
-auto t_after_sleep = std::chrono::steady_clock::now();
-prom.queue_monitor_time_spent_waiting.Increment(
-std::chrono::duration_cast<std::chrono::microseconds>(t_after_sleep - t_after_work).count());
 }
 
 exit(0);
@@ -94,31 +84,39 @@ struct PreviousFailure : public std::exception {
 
 
 bool State::getQueuedBuilds(Connection & conn,
-ref<Store> destStore)
+ref<Store> destStore, unsigned int & lastBuildId)
 {
 prom.queue_checks_started.Increment();
 
-printInfo("checking the queue for builds...");
+printInfo("checking the queue for builds > %d...", lastBuildId);
 
 /* Grab the queued builds from the database, but don't process
 them yet (since we don't want a long-running transaction). */
 std::vector<BuildID> newIDs;
-std::unordered_map<BuildID, Build::ptr> newBuildsByID;
+std::map<BuildID, Build::ptr> newBuildsByID;
 std::multimap<StorePath, BuildID> newBuildsByPath;
 
+unsigned int newLastBuildId = lastBuildId;
+
 {
 pqxx::work txn(conn);
 
-auto res = txn.exec("select builds.id, builds.jobset_id, jobsets.project as project, "
+auto res = txn.exec_params
+("select builds.id, builds.jobset_id, jobsets.project as project, "
 "jobsets.name as jobset, job, drvPath, maxsilent, timeout, timestamp, "
 "globalPriority, priority from Builds "
 "inner join jobsets on builds.jobset_id = jobsets.id "
-"where finished = 0 order by globalPriority desc, random()");
+"where builds.id > $1 and finished = 0 order by globalPriority desc, builds.id",
+lastBuildId);
 
 for (auto const & row : res) {
 auto builds_(builds.lock());
 BuildID id = row["id"].as<BuildID>();
 if (buildOne && id != buildOne) continue;
+if (id > newLastBuildId) {
+newLastBuildId = id;
+prom.queue_max_id.Set(id);
+}
 if (builds_->count(id)) continue;
 
 auto build = std::make_shared<Build>(
@@ -158,10 +156,11 @@ bool State::getQueuedBuilds(Connection & conn,
 if (!build->finishedInDB) {
 auto mc = startDbUpdate();
 pqxx::work txn(conn);
-txn.exec("update Builds set finished = 1, buildStatus = $2, startTime = $3, stopTime = $3 where id = $1 and finished = 0",
+txn.exec_params0
-pqxx::params{build->id,
+("update Builds set finished = 1, buildStatus = $2, startTime = $3, stopTime = $3 where id = $1 and finished = 0",
+build->id,
 (int) bsAborted,
-time(0)}).no_rows();
+time(0));
 txn.commit();
 build->finishedInDB = true;
 nrBuildsDone++;
@@ -191,20 +190,22 @@ bool State::getQueuedBuilds(Connection & conn,
 derivation path, then by output path. */
 BuildID propagatedFrom = 0;
 
-auto res = txn.exec("select max(build) from BuildSteps where drvPath = $1 and startTime != 0 and stopTime != 0 and status = 1",
+auto res = txn.exec_params1
-pqxx::params{localStore->printStorePath(ex.step->drvPath)}).one_row();
+("select max(build) from BuildSteps where drvPath = $1 and startTime != 0 and stopTime != 0 and status = 1",
+localStore->printStorePath(ex.step->drvPath));
 if (!res[0].is_null()) propagatedFrom = res[0].as<BuildID>();
 
 if (!propagatedFrom) {
 for (auto & [outputName, optOutputPath] : destStore->queryPartialDerivationOutputMap(ex.step->drvPath, &*localStore)) {
 constexpr std::string_view common = "select max(s.build) from BuildSteps s join BuildStepOutputs o on s.build = o.build where startTime != 0 and stopTime != 0 and status = 1";
 auto res = optOutputPath
-? txn.exec(
+? txn.exec_params(
 std::string { common } + " and path = $1",
-pqxx::params{localStore->printStorePath(*optOutputPath)})
+localStore->printStorePath(*optOutputPath))
-: txn.exec(
+: txn.exec_params(
 std::string { common } + " and drvPath = $1 and name = $2",
-pqxx::params{localStore->printStorePath(ex.step->drvPath), outputName});
+localStore->printStorePath(ex.step->drvPath),
+outputName);
 if (!res[0][0].is_null()) {
 propagatedFrom = res[0][0].as<BuildID>();
 break;
@@ -213,11 +214,12 @@ bool State::getQueuedBuilds(Connection & conn,
 }
 
 createBuildStep(txn, 0, build->id, ex.step, "", bsCachedFailure, "", propagatedFrom);
-txn.exec("update Builds set finished = 1, buildStatus = $2, startTime = $3, stopTime = $3, isCachedBuild = 1, notificationPendingSince = $3 "
+txn.exec_params
+("update Builds set finished = 1, buildStatus = $2, startTime = $3, stopTime = $3, isCachedBuild = 1, notificationPendingSince = $3 "
 "where id = $1 and finished = 0",
-pqxx::params{build->id,
+build->id,
 (int) (ex.step->drvPath == build->drvPath ? bsFailed : bsDepFailed),
-time(0)}).no_rows();
+time(0));
 notifyBuildFinished(txn, build->id, {});
 txn.commit();
 build->finishedInDB = true;
@@ -316,13 +318,15 @@ bool State::getQueuedBuilds(Connection & conn,
 
 /* Stop after a certain time to allow priority bumps to be
 processed. */
-if (std::chrono::system_clock::now() > start + std::chrono::seconds(60)) {
+if (std::chrono::system_clock::now() > start + std::chrono::seconds(600)) {
 prom.queue_checks_early_exits.Increment();
 break;
 }
 }
 
 prom.queue_checks_finished.Increment();
+
+lastBuildId = newBuildsByID.empty() ? newLastBuildId : newBuildsByID.begin()->first - 1;
 return newBuildsByID.empty();
 }
 
@@ -401,34 +405,6 @@ void State::processQueueChange(Connection & conn)
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
std::map<DrvOutput, std::optional<StorePath>> State::getMissingRemotePaths(
|
|
||||||
ref<Store> destStore,
|
|
||||||
const std::map<DrvOutput, std::optional<StorePath>> & paths)
|
|
||||||
{
|
|
||||||
Sync<std::map<DrvOutput, std::optional<StorePath>>> missing_;
|
|
||||||
ThreadPool tp;
|
|
||||||
|
|
||||||
for (auto & [output, maybeOutputPath] : paths) {
|
|
||||||
if (!maybeOutputPath) {
|
|
||||||
auto missing(missing_.lock());
|
|
||||||
missing->insert({output, maybeOutputPath});
|
|
||||||
} else {
|
|
||||||
tp.enqueue([&] {
|
|
||||||
if (!destStore->isValidPath(*maybeOutputPath)) {
|
|
||||||
auto missing(missing_.lock());
|
|
||||||
missing->insert({output, maybeOutputPath});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tp.process();
|
|
||||||
|
|
||||||
auto missing(missing_.lock());
|
|
||||||
return *missing;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Step::ptr State::createStep(ref<Store> destStore,
|
Step::ptr State::createStep(ref<Store> destStore,
|
||||||
Connection & conn, Build::ptr build, const StorePath & drvPath,
|
Connection & conn, Build::ptr build, const StorePath & drvPath,
|
||||||
Build::ptr referringBuild, Step::ptr referringStep, std::set<StorePath> & finishedDrvs,
|
Build::ptr referringBuild, Step::ptr referringStep, std::set<StorePath> & finishedDrvs,
|
||||||
@@ -487,23 +463,14 @@ Step::ptr State::createStep(ref<Store> destStore,
|
|||||||
it's not runnable yet, and other threads won't make it
|
it's not runnable yet, and other threads won't make it
|
||||||
runnable while step->created == false. */
|
runnable while step->created == false. */
|
||||||
step->drv = std::make_unique<Derivation>(localStore->readDerivation(drvPath));
|
step->drv = std::make_unique<Derivation>(localStore->readDerivation(drvPath));
|
||||||
{
|
step->parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *step->drv);
|
||||||
auto parsedOpt = StructuredAttrs::tryParse(step->drv->env);
|
|
||||||
try {
|
|
||||||
step->drvOptions = std::make_unique<DerivationOptions>(
|
|
||||||
DerivationOptions::fromStructuredAttrs(step->drv->env, parsedOpt ? &*parsedOpt : nullptr));
|
|
||||||
} catch (Error & e) {
|
|
||||||
e.addTrace({}, "while parsing derivation '%s'", localStore->printStorePath(drvPath));
|
|
||||||
throw;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
step->preferLocalBuild = step->drvOptions->willBuildLocally(*localStore, *step->drv);
|
step->preferLocalBuild = step->parsedDrv->willBuildLocally(*localStore);
|
||||||
step->isDeterministic = getOr(step->drv->env, "isDetermistic", "0") == "1";
|
step->isDeterministic = getOr(step->drv->env, "isDetermistic", "0") == "1";
|
||||||
|
|
||||||
step->systemType = step->drv->platform;
|
step->systemType = step->drv->platform;
|
||||||
{
|
{
|
||||||
StringSet features = step->requiredSystemFeatures = step->drvOptions->getRequiredSystemFeatures(*step->drv);
|
StringSet features = step->requiredSystemFeatures = step->parsedDrv->getRequiredSystemFeatures();
|
||||||
if (step->preferLocalBuild)
|
if (step->preferLocalBuild)
|
||||||
features.insert("local");
|
features.insert("local");
|
||||||
if (!features.empty()) {
|
if (!features.empty()) {
|
||||||
@@ -518,15 +485,16 @@ Step::ptr State::createStep(ref<Store> destStore,
|
|||||||
|
|
||||||
/* Are all outputs valid? */
|
/* Are all outputs valid? */
|
||||||
auto outputHashes = staticOutputHashes(*localStore, *(step->drv));
|
auto outputHashes = staticOutputHashes(*localStore, *(step->drv));
|
||||||
std::map<DrvOutput, std::optional<StorePath>> paths;
|
bool valid = true;
|
||||||
|
std::map<DrvOutput, std::optional<StorePath>> missing;
|
||||||
for (auto & [outputName, maybeOutputPath] : destStore->queryPartialDerivationOutputMap(drvPath, &*localStore)) {
|
for (auto & [outputName, maybeOutputPath] : destStore->queryPartialDerivationOutputMap(drvPath, &*localStore)) {
|
||||||
auto outputHash = outputHashes.at(outputName);
|
auto outputHash = outputHashes.at(outputName);
|
||||||
paths.insert({{outputHash, outputName}, maybeOutputPath});
|
if (maybeOutputPath && destStore->isValidPath(*maybeOutputPath))
|
||||||
|
continue;
|
||||||
|
valid = false;
|
||||||
|
missing.insert({{outputHash, outputName}, maybeOutputPath});
|
||||||
}
|
}
|
||||||
|
|
||||||
auto missing = getMissingRemotePaths(destStore, paths);
|
|
||||||
bool valid = missing.empty();
|
|
||||||
|
|
||||||
/* Try to copy the missing paths from the local store or from
|
/* Try to copy the missing paths from the local store or from
|
||||||
substitutes. */
|
substitutes. */
|
||||||
if (!missing.empty()) {
|
if (!missing.empty()) {
|
||||||
@@ -649,8 +617,10 @@ Jobset::ptr State::createJobset(pqxx::work & txn,
         if (i != jobsets_->end()) return i->second;
     }
 
-    auto res = txn.exec("select schedulingShares from Jobsets where id = $1",
-        pqxx::params{jobsetID}).one_row();
+    auto res = txn.exec_params1
+        ("select schedulingShares from Jobsets where id = $1",
+        jobsetID);
+    if (res.empty()) throw Error("missing jobset - can't happen");
 
     auto shares = res["schedulingShares"].as<unsigned int>();
 
@@ -658,10 +628,11 @@ Jobset::ptr State::createJobset(pqxx::work & txn,
     jobset->setShares(shares);
 
     /* Load the build steps from the last 24 hours. */
-    auto res2 = txn.exec("select s.startTime, s.stopTime from BuildSteps s join Builds b on build = id "
+    auto res2 = txn.exec_params
+        ("select s.startTime, s.stopTime from BuildSteps s join Builds b on build = id "
         "where s.startTime is not null and s.stopTime > $1 and jobset_id = $2",
-        pqxx::params{time(0) - Jobset::schedulingWindow * 10,
-        jobsetID});
+        time(0) - Jobset::schedulingWindow * 10,
+        jobsetID);
     for (auto const & row : res2) {
         time_t startTime = row["startTime"].as<time_t>();
         time_t stopTime = row["stopTime"].as<time_t>();
@@ -698,10 +669,11 @@ BuildOutput State::getBuildOutputCached(Connection & conn, nix::ref<nix::Store>
     pqxx::work txn(conn);
 
     for (auto & [name, output] : derivationOutputs) {
-        auto r = txn.exec("select id, buildStatus, releaseName, closureSize, size from Builds b "
+        auto r = txn.exec_params
+            ("select id, buildStatus, releaseName, closureSize, size from Builds b "
             "join BuildOutputs o on b.id = o.build "
             "where finished = 1 and (buildStatus = 0 or buildStatus = 6) and path = $1",
-            pqxx::params{localStore->printStorePath(output)});
+            localStore->printStorePath(output));
         if (r.empty()) continue;
         BuildID id = r[0][0].as<BuildID>();
 
@@ -713,8 +685,9 @@ BuildOutput State::getBuildOutputCached(Connection & conn, nix::ref<nix::Store>
         res.closureSize = r[0][3].is_null() ? 0 : r[0][3].as<uint64_t>();
         res.size = r[0][4].is_null() ? 0 : r[0][4].as<uint64_t>();
 
-        auto products = txn.exec("select type, subtype, fileSize, sha256hash, path, name, defaultPath from BuildProducts where build = $1 order by productnr",
-            pqxx::params{id});
+        auto products = txn.exec_params
+            ("select type, subtype, fileSize, sha256hash, path, name, defaultPath from BuildProducts where build = $1 order by productnr",
+            id);
 
         for (auto row : products) {
             BuildProduct product;
@@ -736,8 +709,9 @@ BuildOutput State::getBuildOutputCached(Connection & conn, nix::ref<nix::Store>
             res.products.emplace_back(product);
         }
 
-        auto metrics = txn.exec("select name, unit, value from BuildMetrics where build = $1",
-            pqxx::params{id});
+        auto metrics = txn.exec_params
+            ("select name, unit, value from BuildMetrics where build = $1",
+            id);
 
         for (auto row : metrics) {
             BuildMetric metric;
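The hunks above switch between two libpqxx spellings of the same parameterized query: the removed lines use the newer txn.exec("...", pqxx::params{...}) form, the added lines the classic exec_params()/exec_params1(). A hedged, self-contained sketch of the exec_params style follows; the connection string and the literal jobset id are illustrative, not Hydra's configuration.

// Minimal libpqxx sketch of a server-side bound parameter ($1), libpqxx 6/7.
#include <iostream>
#include <pqxx/pqxx>

int main()
{
    pqxx::connection conn("dbname=hydra");   // assumed connection string
    pqxx::work txn(conn);

    // $1 is bound by the server, so no string splicing is needed.
    auto res = txn.exec_params(
        "select schedulingShares from Jobsets where id = $1",
        42);

    for (auto const & row : res)
        std::cout << row["schedulingShares"].as<unsigned int>() << '\n';

    txn.commit();
}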
@@ -6,8 +6,6 @@
 #include <map>
 #include <memory>
 #include <queue>
-#include <regex>
-#include <semaphore>
 
 #include <prometheus/counter.h>
 #include <prometheus/gauge.h>
@@ -15,18 +13,17 @@
 
 #include "db.hh"
 
-#include <nix/store/derivations.hh>
-#include <nix/store/derivation-options.hh>
-#include <nix/store/pathlocks.hh>
-#include <nix/util/pool.hh>
-#include <nix/store/build-result.hh>
-#include <nix/store/store-api.hh>
-#include <nix/util/sync.hh>
+#include "parsed-derivations.hh"
+#include "pathlocks.hh"
+#include "pool.hh"
+#include "build-result.hh"
+#include "store-api.hh"
+#include "sync.hh"
 #include "nar-extractor.hh"
-#include <nix/store/serve-protocol.hh>
-#include <nix/store/serve-protocol-impl.hh>
-#include <nix/store/serve-protocol-connection.hh>
-#include <nix/store/machines.hh>
+#include "serve-protocol.hh"
+#include "serve-protocol-impl.hh"
+#include "serve-protocol-connection.hh"
+#include "machines.hh"
 
 
 typedef unsigned int BuildID;
@@ -60,7 +57,6 @@ typedef enum {
     ssConnecting = 10,
     ssSendingInputs = 20,
     ssBuilding = 30,
-    ssWaitingForLocalSlot = 35,
     ssReceivingOutputs = 40,
     ssPostProcessing = 50,
 } StepState;
@@ -171,8 +167,8 @@ struct Step
 
     nix::StorePath drvPath;
     std::unique_ptr<nix::Derivation> drv;
-    std::unique_ptr<nix::DerivationOptions> drvOptions;
-    nix::StringSet requiredSystemFeatures;
+    std::unique_ptr<nix::ParsedDerivation> parsedDrv;
+    std::set<std::string> requiredSystemFeatures;
     bool preferLocalBuild;
     bool isDeterministic;
     std::string systemType; // concatenation of drv.platform and requiredSystemFeatures
@@ -356,10 +352,6 @@ private:
     typedef std::map<nix::StoreReference::Variant, Machine::ptr> Machines;
     nix::Sync<Machines> machines; // FIXME: use atomic_shared_ptr
 
-    /* Throttler for CPU-bound local work. */
-    static constexpr unsigned int maxSupportedLocalWorkers = 1024;
-    std::counting_semaphore<maxSupportedLocalWorkers> localWorkThrottler;
-
     /* Various stats. */
     time_t startedAt;
     counter nrBuildsRead{0};
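The localWorkThrottler member that differs between the two sides above is a std::counting_semaphore used to cap how much CPU-bound local work runs at once. A minimal sketch of that throttling pattern is shown below (C++20); the worker count and the placeholder work are illustrative, not the queue runner's actual logic.

// Minimal std::counting_semaphore throttle: at most maxLocalWorkers jobs run at once.
#include <semaphore>
#include <thread>
#include <vector>

constexpr unsigned int maxLocalWorkers = 16;            // illustrative limit
std::counting_semaphore<maxLocalWorkers> throttler(maxLocalWorkers);

void doLocalWork(int job)
{
    throttler.acquire();          // blocks once maxLocalWorkers jobs are busy
    // ... CPU-bound work for `job` would run here ...
    throttler.release();
}

int main()
{
    std::vector<std::jthread> workers;
    for (int i = 0; i < 64; ++i)
        workers.emplace_back(doLocalWork, i);   // jthreads join on destruction
}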
@@ -369,7 +361,6 @@ private:
     counter nrStepsDone{0};
     counter nrStepsBuilding{0};
     counter nrStepsCopyingTo{0};
-    counter nrStepsWaitingForDownloadSlot{0};
     counter nrStepsCopyingFrom{0};
     counter nrStepsWaiting{0};
     counter nrUnsupportedSteps{0};
@@ -400,6 +391,7 @@ private:
 
     struct MachineReservation
     {
+        typedef std::shared_ptr<MachineReservation> ptr;
         State & state;
         Step::ptr step;
         Machine::ptr machine;
@@ -457,12 +449,7 @@ private:
     prometheus::Counter& queue_steps_created;
     prometheus::Counter& queue_checks_early_exits;
     prometheus::Counter& queue_checks_finished;
-    prometheus::Counter& dispatcher_time_spent_running;
-    prometheus::Counter& dispatcher_time_spent_waiting;
-
-    prometheus::Counter& queue_monitor_time_spent_running;
-    prometheus::Counter& queue_monitor_time_spent_waiting;
+    prometheus::Gauge& queue_max_id;
 
     PromMetrics();
 };
@@ -506,7 +493,8 @@ private:
     void queueMonitorLoop(Connection & conn);
 
     /* Check the queue for new builds. */
-    bool getQueuedBuilds(Connection & conn, nix::ref<nix::Store> destStore);
+    bool getQueuedBuilds(Connection & conn,
+        nix::ref<nix::Store> destStore, unsigned int & lastBuildId);
 
     /* Handle cancellation, deletion and priority bumps. */
     void processQueueChange(Connection & conn);
@@ -514,12 +502,6 @@ private:
     BuildOutput getBuildOutputCached(Connection & conn, nix::ref<nix::Store> destStore,
         const nix::StorePath & drvPath);
 
-    /* Returns paths missing from the remote store. Paths are processed in
-     * parallel to work around the possible latency of remote stores. */
-    std::map<nix::DrvOutput, std::optional<nix::StorePath>> getMissingRemotePaths(
-        nix::ref<nix::Store> destStore,
-        const std::map<nix::DrvOutput, std::optional<nix::StorePath>> & paths);
-
     Step::ptr createStep(nix::ref<nix::Store> store,
         Connection & conn, Build::ptr build, const nix::StorePath & drvPath,
         Build::ptr referringBuild, Step::ptr referringStep, std::set<nix::StorePath> & finishedDrvs,
@@ -549,17 +531,16 @@ private:
 
     void abortUnsupported();
 
-    void builder(std::unique_ptr<MachineReservation> reservation);
+    void builder(MachineReservation::ptr reservation);
 
     /* Perform the given build step. Return true if the step is to be
        retried. */
     enum StepResult { sDone, sRetry, sMaybeCancelled };
     StepResult doBuildStep(nix::ref<nix::Store> destStore,
-        std::unique_ptr<MachineReservation> reservation,
+        MachineReservation::ptr reservation,
         std::shared_ptr<ActiveStep> activeStep);
 
     void buildRemote(nix::ref<nix::Store> destStore,
-        std::unique_ptr<MachineReservation> reservation,
         Machine::ptr machine, Step::ptr step,
         const nix::ServeProto::BuildOptions & buildOptions,
         RemoteResult & result, std::shared_ptr<ActiveStep> activeStep,
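The builder()/doBuildStep()/buildRemote() signatures above differ in how the machine reservation is passed: one side hands over a std::unique_ptr, the other a shared_ptr typedef. A hedged sketch of the unique_ptr style follows, with MachineReservation as a stand-in type rather than Hydra's real class; the point is that ownership moves into the worker, so the slot is released exactly once when the worker finishes.

// Sketch: move-only ownership of a reservation handed to a builder thread.
#include <memory>
#include <thread>
#include <utility>

struct MachineReservation {
    ~MachineReservation() { /* frees the build slot */ }
};

void builder(std::unique_ptr<MachineReservation> reservation)
{
    // ... perform the build step ...
}   // reservation destroyed here, slot freed exactly once

int main()
{
    auto r = std::make_unique<MachineReservation>();
    std::thread t(builder, std::move(r));   // ownership moves with the call
    t.join();
}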
@@ -12,9 +12,6 @@ use DateTime;
 use Digest::SHA qw(sha256_hex);
 use Text::Diff;
 use IPC::Run qw(run);
-use Digest::SHA qw(hmac_sha256_hex);
-use String::Compare::ConstantTime qw(equals);
-use IPC::Run3;
 
 
 sub api : Chained('/') PathPart('api') CaptureArgs(0) {
@@ -219,13 +216,8 @@ sub scmdiff : Path('/api/scmdiff') Args(0) {
     } elsif ($type eq "git") {
         my $clonePath = getSCMCacheDir . "/git/" . sha256_hex($uri);
         die if ! -d $clonePath;
-        my ($stdout1, $stderr1);
-        run3(['git', '-C', $clonePath, 'log', "$rev1..$rev2"], \undef, \$stdout1, \$stderr1);
-        $diff .= $stdout1 if $? == 0;
-
-        my ($stdout2, $stderr2);
-        run3(['git', '-C', $clonePath, 'diff', "$rev1..$rev2"], \undef, \$stdout2, \$stderr2);
-        $diff .= $stdout2 if $? == 0;
+        $diff .= `(cd $clonePath; git --git-dir .git log $rev1..$rev2)`;
+        $diff .= `(cd $clonePath; git --git-dir .git diff $rev1..$rev2)`;
     }
 
     $c->stash->{'plain'} = { data => (scalar $diff) || " " };
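The scmdiff hunk above moves between shell backticks and IPC::Run3: run3 takes the command as an argument list (so nothing passes through a shell), captures stdout and stderr into scalars, and leaves the child's exit status in $?. A minimal sketch of that pattern, with illustrative values for the clone path and revisions:

# Sketch: capture `git log` output without invoking a shell.
use strict;
use warnings;
use IPC::Run3;

my $clonePath = "/path/to/clone";          # illustrative value
my ($rev1, $rev2) = ("abc123", "def456");  # illustrative revisions

my ($stdout, $stderr);
run3(['git', '-C', $clonePath, 'log', "$rev1..$rev2"], \undef, \$stdout, \$stderr);
die "git log failed: $stderr\n" if $? != 0;

print $stdout;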
@@ -282,84 +274,13 @@ sub push : Chained('api') PathPart('push') Args(0) {
     );
 }
 
-sub verifyWebhookSignature {
-    my ($c, $platform, $header_name, $signature_prefix) = @_;
-
-    # Get secrets from config
-    my $webhook_config = $c->config->{webhooks} // {};
-    my $platform_config = $webhook_config->{$platform} // {};
-    my $secrets = $platform_config->{secret};
-
-    # Normalize to array
-    $secrets = [] unless defined $secrets;
-    $secrets = [$secrets] unless ref($secrets) eq 'ARRAY';
-
-    # Trim whitespace from secrets
-    my @secrets = grep { defined && length } map { s/^\s+|\s+$//gr } @$secrets;
-
-    if (@secrets) {
-        my $signature = $c->request->header($header_name);
-
-        if (!$signature) {
-            $c->log->warn("Webhook authentication failed for $platform: Missing signature from IP " . $c->request->address);
-            $c->response->status(401);
-            $c->stash->{json} = { error => "Missing webhook signature" };
-            $c->forward('View::JSON');
-            return 0;
-        }
-
-        # Get the raw body content from the buffered PSGI input
-        # For JSON requests, Catalyst will have already read and buffered the body
-        my $input = $c->request->env->{'psgi.input'};
-        $input->seek(0, 0);
-        local $/;
-        my $payload = <$input>;
-        $input->seek(0, 0); # Reset for any other consumers
-
-        unless (defined $payload && length $payload) {
-            $c->log->warn("Webhook authentication failed for $platform: Empty request body from IP " . $c->request->address);
-            $c->response->status(400);
-            $c->stash->{json} = { error => "Empty request body" };
-            $c->forward('View::JSON');
-            return 0;
-        }
-
-        my $valid = 0;
-        for my $secret (@secrets) {
-            my $expected = $signature_prefix . hmac_sha256_hex($payload, $secret);
-            if (equals($signature, $expected)) {
-                $valid = 1;
-                last;
-            }
-        }
-
-        if (!$valid) {
-            $c->log->warn("Webhook authentication failed for $platform: Invalid signature from IP " . $c->request->address);
-            $c->response->status(401);
-            $c->stash->{json} = { error => "Invalid webhook signature" };
-            $c->forward('View::JSON');
-            return 0;
-        }
-
-        return 1;
-    } else {
-        $c->log->warn("Webhook authentication failed for $platform: Unable to validate signature from IP " . $c->request->address . " because no secrets are configured");
-        $c->response->status(401);
-        $c->stash->{json} = { error => "Invalid webhook signature" };
-        $c->forward('View::JSON');
-        return 0;
-    }
-}
-
 sub push_github : Chained('api') PathPart('push-github') Args(0) {
     my ($self, $c) = @_;
 
     $c->{stash}->{json}->{jobsetsTriggered} = [];
 
-    return unless verifyWebhookSignature($c, 'github', 'X-Hub-Signature-256', 'sha256=');
-
     my $in = $c->request->{data};
-    my $owner = ($in->{repository}->{owner}->{name} // $in->{repository}->{owner}->{login}) or die;
+    my $owner = $in->{repository}->{owner}->{name} or die;
     my $repo = $in->{repository}->{name} or die;
     print STDERR "got push from GitHub repository $owner/$repo\n";
 
|
|||||||
|
|
||||||
$c->{stash}->{json}->{jobsetsTriggered} = [];
|
$c->{stash}->{json}->{jobsetsTriggered} = [];
|
||||||
|
|
||||||
# Note: Gitea doesn't use sha256= prefix
|
|
||||||
return unless verifyWebhookSignature($c, 'gitea', 'X-Gitea-Signature', '');
|
|
||||||
|
|
||||||
my $in = $c->request->{data};
|
my $in = $c->request->{data};
|
||||||
my $url = $in->{repository}->{clone_url} or die;
|
my $url = $in->{repository}->{clone_url} or die;
|
||||||
$url =~ s/.git$//;
|
$url =~ s/.git$//;
|
||||||
|
@@ -13,8 +13,6 @@ use Data::Dump qw(dump);
 use List::SomeUtils qw(all);
 use Encode;
 use JSON::PP;
-use IPC::Run qw(run);
-use IPC::Run3;
 use WWW::Form::UrlEncoded::PP qw();
 
 use feature 'state';
@@ -240,7 +238,7 @@ sub serveFile {
     # XSS hole.
     $c->response->header('Content-Security-Policy' => 'sandbox allow-scripts');
 
-    $c->stash->{'plain'} = { data => readIntoSocket(cmd => ["nix", "--experimental-features", "nix-command",
+    $c->stash->{'plain'} = { data => grab(cmd => ["nix", "--experimental-features", "nix-command",
         "store", "cat", "--store", getStoreUri(), "$path"]) };
 
     # Detect MIME type.
@@ -350,21 +348,19 @@ sub contents : Chained('buildChain') PathPart Args(1) {
 
     notFound($c, "Product $path has disappeared.") unless -e $path;
 
+    # Sanitize $path to prevent shell injection attacks.
+    $path =~ /^\/[\/[A-Za-z0-9_\-\.=+:]+$/ or die "Filename contains illegal characters.\n";
+
+    # FIXME: don't use shell invocations below.
+
     # FIXME: use nix store cat
 
     my $res;
 
     if ($product->type eq "nix-build" && -d $path) {
         # FIXME: use nix ls-store -R --json
-        # We need to use a pipe between find and xargs, so we'll use IPC::Run
-        my $error;
-        # Run find with absolute path and post-process to get relative paths
-        my $success = run(['find', $path, '-print0'], '|', ['xargs', '-0', 'ls', '-ld', '--'], \$res, \$error);
-        error($c, "`find $path -print0 | xargs -0 ls -ld --' error: $error") unless $success;
-
-        # Strip the base path to show relative paths
-        my $escaped_path = quotemeta($path);
-        $res =~ s/^(.*\s)$escaped_path(\/|$)/$1.$2/mg;
+        $res = `cd '$path' && find . -print0 | xargs -0 ls -ld --`;
+        error($c, "`ls -lR' error: $?") if $? != 0;
 
         #my $baseuri = $c->uri_for('/build', $c->stash->{build}->id, 'download', $product->productnr);
         #$baseuri .= "/".$product->name if $product->name;
@@ -372,59 +368,34 @@ sub contents : Chained('buildChain') PathPart Args(1) {
     }
 
     elsif ($path =~ /\.rpm$/) {
-        my ($stdout1, $stderr1);
-        run3(['rpm', '--query', '--info', '--package', $path], \undef, \$stdout1, \$stderr1);
-        error($c, "RPM error: $stderr1") if $? != 0;
-        $res = $stdout1;
+        $res = `rpm --query --info --package '$path'`;
+        error($c, "RPM error: $?") if $? != 0;
 
         $res .= "===\n";
-        my ($stdout2, $stderr2);
-        run3(['rpm', '--query', '--list', '--verbose', '--package', $path], \undef, \$stdout2, \$stderr2);
-        error($c, "RPM error: $stderr2") if $? != 0;
-        $res .= $stdout2;
+        $res .= `rpm --query --list --verbose --package '$path'`;
+        error($c, "RPM error: $?") if $? != 0;
     }
 
     elsif ($path =~ /\.deb$/) {
-        my ($stdout1, $stderr1);
-        run3(['dpkg-deb', '--info', $path], \undef, \$stdout1, \$stderr1);
-        error($c, "`dpkg-deb' error: $stderr1") if $? != 0;
-        $res = $stdout1;
+        $res = `dpkg-deb --info '$path'`;
+        error($c, "`dpkg-deb' error: $?") if $? != 0;
 
         $res .= "===\n";
-        my ($stdout2, $stderr2);
-        run3(['dpkg-deb', '--contents', $path], \undef, \$stdout2, \$stderr2);
-        error($c, "`dpkg-deb' error: $stderr2") if $? != 0;
-        $res .= $stdout2;
+        $res .= `dpkg-deb --contents '$path'`;
+        error($c, "`dpkg-deb' error: $?") if $? != 0;
     }
 
     elsif ($path =~ /\.(tar(\.gz|\.bz2|\.xz|\.lzma)?|tgz)$/ ) {
-        my ($stdout, $stderr);
-        run3(['tar', 'tvfa', $path], \undef, \$stdout, \$stderr);
-        error($c, "`tar' error: $stderr") if $? != 0;
-        $res = $stdout;
+        $res = `tar tvfa '$path'`;
+        error($c, "`tar' error: $?") if $? != 0;
     }
 
     elsif ($path =~ /\.(zip|jar)$/ ) {
-        my ($stdout, $stderr);
-        run3(['unzip', '-v', $path], \undef, \$stdout, \$stderr);
-        error($c, "`unzip' error: $stderr") if $? != 0;
-        $res = $stdout;
+        $res = `unzip -v '$path'`;
+        error($c, "`unzip' error: $?") if $? != 0;
     }
 
     elsif ($path =~ /\.iso$/ ) {
-        # Run first isoinfo command
-        my ($stdout1, $stderr1);
-        run3(['isoinfo', '-d', '-i', $path], \undef, \$stdout1, \$stderr1);
-        error($c, "`isoinfo' error: $stderr1") if $? != 0;
-        $res = $stdout1;
-
-        # Run second isoinfo command
-        my ($stdout2, $stderr2);
-        run3(['isoinfo', '-l', '-R', '-i', $path], \undef, \$stdout2, \$stderr2);
-        error($c, "`isoinfo' error: $stderr2") if $? != 0;
-        $res .= $stdout2;
+        $res = `isoinfo -d -i '$path' && isoinfo -l -R -i '$path'`;
+        error($c, "`isoinfo' error: $?") if $? != 0;
     }
 
     else {
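Each archive branch above repeats the same IPC::Run3 steps: declare capture scalars, run the list-form command, check $?, and append the output. A small helper (hypothetical, not part of Hydra) makes that shared pattern explicit:

# Sketch: run a command without a shell, return stdout, die with stderr on failure.
use strict;
use warnings;
use IPC::Run3;

sub capture_cmd {
    my (@cmd) = @_;
    my ($stdout, $stderr);
    run3(\@cmd, \undef, \$stdout, \$stderr);
    die "command '@cmd' failed: $stderr\n" if $? != 0;
    return $stdout;
}

# The RPM branch could then read, for example:
# $res  = capture_cmd('rpm', '--query', '--info', '--package', $path);
# $res .= "===\n";
# $res .= capture_cmd('rpm', '--query', '--list', '--verbose', '--package', $path);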
@@ -364,21 +364,6 @@ sub evals_GET {
     );
 }
 
-sub errors :Chained('jobsetChain') :PathPart('errors') :Args(0) :ActionClass('REST') { }
-
-sub errors_GET {
-    my ($self, $c) = @_;
-
-    $c->stash->{template} = 'eval-error.tt';
-
-    my $jobsetName = $c->stash->{params}->{name};
-    $c->stash->{jobset} = $c->stash->{project}->jobsets->find(
-        { name => $jobsetName },
-        { '+columns' => { 'errormsg' => 'errormsg' } }
-    );
-
-    $self->status_ok($c, entity => $c->stash->{jobset});
-}
-
 # Redirect to the latest finished evaluation of this jobset.
 sub latest_eval : Chained('jobsetChain') PathPart('latest-eval') {
@@ -76,9 +76,7 @@ sub view_GET {
     $c->stash->{removed} = $diff->{removed};
     $c->stash->{unfinished} = $diff->{unfinished};
     $c->stash->{aborted} = $diff->{aborted};
-    $c->stash->{totalAborted} = $diff->{totalAborted};
-    $c->stash->{totalFailed} = $diff->{totalFailed};
-    $c->stash->{totalQueued} = $diff->{totalQueued};
+    $c->stash->{failed} = $diff->{failed};
 
     $c->stash->{full} = ($c->req->params->{full} || "0") eq "1";
 
@@ -88,17 +86,6 @@ sub view_GET {
     );
 }
 
-sub errors :Chained('evalChain') :PathPart('errors') :Args(0) :ActionClass('REST') { }
-
-sub errors_GET {
-    my ($self, $c) = @_;
-
-    $c->stash->{template} = 'eval-error.tt';
-
-    $c->stash->{eval} = $c->model('DB::JobsetEvals')->find($c->stash->{eval}->id, { prefetch => 'evaluationerror' });
-
-    $self->status_ok($c, entity => $c->stash->{eval});
-}
-
 sub create_jobset : Chained('evalChain') PathPart('create-jobset') Args(0) {
     my ($self, $c) = @_;
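Both the removed errors_GET above and the getEvals change further down lean on DBIx::Class prefetch to pull the related evaluationerror row in the same query rather than issuing a second one when the relationship is accessed. A hedged sketch of that idiom, assuming the usual Catalyst context and the accessor names used elsewhere in this diff:

# Sketch: fetch an eval plus its evaluation error in a single query.
my $eval = $c->model('DB::JobsetEvals')->find(
    $c->stash->{eval}->id,
    { prefetch => 'evaluationerror' },   # relationship name as used in this diff
);
my $errormsg = $eval->evaluationerror ? $eval->evaluationerror->errormsg : undef;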
@@ -9,12 +9,9 @@ use Hydra::Helper::CatalystUtils;
|
|||||||
use Hydra::View::TT;
|
use Hydra::View::TT;
|
||||||
use Nix::Store;
|
use Nix::Store;
|
||||||
use Nix::Config;
|
use Nix::Config;
|
||||||
use Number::Bytes::Human qw(format_bytes);
|
|
||||||
use Encode;
|
use Encode;
|
||||||
use File::Basename;
|
use File::Basename;
|
||||||
use JSON::MaybeXS;
|
use JSON::MaybeXS;
|
||||||
use HTML::Entities;
|
|
||||||
use IPC::Run3;
|
|
||||||
use List::Util qw[min max];
|
use List::Util qw[min max];
|
||||||
use List::SomeUtils qw{any};
|
use List::SomeUtils qw{any};
|
||||||
use Net::Prometheus;
|
use Net::Prometheus;
|
||||||
@@ -60,7 +57,6 @@ sub begin :Private {
|
|||||||
$c->stash->{tracker} = defined $c->config->{tracker} ? $c->config->{tracker} : "";
|
$c->stash->{tracker} = defined $c->config->{tracker} ? $c->config->{tracker} : "";
|
||||||
$c->stash->{flashMsg} = $c->flash->{flashMsg};
|
$c->stash->{flashMsg} = $c->flash->{flashMsg};
|
||||||
$c->stash->{successMsg} = $c->flash->{successMsg};
|
$c->stash->{successMsg} = $c->flash->{successMsg};
|
||||||
$c->stash->{localStore} = isLocalStore;
|
|
||||||
|
|
||||||
$c->stash->{isPrivateHydra} = $c->config->{private} // "0" ne "0";
|
$c->stash->{isPrivateHydra} = $c->config->{private} // "0" ne "0";
|
||||||
|
|
||||||
@@ -166,7 +162,7 @@ sub status_GET {
|
|||||||
{ "buildsteps.busy" => { '!=', 0 } },
|
{ "buildsteps.busy" => { '!=', 0 } },
|
||||||
{ order_by => ["globalpriority DESC", "id"],
|
{ order_by => ["globalpriority DESC", "id"],
|
||||||
join => "buildsteps",
|
join => "buildsteps",
|
||||||
columns => [@buildListColumns, 'buildsteps.drvpath', 'buildsteps.type']
|
columns => [@buildListColumns]
|
||||||
})]
|
})]
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -178,14 +174,8 @@ sub queue_runner_status_GET {
|
|||||||
my ($self, $c) = @_;
|
my ($self, $c) = @_;
|
||||||
|
|
||||||
#my $status = from_json($c->model('DB::SystemStatus')->find('queue-runner')->status);
|
#my $status = from_json($c->model('DB::SystemStatus')->find('queue-runner')->status);
|
||||||
my ($stdout, $stderr);
|
my $status = decode_json(`hydra-queue-runner --status`);
|
||||||
run3(['hydra-queue-runner', '--status'], \undef, \$stdout, \$stderr);
|
if ($?) { $status->{status} = "unknown"; }
|
||||||
my $status;
|
|
||||||
if ($? != 0) {
|
|
||||||
$status = { status => "unknown" };
|
|
||||||
} else {
|
|
||||||
$status = decode_json($stdout);
|
|
||||||
}
|
|
||||||
my $json = JSON->new->pretty()->canonical();
|
my $json = JSON->new->pretty()->canonical();
|
||||||
|
|
||||||
$c->stash->{template} = 'queue-runner-status.tt';
|
$c->stash->{template} = 'queue-runner-status.tt';
|
||||||
@@ -198,10 +188,8 @@ sub machines :Local Args(0) {
|
|||||||
my ($self, $c) = @_;
|
my ($self, $c) = @_;
|
||||||
my $machines = getMachines;
|
my $machines = getMachines;
|
||||||
|
|
||||||
# Add entry for localhost. The implicit addition is not needed with queue runner v2
|
# Add entry for localhost.
|
||||||
if (not $c->config->{'queue_runner_endpoint'}) {
|
$machines->{''} //= {};
|
||||||
$machines->{''} //= {};
|
|
||||||
}
|
|
||||||
delete $machines->{'localhost'};
|
delete $machines->{'localhost'};
|
||||||
|
|
||||||
my $status = $c->model('DB::SystemStatus')->find("queue-runner");
|
my $status = $c->model('DB::SystemStatus')->find("queue-runner");
|
||||||
@@ -209,11 +197,9 @@ sub machines :Local Args(0) {
|
|||||||
my $ms = decode_json($status->status)->{"machines"};
|
my $ms = decode_json($status->status)->{"machines"};
|
||||||
foreach my $name (keys %{$ms}) {
|
foreach my $name (keys %{$ms}) {
|
||||||
$name = "" if $name eq "localhost";
|
$name = "" if $name eq "localhost";
|
||||||
my $outName = $name;
|
$machines->{$name} //= {disabled => 1};
|
||||||
$outName = "" if $name eq "ssh://localhost";
|
$machines->{$name}->{nrStepsDone} = $ms->{$name}->{nrStepsDone};
|
||||||
$machines->{$outName} //= {disabled => 1};
|
$machines->{$name}->{avgStepBuildTime} = $ms->{$name}->{avgStepBuildTime} // 0;
|
||||||
$machines->{$outName}->{nrStepsDone} = $ms->{$name}->{nrStepsDone};
|
|
||||||
$machines->{$outName}->{avgStepBuildTime} = $ms->{$name}->{avgStepBuildTime} // 0;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -226,19 +212,6 @@ sub machines :Local Args(0) {
|
|||||||
"where busy != 0 order by machine, stepnr",
|
"where busy != 0 order by machine, stepnr",
|
||||||
{ Slice => {} });
|
{ Slice => {} });
|
||||||
$c->stash->{template} = 'machine-status.tt';
|
$c->stash->{template} = 'machine-status.tt';
|
||||||
$c->stash->{human_bytes} = sub {
|
|
||||||
my ($bytes) = @_;
|
|
||||||
return format_bytes($bytes, si => 1);
|
|
||||||
};
|
|
||||||
$c->stash->{pretty_load} = sub {
|
|
||||||
my ($load) = @_;
|
|
||||||
return sprintf('%.2f', $load);
|
|
||||||
};
|
|
||||||
$c->stash->{pretty_percent} = sub {
|
|
||||||
my ($percent) = @_;
|
|
||||||
my $ret = sprintf('%.2f', $percent);
|
|
||||||
return (' ' x (6 - length($ret))) . encode_entities($ret);
|
|
||||||
};
|
|
||||||
$self->status_ok($c, entity => $c->stash->{machines});
|
$self->status_ok($c, entity => $c->stash->{machines});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -32,26 +32,12 @@ sub buildDiff {
|
|||||||
removed => [],
|
removed => [],
|
||||||
unfinished => [],
|
unfinished => [],
|
||||||
aborted => [],
|
aborted => [],
|
||||||
|
failed => [],
|
||||||
# These summary counters cut across the categories to determine whether
|
|
||||||
# actions such as "Restart all failed" or "Bump queue" are available.
|
|
||||||
totalAborted => 0,
|
|
||||||
totalFailed => 0,
|
|
||||||
totalQueued => 0,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
my $n = 0;
|
my $n = 0;
|
||||||
foreach my $build (@{$builds}) {
|
foreach my $build (@{$builds}) {
|
||||||
my $aborted = $build->finished != 0 && (
|
my $aborted = $build->finished != 0 && ($build->buildstatus == 3 || $build->buildstatus == 4);
|
||||||
# aborted
|
|
||||||
$build->buildstatus == 3
|
|
||||||
# cancelled
|
|
||||||
|| $build->buildstatus == 4
|
|
||||||
# timeout
|
|
||||||
|| $build->buildstatus == 7
|
|
||||||
# log limit exceeded
|
|
||||||
|| $build->buildstatus == 10
|
|
||||||
);
|
|
||||||
my $d;
|
my $d;
|
||||||
my $found = 0;
|
my $found = 0;
|
||||||
while ($n < scalar(@{$builds2})) {
|
while ($n < scalar(@{$builds2})) {
|
||||||
@@ -85,15 +71,8 @@ sub buildDiff {
|
|||||||
} else {
|
} else {
|
||||||
push @{$ret->{new}}, $build if !$found;
|
push @{$ret->{new}}, $build if !$found;
|
||||||
}
|
}
|
||||||
|
if (defined $build->buildstatus && $build->buildstatus != 0) {
|
||||||
if ($build->finished != 0 && $build->buildstatus != 0) {
|
push @{$ret->{failed}}, $build;
|
||||||
if ($aborted) {
|
|
||||||
++$ret->{totalAborted};
|
|
||||||
} else {
|
|
||||||
++$ret->{totalFailed};
|
|
||||||
}
|
|
||||||
} elsif ($build->finished == 0) {
|
|
||||||
++$ret->{totalQueued};
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -12,14 +12,10 @@ use Nix::Store;
|
|||||||
use Encode;
|
use Encode;
|
||||||
use Sys::Hostname::Long;
|
use Sys::Hostname::Long;
|
||||||
use IPC::Run;
|
use IPC::Run;
|
||||||
use IPC::Run3;
|
|
||||||
use LWP::UserAgent;
|
|
||||||
use JSON::MaybeXS;
|
|
||||||
use UUID4::Tiny qw(is_uuid4_string);
|
use UUID4::Tiny qw(is_uuid4_string);
|
||||||
|
|
||||||
our @ISA = qw(Exporter);
|
our @ISA = qw(Exporter);
|
||||||
our @EXPORT = qw(
|
our @EXPORT = qw(
|
||||||
addToStore
|
|
||||||
cancelBuilds
|
cancelBuilds
|
||||||
constructRunCommandLogPath
|
constructRunCommandLogPath
|
||||||
findLog
|
findLog
|
||||||
@@ -40,7 +36,6 @@ our @EXPORT = qw(
|
|||||||
jobsetOverview
|
jobsetOverview
|
||||||
jobsetOverview_
|
jobsetOverview_
|
||||||
pathIsInsidePrefix
|
pathIsInsidePrefix
|
||||||
readIntoSocket
|
|
||||||
readNixFile
|
readNixFile
|
||||||
registerRoot
|
registerRoot
|
||||||
restartBuilds
|
restartBuilds
|
||||||
@@ -301,7 +296,8 @@ sub getEvals {
|
|||||||
|
|
||||||
my @evals = $evals_result_set->search(
|
my @evals = $evals_result_set->search(
|
||||||
{ hasnewbuilds => 1 },
|
{ hasnewbuilds => 1 },
|
||||||
{ order_by => "$me.id DESC", rows => $rows, offset => $offset });
|
{ order_by => "$me.id DESC", rows => $rows, offset => $offset
|
||||||
|
, prefetch => { evaluationerror => [ ] } });
|
||||||
my @res = ();
|
my @res = ();
|
||||||
my $cache = {};
|
my $cache = {};
|
||||||
|
|
||||||
@@ -344,68 +340,37 @@ sub getEvals {
|
|||||||
|
|
||||||
sub getMachines {
|
sub getMachines {
|
||||||
my %machines = ();
|
my %machines = ();
|
||||||
my $config = getHydraConfig();
|
|
||||||
|
|
||||||
if ($config->{'queue_runner_endpoint'}) {
|
my @machinesFiles = split /:/, ($ENV{"NIX_REMOTE_SYSTEMS"} || "/etc/nix/machines");
|
||||||
my $ua = LWP::UserAgent->new();
|
|
||||||
my $resp = $ua->get($config->{'queue_runner_endpoint'} . "/status/machines");
|
|
||||||
if (not $resp->is_success) {
|
|
||||||
print STDERR "Unable to ask queue runner for machines\n";
|
|
||||||
return \%machines;
|
|
||||||
}
|
|
||||||
|
|
||||||
my $data = decode_json($resp->decoded_content) or return \%machines;
|
for my $machinesFile (@machinesFiles) {
|
||||||
my $machinesData = $data->{machines};
|
next unless -e $machinesFile;
|
||||||
|
open(my $conf, "<", $machinesFile) or die;
|
||||||
|
while (my $line = <$conf>) {
|
||||||
|
chomp($line);
|
||||||
|
$line =~ s/\#.*$//g;
|
||||||
|
next if $line =~ /^\s*$/;
|
||||||
|
my @tokens = split /\s+/, $line;
|
||||||
|
|
||||||
foreach my $machineName (keys %$machinesData) {
|
if (!defined($tokens[5]) || $tokens[5] eq "-") {
|
||||||
my $machine = %$machinesData{$machineName};
|
$tokens[5] = "";
|
||||||
$machines{$machineName} =
|
}
|
||||||
{ systemTypes => $machine->{systems}
|
my @supportedFeatures = split(/,/, $tokens[5] || "");
|
||||||
, maxJobs => $machine->{maxJobs}
|
|
||||||
, speedFactor => $machine->{speedFactor}
|
if (!defined($tokens[6]) || $tokens[6] eq "-") {
|
||||||
, supportedFeatures => [ @{$machine->{supportedFeatures}}, @{$machine->{mandatoryFeatures}} ]
|
$tokens[6] = "";
|
||||||
, mandatoryFeatures => [ @{$machine->{mandatoryFeatures}} ]
|
}
|
||||||
# New fields for the machine status
|
my @mandatoryFeatures = split(/,/, $tokens[6] || "");
|
||||||
, primarySystemType => $machine->{systems}[0]
|
$machines{$tokens[0]} =
|
||||||
, hasCapacity => $machine->{hasCapacity}
|
{ systemTypes => [ split(/,/, $tokens[1]) ]
|
||||||
, hasDynamicCapacity => $machine->{hasDynamicCapacity}
|
, sshKeys => $tokens[2]
|
||||||
, hasStaticCapacity => $machine->{hasStaticCapacity}
|
, maxJobs => int($tokens[3])
|
||||||
, score => $machine->{score}
|
, speedFactor => 1.0 * (defined $tokens[4] ? int($tokens[4]) : 1)
|
||||||
, stats => $machine->{stats}
|
, supportedFeatures => [ @supportedFeatures, @mandatoryFeatures ]
|
||||||
, memTotal => $machine->{totalMem}
|
, mandatoryFeatures => [ @mandatoryFeatures ]
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
} else {
|
close $conf;
|
||||||
my @machinesFiles = split /:/, ($ENV{"NIX_REMOTE_SYSTEMS"} || "/etc/nix/machines");
|
|
||||||
|
|
||||||
for my $machinesFile (@machinesFiles) {
|
|
||||||
next unless -e $machinesFile;
|
|
||||||
open(my $conf, "<", $machinesFile) or die;
|
|
||||||
while (my $line = <$conf>) {
|
|
||||||
chomp($line);
|
|
||||||
$line =~ s/\#.*$//g;
|
|
||||||
next if $line =~ /^\s*$/;
|
|
||||||
my @tokens = split /\s+/, $line;
|
|
||||||
|
|
||||||
if (!defined($tokens[5]) || $tokens[5] eq "-") {
|
|
||||||
$tokens[5] = "";
|
|
||||||
}
|
|
||||||
my @supportedFeatures = split(/,/, $tokens[5] || "");
|
|
||||||
|
|
||||||
if (!defined($tokens[6]) || $tokens[6] eq "-") {
|
|
||||||
$tokens[6] = "";
|
|
||||||
}
|
|
||||||
my @mandatoryFeatures = split(/,/, $tokens[6] || "");
|
|
||||||
$machines{$tokens[0]} =
|
|
||||||
{ systemTypes => [ split(/,/, $tokens[1]) ]
|
|
||||||
, maxJobs => int($tokens[3])
|
|
||||||
, speedFactor => 1.0 * (defined $tokens[4] ? int($tokens[4]) : 1)
|
|
||||||
, supportedFeatures => [ @supportedFeatures, @mandatoryFeatures ]
|
|
||||||
, mandatoryFeatures => [ @mandatoryFeatures ]
|
|
||||||
};
|
|
||||||
}
|
|
||||||
close $conf;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return \%machines;
|
return \%machines;
|
||||||
@@ -452,16 +417,6 @@ sub pathIsInsidePrefix {
|
|||||||
return $cur;
|
return $cur;
|
||||||
}
|
}
|
||||||
|
|
||||||
sub readIntoSocket{
|
|
||||||
my (%args) = @_;
|
|
||||||
my $sock;
|
|
||||||
|
|
||||||
eval {
|
|
||||||
open($sock, "-|", @{$args{cmd}}) or die q(failed to open socket from command:\n $x);
|
|
||||||
};
|
|
||||||
|
|
||||||
return $sock;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -616,14 +571,4 @@ sub constructRunCommandLogPath {
|
|||||||
return "$hydra_path/runcommand-logs/$bucket/$uuid";
|
return "$hydra_path/runcommand-logs/$bucket/$uuid";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
sub addToStore {
|
|
||||||
my ($path) = @_;
|
|
||||||
|
|
||||||
my ($stdout, $stderr);
|
|
||||||
run3(['nix-store', '--add', $path], \undef, \$stdout, \$stderr);
|
|
||||||
die "cannot add path $path to the Nix store: $stderr\n" if $? != 0;
|
|
||||||
return trim($stdout);
|
|
||||||
}
|
|
||||||
|
|
||||||
1;
|
1;
|
||||||
|
@@ -7,10 +7,8 @@ use HTTP::Request;
|
|||||||
use LWP::UserAgent;
|
use LWP::UserAgent;
|
||||||
use JSON::MaybeXS;
|
use JSON::MaybeXS;
|
||||||
use Hydra::Helper::CatalystUtils;
|
use Hydra::Helper::CatalystUtils;
|
||||||
use Hydra::Helper::Nix;
|
|
||||||
use File::Temp;
|
use File::Temp;
|
||||||
use POSIX qw(strftime);
|
use POSIX qw(strftime);
|
||||||
use IPC::Run qw(run);
|
|
||||||
|
|
||||||
sub supportedInputTypes {
|
sub supportedInputTypes {
|
||||||
my ($self, $inputTypes) = @_;
|
my ($self, $inputTypes) = @_;
|
||||||
@@ -49,8 +47,10 @@ sub fetchInput {
|
|||||||
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
||||||
print $fh encode_json \%pulls;
|
print $fh encode_json \%pulls;
|
||||||
close $fh;
|
close $fh;
|
||||||
run(["jq", "-S", "."], '<', $filename, '>', "$tempdir/bitbucket-pulls-sorted.json") or die "jq command failed: $?";
|
system("jq -S . < $filename > $tempdir/bitbucket-pulls-sorted.json");
|
||||||
my $storePath = addToStore("$tempdir/bitbucket-pulls-sorted.json");
|
my $storePath = trim(`nix-store --add "$tempdir/bitbucket-pulls-sorted.json"`
|
||||||
|
or die "cannot copy path $filename to the Nix store.\n");
|
||||||
|
chomp $storePath;
|
||||||
my $timestamp = time;
|
my $timestamp = time;
|
||||||
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
||||||
}
|
}
|
||||||
|
@@ -7,7 +7,6 @@ use Digest::SHA qw(sha256_hex);
|
|||||||
use File::Path;
|
use File::Path;
|
||||||
use Hydra::Helper::Exec;
|
use Hydra::Helper::Exec;
|
||||||
use Hydra::Helper::Nix;
|
use Hydra::Helper::Nix;
|
||||||
use IPC::Run3;
|
|
||||||
|
|
||||||
sub supportedInputTypes {
|
sub supportedInputTypes {
|
||||||
my ($self, $inputTypes) = @_;
|
my ($self, $inputTypes) = @_;
|
||||||
@@ -71,11 +70,8 @@ sub fetchInput {
|
|||||||
(system "darcs", "get", "--lazy", $clonePath, "$tmpDir/export", "--quiet",
|
(system "darcs", "get", "--lazy", $clonePath, "$tmpDir/export", "--quiet",
|
||||||
"--to-match", "hash $revision") == 0
|
"--to-match", "hash $revision") == 0
|
||||||
or die "darcs export failed";
|
or die "darcs export failed";
|
||||||
my ($stdout, $stderr);
|
$revCount = `darcs changes --count --repodir $tmpDir/export`; chomp $revCount;
|
||||||
run3(['darcs', 'changes', '--count', '--repodir', "$tmpDir/export"], \undef, \$stdout, \$stderr);
|
die "darcs changes --count failed" if $? != 0;
|
||||||
die "darcs changes --count failed: $stderr\n" if $? != 0;
|
|
||||||
$revCount = $stdout;
|
|
||||||
chomp $revCount;
|
|
||||||
|
|
||||||
system "rm", "-rf", "$tmpDir/export/_darcs";
|
system "rm", "-rf", "$tmpDir/export/_darcs";
|
||||||
$storePath = $MACHINE_LOCAL_STORE->addToStore("$tmpDir/export", 1, "sha256");
|
$storePath = $MACHINE_LOCAL_STORE->addToStore("$tmpDir/export", 1, "sha256");
|
||||||
|
@@ -16,7 +16,6 @@ use HTTP::Request;
|
|||||||
use LWP::UserAgent;
|
use LWP::UserAgent;
|
||||||
use JSON::MaybeXS;
|
use JSON::MaybeXS;
|
||||||
use Hydra::Helper::CatalystUtils;
|
use Hydra::Helper::CatalystUtils;
|
||||||
use Hydra::Helper::Nix;
|
|
||||||
use File::Temp;
|
use File::Temp;
|
||||||
use POSIX qw(strftime);
|
use POSIX qw(strftime);
|
||||||
|
|
||||||
@@ -27,18 +26,19 @@ sub supportedInputTypes {
|
|||||||
|
|
||||||
sub _iterate {
|
sub _iterate {
|
||||||
my ($url, $auth, $pulls, $ua) = @_;
|
my ($url, $auth, $pulls, $ua) = @_;
|
||||||
|
|
||||||
my $req = HTTP::Request->new('GET', $url);
|
my $req = HTTP::Request->new('GET', $url);
|
||||||
$req->header('Accept' => 'application/json');
|
|
||||||
$req->header('Authorization' => 'token ' . $auth) if defined $auth;
|
$req->header('Authorization' => 'token ' . $auth) if defined $auth;
|
||||||
|
|
||||||
my $res = $ua->request($req);
|
my $res = $ua->request($req);
|
||||||
my $content = $res->decoded_content;
|
my $content = $res->decoded_content;
|
||||||
die "Error pulling from the gitea pulls API: $content\n"
|
die "Error pulling from the gitea pulls API: $content\n"
|
||||||
unless $res->is_success;
|
unless $res->is_success;
|
||||||
|
|
||||||
my $pulls_list = decode_json $content;
|
my $pulls_list = decode_json $content;
|
||||||
# TODO Stream out the json instead
|
|
||||||
foreach my $pull (@$pulls_list) {
|
foreach my $pull (@$pulls_list) {
|
||||||
$pulls->{$pull->{number}} = $pull;
|
$pulls->{$pull->{number}} = $pull;
|
||||||
}
|
}
|
||||||
|
|
||||||
# TODO Make Link header parsing more robust!!!
|
# TODO Make Link header parsing more robust!!!
|
||||||
@@ -71,10 +71,12 @@ sub fetchInput {
|
|||||||
my $tempdir = File::Temp->newdir("gitea-pulls" . "XXXXX", TMPDIR => 1);
|
my $tempdir = File::Temp->newdir("gitea-pulls" . "XXXXX", TMPDIR => 1);
|
||||||
my $filename = "$tempdir/gitea-pulls.json";
|
my $filename = "$tempdir/gitea-pulls.json";
|
||||||
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
||||||
print $fh JSON->new->utf8->canonical->encode(\%pulls);
|
print $fh encode_json \%pulls;
|
||||||
close $fh;
|
close $fh;
|
||||||
|
|
||||||
my $storePath = addToStore($filename);
|
my $storePath = trim(`nix-store --add "$filename"`
|
||||||
|
or die "cannot copy path $filename to the Nix store.\n");
|
||||||
|
chomp $storePath;
|
||||||
my $timestamp = time;
|
my $timestamp = time;
|
||||||
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
||||||
}
|
}
|
||||||
|
@@ -7,10 +7,8 @@ use HTTP::Request;
|
|||||||
use LWP::UserAgent;
|
use LWP::UserAgent;
|
||||||
use JSON::MaybeXS;
|
use JSON::MaybeXS;
|
||||||
use Hydra::Helper::CatalystUtils;
|
use Hydra::Helper::CatalystUtils;
|
||||||
use Hydra::Helper::Nix;
|
|
||||||
use File::Temp;
|
use File::Temp;
|
||||||
use POSIX qw(strftime);
|
use POSIX qw(strftime);
|
||||||
use IPC::Run qw(run);
|
|
||||||
|
|
||||||
=head1 NAME
|
=head1 NAME
|
||||||
|
|
||||||
@@ -120,8 +118,10 @@ sub fetchInput {
|
|||||||
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
||||||
print $fh encode_json \%refs;
|
print $fh encode_json \%refs;
|
||||||
close $fh;
|
close $fh;
|
||||||
run(["jq", "-S", "."], '<', $filename, '>', "$tempdir/gitea-refs-sorted.json") or die "jq command failed: $?";
|
system("jq -S . < $filename > $tempdir/gitea-refs-sorted.json");
|
||||||
my $storePath = addToStore("$tempdir/gitea-refs-sorted.json");
|
my $storePath = trim(qx{nix-store --add "$tempdir/gitea-refs-sorted.json"}
|
||||||
|
or die "cannot copy path $filename to the Nix store.\n");
|
||||||
|
chomp $storePath;
|
||||||
my $timestamp = time;
|
my $timestamp = time;
|
||||||
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
||||||
}
|
}
|
||||||
|
@@ -7,7 +7,6 @@ use HTTP::Request;
|
|||||||
use LWP::UserAgent;
|
use LWP::UserAgent;
|
||||||
use JSON::MaybeXS;
|
use JSON::MaybeXS;
|
||||||
use Hydra::Helper::CatalystUtils;
|
use Hydra::Helper::CatalystUtils;
|
||||||
use Hydra::Helper::Nix;
|
|
||||||
use File::Temp;
|
use File::Temp;
|
||||||
use POSIX qw(strftime);
|
use POSIX qw(strftime);
|
||||||
|
|
||||||
@@ -59,7 +58,9 @@ sub fetchInput {
|
|||||||
print $fh JSON->new->utf8->canonical->encode(\%pulls);
|
print $fh JSON->new->utf8->canonical->encode(\%pulls);
|
||||||
close $fh;
|
close $fh;
|
||||||
|
|
||||||
my $storePath = addToStore($filename);
|
my $storePath = trim(`nix-store --add "$filename"`
|
||||||
|
or die "cannot copy path $filename to the Nix store.\n");
|
||||||
|
chomp $storePath;
|
||||||
my $timestamp = time;
|
my $timestamp = time;
|
||||||
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
||||||
}
|
}
|
||||||
|
@@ -7,10 +7,8 @@ use HTTP::Request;
|
|||||||
use LWP::UserAgent;
|
use LWP::UserAgent;
|
||||||
use JSON::MaybeXS;
|
use JSON::MaybeXS;
|
||||||
use Hydra::Helper::CatalystUtils;
|
use Hydra::Helper::CatalystUtils;
|
||||||
use Hydra::Helper::Nix;
|
|
||||||
use File::Temp;
|
use File::Temp;
|
||||||
use POSIX qw(strftime);
|
use POSIX qw(strftime);
|
||||||
use IPC::Run qw(run);
|
|
||||||
|
|
||||||
=head1 NAME
|
=head1 NAME
|
||||||
|
|
||||||
@@ -116,8 +114,10 @@ sub fetchInput {
|
|||||||
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
||||||
print $fh encode_json \%refs;
|
print $fh encode_json \%refs;
|
||||||
close $fh;
|
close $fh;
|
||||||
run(["jq", "-S", "."], '<', $filename, '>', "$tempdir/github-refs-sorted.json") or die "jq command failed: $?";
|
system("jq -S . < $filename > $tempdir/github-refs-sorted.json");
|
||||||
my $storePath = addToStore("$tempdir/github-refs-sorted.json");
|
my $storePath = trim(qx{nix-store --add "$tempdir/github-refs-sorted.json"}
|
||||||
|
or die "cannot copy path $filename to the Nix store.\n");
|
||||||
|
chomp $storePath;
|
||||||
my $timestamp = time;
|
my $timestamp = time;
|
||||||
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
||||||
}
|
}
|
||||||
|
@@ -21,10 +21,8 @@ use HTTP::Request;
|
|||||||
use LWP::UserAgent;
|
use LWP::UserAgent;
|
||||||
use JSON::MaybeXS;
|
use JSON::MaybeXS;
|
||||||
use Hydra::Helper::CatalystUtils;
|
use Hydra::Helper::CatalystUtils;
|
||||||
use Hydra::Helper::Nix;
|
|
||||||
use File::Temp;
|
use File::Temp;
|
||||||
use POSIX qw(strftime);
|
use POSIX qw(strftime);
|
||||||
use IPC::Run qw(run);
|
|
||||||
|
|
||||||
sub supportedInputTypes {
|
sub supportedInputTypes {
|
||||||
my ($self, $inputTypes) = @_;
|
my ($self, $inputTypes) = @_;
|
||||||
@@ -87,8 +85,10 @@ sub fetchInput {
|
|||||||
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
open(my $fh, ">", $filename) or die "Cannot open $filename for writing: $!";
|
||||||
print $fh encode_json \%pulls;
|
print $fh encode_json \%pulls;
|
||||||
close $fh;
|
close $fh;
|
||||||
run(["jq", "-S", "."], '<', $filename, '>', "$tempdir/gitlab-pulls-sorted.json") or die "jq command failed: $?";
|
system("jq -S . < $filename > $tempdir/gitlab-pulls-sorted.json");
|
||||||
my $storePath = addToStore("$tempdir/gitlab-pulls-sorted.json");
|
my $storePath = trim(`nix-store --add "$tempdir/gitlab-pulls-sorted.json"`
|
||||||
|
or die "cannot copy path $filename to the Nix store.\n");
|
||||||
|
chomp $storePath;
|
||||||
my $timestamp = time;
|
my $timestamp = time;
|
||||||
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
return { storePath => $storePath, revision => strftime "%Y%m%d%H%M%S", gmtime($timestamp) };
|
||||||
}
|
}
|
||||||
|
@@ -5,7 +5,6 @@ use warnings;
|
|||||||
use parent 'Hydra::Plugin';
|
use parent 'Hydra::Plugin';
|
||||||
use POSIX qw(strftime);
|
use POSIX qw(strftime);
|
||||||
use Hydra::Helper::Nix;
|
use Hydra::Helper::Nix;
|
||||||
use IPC::Run3;
|
|
||||||
|
|
||||||
sub supportedInputTypes {
|
sub supportedInputTypes {
|
||||||
my ($self, $inputTypes) = @_;
|
my ($self, $inputTypes) = @_;
|
||||||
@@ -38,16 +37,11 @@ sub fetchInput {
|
|||||||
|
|
||||||
print STDERR "copying input ", $name, " from $uri\n";
|
print STDERR "copying input ", $name, " from $uri\n";
|
||||||
if ( $uri =~ /^\// ) {
|
if ( $uri =~ /^\// ) {
|
||||||
$storePath = addToStore($uri);
|
$storePath = `nix-store --add "$uri"`
|
||||||
|
or die "cannot copy path $uri to the Nix store.\n";
|
||||||
} else {
|
} else {
|
||||||
# Run nix-prefetch-url with PRINT_PATH=1
|
$storePath = `PRINT_PATH=1 nix-prefetch-url "$uri" | tail -n 1`
|
||||||
my ($stdout, $stderr);
|
or die "cannot fetch $uri to the Nix store.\n";
|
||||||
local $ENV{PRINT_PATH} = 1;
|
|
||||||
run3(['nix-prefetch-url', $uri], \undef, \$stdout, \$stderr);
|
|
||||||
die "cannot fetch $uri to the Nix store: $stderr\n" if $? != 0;
|
|
||||||
# Get the last line (which is the store path)
|
|
||||||
my @output_lines = split /\n/, $stdout;
|
|
||||||
$storePath = $output_lines[-1] if @output_lines;
|
|
||||||
}
|
}
|
||||||
chomp $storePath;
|
chomp $storePath;
|
||||||
|
|
||||||
|
@@ -7,8 +7,6 @@ use File::Temp;
|
|||||||
use File::Basename;
|
use File::Basename;
|
||||||
use Fcntl;
|
use Fcntl;
|
||||||
use IO::File;
|
use IO::File;
|
||||||
use IPC::Run qw(run);
|
|
||||||
use IPC::Run3;
|
|
||||||
use Net::Amazon::S3;
|
use Net::Amazon::S3;
|
||||||
use Net::Amazon::S3::Client;
|
use Net::Amazon::S3::Client;
|
||||||
use Digest::SHA;
|
use Digest::SHA;
|
||||||
@@ -29,11 +27,11 @@ my %compressors = ();
|
|||||||
$compressors{"none"} = "";
|
$compressors{"none"} = "";
|
||||||
|
|
||||||
if (defined($Nix::Config::bzip2)) {
|
if (defined($Nix::Config::bzip2)) {
|
||||||
$compressors{"bzip2"} = "$Nix::Config::bzip2",
|
$compressors{"bzip2"} = "| $Nix::Config::bzip2",
|
||||||
}
|
}
|
||||||
|
|
||||||
if (defined($Nix::Config::xz)) {
|
if (defined($Nix::Config::xz)) {
|
||||||
$compressors{"xz"} = "$Nix::Config::xz",
|
$compressors{"xz"} = "| $Nix::Config::xz",
|
||||||
}
|
}
|
||||||
|
|
||||||
my $lockfile = Hydra::Model::DB::getHydraPath . "/.hydra-s3backup.lock";
|
my $lockfile = Hydra::Model::DB::getHydraPath . "/.hydra-s3backup.lock";
|
||||||
@@ -113,16 +111,7 @@ sub buildFinished {
|
|||||||
}
|
}
|
||||||
next unless @incomplete_buckets;
|
next unless @incomplete_buckets;
|
||||||
my $compressor = $compressors{$compression_type};
|
my $compressor = $compressors{$compression_type};
|
||||||
if ($compressor eq "") {
|
system("$Nix::Config::binDir/nix-store --dump $path $compressor > $tempdir/nar") == 0 or die;
|
||||||
# No compression - use IPC::Run3 to redirect stdout to file
|
|
||||||
run3(["$Nix::Config::binDir/nix-store", "--dump", $path],
|
|
||||||
\undef, "$tempdir/nar", \undef) or die "nix-store --dump failed: $!";
|
|
||||||
} else {
|
|
||||||
# With compression - use IPC::Run to pipe nix-store output to compressor
|
|
||||||
my $dump_cmd = ["$Nix::Config::binDir/nix-store", "--dump", $path];
|
|
||||||
my $compress_cmd = [$compressor];
|
|
||||||
run($dump_cmd, '|', $compress_cmd, '>', "$tempdir/nar") or die "Pipeline failed: $?";
|
|
||||||
}
|
|
||||||
my $digest = Digest::SHA->new(256);
|
my $digest = Digest::SHA->new(256);
|
||||||
$digest->addfile("$tempdir/nar");
|
$digest->addfile("$tempdir/nar");
|
||||||
my $file_hash = $digest->hexdigest;
|
my $file_hash = $digest->hexdigest;
|
||||||
|
@@ -105,6 +105,4 @@ __PACKAGE__->add_column(
     "+id" => { retrieve_on_insert => 1 }
 );
 
-__PACKAGE__->mk_group_accessors('column' => 'has_error');
-
 1;
@@ -386,8 +386,6 @@ __PACKAGE__->add_column(
     "+id" => { retrieve_on_insert => 1 }
 );
 
-__PACKAGE__->mk_group_accessors('column' => 'has_error');
-
 sub supportsDynamicRunCommand {
     my ($self) = @_;
 
@@ -1,30 +0,0 @@
|
|||||||
package Hydra::Schema::ResultSet::EvaluationErrors;
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
use utf8;
|
|
||||||
use warnings;
|
|
||||||
|
|
||||||
use parent 'DBIx::Class::ResultSet';
|
|
||||||
|
|
||||||
use Storable qw(dclone);
|
|
||||||
|
|
||||||
__PACKAGE__->load_components('Helper::ResultSet::RemoveColumns');
|
|
||||||
|
|
||||||
# Exclude expensive error message values unless explicitly requested, and
|
|
||||||
# replace them with a summary field describing their presence/absence.
|
|
||||||
sub search_rs {
|
|
||||||
my ( $class, $query, $attrs ) = @_;
|
|
||||||
|
|
||||||
if ($attrs) {
|
|
||||||
$attrs = dclone($attrs);
|
|
||||||
}
|
|
||||||
|
|
||||||
unless (exists $attrs->{'select'} || exists $attrs->{'columns'}) {
|
|
||||||
$attrs->{'+columns'}->{'has_error'} = "errormsg != ''";
|
|
||||||
}
|
|
||||||
unless (exists $attrs->{'+columns'}->{'errormsg'}) {
|
|
||||||
push @{ $attrs->{'remove_columns'} }, 'errormsg';
|
|
||||||
}
|
|
||||||
|
|
||||||
return $class->next::method($query, $attrs);
|
|
||||||
}
|
|
@@ -1,30 +0,0 @@
|
|||||||
package Hydra::Schema::ResultSet::Jobsets;
|
|
||||||
|
|
||||||
use strict;
|
|
||||||
use utf8;
|
|
||||||
use warnings;
|
|
||||||
|
|
||||||
use parent 'DBIx::Class::ResultSet';
|
|
||||||
|
|
||||||
use Storable qw(dclone);
|
|
||||||
|
|
||||||
__PACKAGE__->load_components('Helper::ResultSet::RemoveColumns');
|
|
||||||
|
|
||||||
# Exclude expensive error message values unless explicitly requested, and
|
|
||||||
# replace them with a summary field describing their presence/absence.
|
|
||||||
sub search_rs {
|
|
||||||
my ( $class, $query, $attrs ) = @_;
|
|
||||||
|
|
||||||
if ($attrs) {
|
|
||||||
$attrs = dclone($attrs);
|
|
||||||
}
|
|
||||||
|
|
||||||
unless (exists $attrs->{'select'} || exists $attrs->{'columns'}) {
|
|
||||||
$attrs->{'+columns'}->{'has_error'} = "errormsg != ''";
|
|
||||||
}
|
|
||||||
unless (exists $attrs->{'+columns'}->{'errormsg'}) {
|
|
||||||
push @{ $attrs->{'remove_columns'} }, 'errormsg';
|
|
||||||
}
|
|
||||||
|
|
||||||
return $class->next::method($query, $attrs);
|
|
||||||
}
|
|
@@ -1,103 +0,0 @@
-package Perl::Critic::Policy::Hydra::ProhibitShellInvokingSystemCalls;
-
-use strict;
-use warnings;
-use constant;
-
-use Perl::Critic::Utils qw{ :severities :classification :ppi };
-use base 'Perl::Critic::Policy';
-
-our $VERSION = '1.000';
-
-use constant DESC => q{Shell-invoking system calls are prohibited};
-use constant EXPL => q{Use list form system() or IPC::Run3 for better security. String form invokes shell and is vulnerable to injection};
-
-sub supported_parameters { return () }
-sub default_severity { return $SEVERITY_HIGHEST }
-sub default_themes { return qw( hydra security ) }
-sub applies_to { return 'PPI::Token::Word' }
-
-sub violates {
-    my ( $self, $elem, undef ) = @_;
-
-    # Only check system() and exec() calls
-    return () unless $elem->content() =~ /^(system|exec)$/;
-    return () unless is_function_call($elem);
-
-    # Skip method calls (->system or ->exec)
-    my $prev = $elem->sprevious_sibling();
-    return () if $prev && $prev->isa('PPI::Token::Operator') && $prev->content() eq '->';
-
-    # Get first argument after function name, skipping whitespace
-    my $args = $elem->snext_sibling();
-    return () unless $args;
-    $args = $args->snext_sibling() while $args && $args->isa('PPI::Token::Whitespace');
-
-    # For parenthesized calls, look inside
-    my $search_elem = $args;
-    if ($args && $args->isa('PPI::Structure::List')) {
-        $search_elem = $args->schild(0);
-        return () unless $search_elem;
-    }
-
-    # Check if it's list form (has comma)
-    my $current = $search_elem;
-    if ($current && $current->isa('PPI::Statement')) {
-        # Look through statement children
-        for my $child ($current->schildren()) {
-            return () if $child->isa('PPI::Token::Operator') && $child->content() eq ',';
-        }
-    } else {
-        # Look through siblings for non-parenthesized calls
-        while ($current) {
-            return () if $current->isa('PPI::Token::Operator') && $current->content() eq ',';
-            last if $current->isa('PPI::Token::Structure') && $current->content() eq ';';
-            $current = $current->snext_sibling();
-        }
-    }
-
-    # Check if first arg is array variable
-    my $first = $search_elem->isa('PPI::Statement') ?
-        $search_elem->schild(0) : $search_elem;
-    return () if $first && $first->isa('PPI::Token::Symbol') && $first->content() =~ /^[@]/;
-
-    # Check if it's a safe single-word command
-    if ($first && $first->isa('PPI::Token::Quote')) {
-        my $content = $first->string();
-        return () if $content =~ /^[a-zA-Z0-9_\-\.\/]+$/;
-    }
-
-    return $self->violation( DESC, EXPL, $elem );
-}
-
-1;
-
-__END__
-
-=pod
-
-=head1 NAME
-
-Perl::Critic::Policy::Hydra::ProhibitShellInvokingSystemCalls - Prohibit shell-invoking system() and exec() calls
-
-=head1 DESCRIPTION
-
-This policy prohibits the use of C<system()> and C<exec()> functions when called with a single string argument,
-which invokes the shell and is vulnerable to injection attacks.
-
-The list form (e.g., C<system('ls', '-la')>) is allowed as it executes directly without shell interpretation.
-For better error handling and output capture, consider using C<IPC::Run3>.
-
-=head1 CONFIGURATION
-
-This Policy is not configurable except for the standard options.
-
-=head1 AUTHOR
-
-Hydra Development Team
-
-=head1 COPYRIGHT
-
-Copyright (c) 2025 Hydra Development Team. All rights reserved.
-
-=cut
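The POD above points at list-form system() and IPC::Run3 as the accepted alternatives. A minimal sketch of the cases the policy distinguishes, assuming IPC::Run3 is installed; the git invocation is purely illustrative:

```perl
use strict;
use warnings;
use IPC::Run3;

my $branch = 'merge/upstream';

# Flagged: a single interpolated string is passed through the shell,
# so metacharacters in $branch would be interpreted.
# system("git log --oneline $branch | head -n 5");

# Accepted: list form runs git directly, with no shell in between.
system('git', 'log', '--oneline', $branch) == 0
    or die "git log failed: $?";

# Accepted, and captures output without a shell; run3() is exported
# by IPC::Run3 by default.
run3(['git', 'log', '--oneline', $branch], \undef, \my $out, \my $err);
die "git log failed: $err" if $?;
print $out;
```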
@@ -2,8 +2,8 @@
 
 #include <pqxx/pqxx>
 
-#include <nix/util/environment-variables.hh>
-#include <nix/util/util.hh>
+#include "environment-variables.hh"
+#include "util.hh"
 
 
 struct Connection : pqxx::connection
@@ -27,20 +27,19 @@ struct Connection : pqxx::connection
 };
 
 
-class receiver
+class receiver : public pqxx::notification_receiver
 {
     std::optional<std::string> status;
-    pqxx::connection & conn;
 
 public:
 
     receiver(pqxx::connection_base & c, const std::string & channel)
-        : conn(static_cast<pqxx::connection &>(c))
+        : pqxx::notification_receiver(c, channel) { }
+
+    void operator() (const std::string & payload, int pid) override
     {
-        conn.listen(channel, [this](pqxx::notification n) {
-            status = std::string(n.payload);
-        });
-    }
+        status = payload;
+    };
 
     std::optional<std::string> get() {
         auto s = status;
@@ -2,8 +2,8 @@
 
 #include <map>
 
-#include <nix/util/file-system.hh>
-#include <nix/util/util.hh>
+#include "file-system.hh"
+#include "util.hh"
 
 struct HydraConfig
 {
@@ -57,12 +57,20 @@ fontawesome = custom_target(
   command: ['unzip', '-u', '-d', '@OUTDIR@', '@INPUT@'],
 )
 custom_target(
-  'name-fontawesome',
+  'name-fontawesome-css',
   input: fontawesome,
-  output: 'fontawesome',
-  command: ['cp', '-r', '@INPUT@' , '@OUTPUT@'],
+  output: 'css',
+  command: ['cp', '-r', '@INPUT@/css', '@OUTPUT@'],
   install: true,
-  install_dir: hydra_libexecdir_static,
+  install_dir: hydra_libexecdir_static / 'fontawesome',
+)
+custom_target(
+  'name-fontawesome-webfonts',
+  input: fontawesome,
+  output: 'webfonts',
+  command: ['cp', '-r', '@INPUT@/webfonts', '@OUTPUT@'],
+  install: true,
+  install_dir: hydra_libexecdir_static / 'fontawesome',
 )
 
 # Scripts
@@ -11,7 +11,7 @@ titleHTML="Latest builds" _
 "") %]
 [% PROCESS common.tt %]
 
-<p>Showing builds [% (page - 1) * resultsPerPage + 1 %] - [% (page - 1) * resultsPerPage + builds.size %] out of [% HTML.escape(total) %] in order of descending finish time.</p>
+<p>Showing builds [% (page - 1) * resultsPerPage + 1 %] - [% (page - 1) * resultsPerPage + builds.size %] out of [% total %] in order of descending finish time.</p>
 
 [% INCLUDE renderBuildList hideProjectName=project hideJobsetName=jobset hideJobName=job %]
 [% INCLUDE renderPager %]
@@ -37,7 +37,7 @@ END;
 seen.${step.drvpath} = 1;
 log = c.uri_for('/build' build.id 'nixlog' step.stepnr); %]
 <tr>
-<td>[% HTML.escape(step.stepnr) %]</td>
+<td>[% step.stepnr %]</td>
 <td>
 [% IF step.type == 0 %]
 Build of <tt>[% INCLUDE renderOutputs outputs=step.buildstepoutputs %]</tt>
@@ -61,7 +61,21 @@ END;
 <td>[% IF step.busy != 0 || ((step.machine || step.starttime) && (step.status == 0 || step.status == 1 || step.status == 3 || step.status == 4 || step.status == 7)); INCLUDE renderMachineName machine=step.machine; ELSE; "<em>n/a</em>"; END %]</td>
 <td class="step-status">
 [% IF step.busy != 0 %]
-[% INCLUDE renderBusyStatus %]
+[% IF step.busy == 1 %]
+<strong>Preparing</strong>
+[% ELSIF step.busy == 10 %]
+<strong>Connecting</strong>
+[% ELSIF step.busy == 20 %]
+<strong>Sending inputs</strong>
+[% ELSIF step.busy == 30 %]
+<strong>Building</strong>
+[% ELSIF step.busy == 40 %]
+<strong>Receiving outputs</strong>
+[% ELSIF step.busy == 50 %]
+<strong>Post-processing</strong>
+[% ELSE %]
+<strong>Unknown state</strong>
+[% END %]
 [% ELSIF step.status == 0 %]
 [% IF step.isnondeterministic %]
 <span class="warn">Succeeded with non-determistic result</span>
@@ -86,7 +100,7 @@ END;
 [% ELSIF step.status == 11 %]
 <span class="error">Output limit exceeded</span>
 [% ELSIF step.status == 12 %]
-<span class="error">Non-determinism detected</span> [% IF step.timesbuilt %] after [% HTML.escape(step.timesbuilt) %] times[% END %]
+<span class="error">Non-determinism detected</span> [% IF step.timesbuilt %] after [% step.timesbuilt %] times[% END %]
 [% ELSIF step.errormsg %]
 <span class="error">Failed</span>: <em>[% HTML.escape(step.errormsg) %]</em>
 [% ELSE %]
@@ -112,16 +126,16 @@ END;
|
|||||||
[% IF c.user_exists %]
|
[% IF c.user_exists %]
|
||||||
[% IF available %]
|
[% IF available %]
|
||||||
[% IF build.keep %]
|
[% IF build.keep %]
|
||||||
<a class="dropdown-item" [% HTML.attributes(href => c.uri_for('/build' build.id 'keep' 0)) %]>Unkeep</a>
|
<a class="dropdown-item" href="[% c.uri_for('/build' build.id 'keep' 0) %]">Unkeep</a>
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
<a class="dropdown-item" [% HTML.attributes(href => c.uri_for('/build' build.id 'keep' 1)) %]>Keep</a>
|
<a class="dropdown-item" href="[% c.uri_for('/build' build.id 'keep' 1) %]">Keep</a>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF build.finished %]
|
[% IF build.finished %]
|
||||||
<a class="dropdown-item" [% HTML.attributes(href => c.uri_for('/build' build.id 'restart')) %]>Restart</a>
|
<a class="dropdown-item" href="[% c.uri_for('/build' build.id 'restart') %]">Restart</a>
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
<a class="dropdown-item" [% HTML.attributes(href => c.uri_for('/build' build.id 'cancel')) %]>Cancel</a>
|
<a class="dropdown-item" href="[% c.uri_for('/build' build.id 'cancel') %]">Cancel</a>
|
||||||
<a class="dropdown-item" [% HTML.attributes(href => c.uri_for('/build' build.id 'bump')) %]>Bump up</a>
|
<a class="dropdown-item" href="[% c.uri_for('/build' build.id 'bump') %]">Bump up</a>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% END %]
|
[% END %]
|
||||||
</div>
|
</div>
|
||||||
@@ -132,7 +146,7 @@ END;
|
|||||||
<li class="nav-item"><a class="nav-link" href="#tabs-details" data-toggle="tab">Details</a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-details" data-toggle="tab">Details</a></li>
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-buildinputs" data-toggle="tab">Inputs</a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-buildinputs" data-toggle="tab">Inputs</a></li>
|
||||||
[% IF steps.size() > 0 %]<li class="nav-item"><a class="nav-link" href="#tabs-buildsteps" data-toggle="tab">Build Steps</a></li>[% END %]
|
[% IF steps.size() > 0 %]<li class="nav-item"><a class="nav-link" href="#tabs-buildsteps" data-toggle="tab">Build Steps</a></li>[% END %]
|
||||||
[% IF build.dependents %]<li class="nav-item"><a class="nav-link" href="#tabs-usedby" data-toggle="tab">Used By</a></li>[% END %]
|
[% IF build.dependents %]<li class="nav-item"><a class="nav-link" href="#tabs-usedby" data-toggle="tab">Used By</a></li>[% END%]
|
||||||
[% IF drvAvailable %]<li class="nav-item"><a class="nav-link" href="#tabs-build-deps" data-toggle="tab">Build Dependencies</a></li>[% END %]
|
[% IF drvAvailable %]<li class="nav-item"><a class="nav-link" href="#tabs-build-deps" data-toggle="tab">Build Dependencies</a></li>[% END %]
|
||||||
[% IF localStore && available %]<li class="nav-item"><a class="nav-link" href="#tabs-runtime-deps" data-toggle="tab">Runtime Dependencies</a></li>[% END %]
|
[% IF localStore && available %]<li class="nav-item"><a class="nav-link" href="#tabs-runtime-deps" data-toggle="tab">Runtime Dependencies</a></li>[% END %]
|
||||||
[% IF runcommandlogProblem || runcommandlogs.size() > 0 %]<li class="nav-item"><a class="nav-link" href="#tabs-runcommandlogs" data-toggle="tab">RunCommand Logs[% IF runcommandlogProblem %] <span class="badge badge-warning">Disabled</span>[% END %]</a></li>[% END %]
|
[% IF runcommandlogProblem || runcommandlogs.size() > 0 %]<li class="nav-item"><a class="nav-link" href="#tabs-runcommandlogs" data-toggle="tab">RunCommand Logs[% IF runcommandlogProblem %] <span class="badge badge-warning">Disabled</span>[% END %]</a></li>[% END %]
|
||||||
@@ -151,7 +165,7 @@ END;
|
|||||||
<table class="info-table">
|
<table class="info-table">
|
||||||
<tr>
|
<tr>
|
||||||
<th>Build ID:</th>
|
<th>Build ID:</th>
|
||||||
<td>[% HTML.escape(build.id) %]</td>
|
<td>[% build.id %]</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<th>Status:</th>
|
<th>Status:</th>
|
||||||
@@ -168,9 +182,9 @@ END;
|
|||||||
END;
|
END;
|
||||||
%];
|
%];
|
||||||
[%+ IF nrFinished == nrConstituents && nrFailedConstituents == 0 %]
|
[%+ IF nrFinished == nrConstituents && nrFailedConstituents == 0 %]
|
||||||
all [% HTML.escape(nrConstituents) %] constituent builds succeeded
|
all [% nrConstituents %] constituent builds succeeded
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
[% HTML.escape(nrFailedConstituents) %] out of [% HTML.escape(nrConstituents) %] constituent builds failed
|
[% nrFailedConstituents %] out of [% nrConstituents %] constituent builds failed
|
||||||
[% IF nrFinished < nrConstituents %]
|
[% IF nrFinished < nrConstituents %]
|
||||||
([% nrConstituents - nrFinished %] still pending)
|
([% nrConstituents - nrFinished %] still pending)
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -180,25 +194,25 @@ END;
|
|||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<th>System:</th>
|
<th>System:</th>
|
||||||
<td><tt>[% build.system | html %]</tt></td>
|
<td><tt>[% build.system %]</tt></td>
|
||||||
</tr>
|
</tr>
|
||||||
[% IF build.releasename %]
|
[% IF build.releasename %]
|
||||||
<tr>
|
<tr>
|
||||||
<th>Release name:</th>
|
<th>Release name:</th>
|
||||||
<td><tt>[% build.releasename | html %]</tt></td>
|
<td><tt>[% HTML.escape(build.releasename) %]</tt></td>
|
||||||
</tr>
|
</tr>
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
<tr>
|
<tr>
|
||||||
<th>Nix name:</th>
|
<th>Nix name:</th>
|
||||||
<td><tt>[% build.nixname | html %]</tt></td>
|
<td><tt>[% build.nixname %]</tt></td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF eval %]
|
[% IF eval %]
|
||||||
<tr>
|
<tr>
|
||||||
<th>Part of:</th>
|
<th>Part of:</th>
|
||||||
<td>
|
<td>
|
||||||
<a [% HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('view'), [eval.id])) %]>evaluation [% HTML.escape(eval.id) %]</a>
|
<a href="[% c.uri_for(c.controller('JobsetEval').action_for('view'), [eval.id]) %]">evaluation [% eval.id %]</a>
|
||||||
[% IF nrEvals > 1 +%] (and <a [% HTML.attributes(href => c.uri_for('/build' build.id 'evals')) %]>[% nrEvals - 1 %] others</a>)[% END %]
|
[% IF nrEvals > 1 +%] (and <a href="[% c.uri_for('/build' build.id 'evals') %]">[% nrEvals - 1 %] others</a>)[% END %]
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -226,9 +240,9 @@ END;
|
|||||||
<th>Logfile:</th>
|
<th>Logfile:</th>
|
||||||
<td>
|
<td>
|
||||||
[% actualLog = cachedBuildStep ? c.uri_for('/build' cachedBuild.id 'nixlog' cachedBuildStep.stepnr) : c.uri_for('/build' build.id 'log') %]
|
[% actualLog = cachedBuildStep ? c.uri_for('/build' cachedBuild.id 'nixlog' cachedBuildStep.stepnr) : c.uri_for('/build' build.id 'log') %]
|
||||||
<a class="btn btn-secondary btn-sm" [% HTML.attributes(href => actualLog) %]>pretty</a>
|
<a class="btn btn-secondary btn-sm" href="[%actualLog%]">pretty</a>
|
||||||
<a class="btn btn-secondary btn-sm" [% HTML.attributes(href => actualLog _ "/raw") %]>raw</a>
|
<a class="btn btn-secondary btn-sm" href="[%actualLog%]/raw">raw</a>
|
||||||
<a class="btn btn-secondary btn-sm" [% HTML.attributes(href => actualLog _ "/tail") %]>tail</a>
|
<a class="btn btn-secondary btn-sm" href="[%actualLog%]/tail">tail</a>
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -336,12 +350,12 @@ END;
|
|||||||
[% IF eval.nixexprinput %]
|
[% IF eval.nixexprinput %]
|
||||||
<tr>
|
<tr>
|
||||||
<th>Nix expression:</th>
|
<th>Nix expression:</th>
|
||||||
<td>file <tt>[% eval.nixexprpath | html %]</tt> in input <tt>[% eval.nixexprinput | html %]</tt></td>
|
<td>file <tt>[% HTML.escape(eval.nixexprpath) %]</tt> in input <tt>[% HTML.escape(eval.nixexprinput) %]</tt></td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
<tr>
|
<tr>
|
||||||
<th>Nix name:</th>
|
<th>Nix name:</th>
|
||||||
<td><tt>[% build.nixname | html %]</tt></td>
|
<td><tt>[% build.nixname %]</tt></td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<th>Short description:</th>
|
<th>Short description:</th>
|
||||||
@@ -361,11 +375,11 @@ END;
|
|||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<th>System:</th>
|
<th>System:</th>
|
||||||
<td><tt>[% build.system | html %]</tt></td>
|
<td><tt>[% build.system %]</tt></td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<th>Derivation store path:</th>
|
<th>Derivation store path:</th>
|
||||||
<td><tt>[% build.drvpath | html %]</tt></td>
|
<td><tt>[% build.drvpath %]</tt></td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<th>Output store paths:</th>
|
<th>Output store paths:</th>
|
||||||
@@ -376,14 +390,14 @@ END;
|
|||||||
<tr>
|
<tr>
|
||||||
<th>Closure size:</th>
|
<th>Closure size:</th>
|
||||||
<td>[% mibs(build.closuresize / (1024 * 1024)) %] MiB
|
<td>[% mibs(build.closuresize / (1024 * 1024)) %] MiB
|
||||||
(<a [% HTML.attributes(href => chartsURL) %]>history</a>)</td>
|
(<a href="[%chartsURL%]">history</a>)</td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF build.finished && build.closuresize %]
|
[% IF build.finished && build.closuresize %]
|
||||||
<tr>
|
<tr>
|
||||||
<th>Output size:</th>
|
<th>Output size:</th>
|
||||||
<td>[% mibs(build.size / (1024 * 1024)) %] MiB
|
<td>[% mibs(build.size / (1024 * 1024)) %] MiB
|
||||||
(<a [% HTML.attributes(href => chartsURL) %]>history</a>)</td>
|
(<a href="[%chartsURL%]">history</a>)</td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF build.finished && build.buildproducts %]
|
[% IF build.finished && build.buildproducts %]
|
||||||
@@ -412,9 +426,9 @@ END;
|
|||||||
<tbody>
|
<tbody>
|
||||||
[% FOREACH metric IN build.buildmetrics %]
|
[% FOREACH metric IN build.buildmetrics %]
|
||||||
<tr>
|
<tr>
|
||||||
<td><tt><a class="row-link" [% HTML.attributes(href => c.uri_for('/job' project.name jobset.name job 'metric' metric.name)) %]">[% metric.name | html %]</a></tt></td>
|
<td><tt><a class="row-link" [% HTML.attributes(href => c.uri_for('/job' project.name jobset.name job 'metric' metric.name)) %]">[%HTML.escape(metric.name)%]</a></tt></td>
|
||||||
<td style="text-align: right">[% HTML.escape(metric.value) %]</td>
|
<td style="text-align: right">[%metric.value%]</td>
|
||||||
<td>[% HTML.escape(metric.unit) %]</td>
|
<td>[%metric.unit%]</td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
</tbody>
|
</tbody>
|
||||||
@@ -456,8 +470,8 @@ END;
|
|||||||
[% FOREACH input IN build.dependents %]
|
[% FOREACH input IN build.dependents %]
|
||||||
<tr>
|
<tr>
|
||||||
<td>[% INCLUDE renderFullBuildLink build=input.build %]</td>
|
<td>[% INCLUDE renderFullBuildLink build=input.build %]</td>
|
||||||
<td><tt>[% input.name | html %]</tt></td>
|
<td><tt>[% input.name %]</tt></td>
|
||||||
<td><tt>[% input.build.system | html %]</tt></td>
|
<td><tt>[% input.build.system %]</tt></td>
|
||||||
<td>[% INCLUDE renderDateTime timestamp = input.build.timestamp %]</td>
|
<td>[% INCLUDE renderDateTime timestamp = input.build.timestamp %]</td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -484,7 +498,7 @@ END;
|
|||||||
[% ELSIF runcommandlogProblem == "disabled-jobset" %]
|
[% ELSIF runcommandlogProblem == "disabled-jobset" %]
|
||||||
This jobset does not enable Dynamic RunCommand support.
|
This jobset does not enable Dynamic RunCommand support.
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
Dynamic RunCommand is not enabled: [% HTML.escape(runcommandlogProblem) %].
|
Dynamic RunCommand is not enabled: [% runcommandlogProblem %].
|
||||||
[% END %]
|
[% END %]
|
||||||
</div>
|
</div>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -503,18 +517,18 @@ END;
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="d-flex flex-column mr-auto align-self-center">
|
<div class="d-flex flex-column mr-auto align-self-center">
|
||||||
<div><tt>[% runcommandlog.command | html %]</tt></div>
|
<div><tt>[% runcommandlog.command | html%]</tt></div>
|
||||||
<div>
|
<div>
|
||||||
[% IF not runcommandlog.is_running() %]
|
[% IF not runcommandlog.is_running() %]
|
||||||
[% IF runcommandlog.did_fail_with_signal() %]
|
[% IF runcommandlog.did_fail_with_signal() %]
|
||||||
Exit signal: [% runcommandlog.signal | html %]
|
Exit signal: [% runcommandlog.signal %]
|
||||||
[% IF runcommandlog.core_dumped %]
|
[% IF runcommandlog.core_dumped %]
|
||||||
(Core Dumped)
|
(Core Dumped)
|
||||||
[% END %]
|
[% END %]
|
||||||
[% ELSIF runcommandlog.did_fail_with_exec_error() %]
|
[% ELSIF runcommandlog.did_fail_with_exec_error() %]
|
||||||
Exec error: [% runcommandlog.error_number | html %]
|
Exec error: [% runcommandlog.error_number %]
|
||||||
[% ELSIF not runcommandlog.did_succeed() %]
|
[% ELSIF not runcommandlog.did_succeed() %]
|
||||||
Exit code: [% runcommandlog.exit_code | html %]
|
Exit code: [% runcommandlog.exit_code %]
|
||||||
[% END %]
|
[% END %]
|
||||||
[% END %]
|
[% END %]
|
||||||
</div>
|
</div>
|
||||||
@@ -532,9 +546,9 @@ END;
|
|||||||
[% IF runcommandlog.uuid != undef %]
|
[% IF runcommandlog.uuid != undef %]
|
||||||
[% runLog = c.uri_for('/build', build.id, 'runcommandlog', runcommandlog.uuid) %]
|
[% runLog = c.uri_for('/build', build.id, 'runcommandlog', runcommandlog.uuid) %]
|
||||||
<div>
|
<div>
|
||||||
<a class="btn btn-secondary btn-sm" [% HTML.attributes(href => runLog) %]>pretty</a>
|
<a class="btn btn-secondary btn-sm" href="[% runLog %]">pretty</a>
|
||||||
<a class="btn btn-secondary btn-sm" [% HTML.attributes(href => runLog) %]/raw">raw</a>
|
<a class="btn btn-secondary btn-sm" href="[% runLog %]/raw">raw</a>
|
||||||
<a class="btn btn-secondary btn-sm" [% HTML.attributes(href => runLog) %]/tail">tail</a>
|
<a class="btn btn-secondary btn-sm" href="[% runLog %]/tail">tail</a>
|
||||||
</div>
|
</div>
|
||||||
[% END %]
|
[% END %]
|
||||||
</div>
|
</div>
|
||||||
@@ -563,7 +577,7 @@ END;
|
|||||||
|
|
||||||
[% IF eval.flake %]
|
[% IF eval.flake %]
|
||||||
|
|
||||||
<p>If you have <a href='https://nixos.org/download/'>Nix
|
<p>If you have <a href='https://nixos.org/nix/download.html'>Nix
|
||||||
installed</a>, you can reproduce this build on your own machine by
|
installed</a>, you can reproduce this build on your own machine by
|
||||||
running the following command:</p>
|
running the following command:</p>
|
||||||
|
|
||||||
@@ -573,7 +587,7 @@ END;
|
|||||||
|
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
|
|
||||||
<p>If you have <a href='https://nixos.org/download/'>Nix
|
<p>If you have <a href='https://nixos.org/nix/download.html'>Nix
|
||||||
installed</a>, you can reproduce this build on your own machine by
|
installed</a>, you can reproduce this build on your own machine by
|
||||||
downloading <a [% HTML.attributes(href => url) %]>a script</a>
|
downloading <a [% HTML.attributes(href => url) %]>a script</a>
|
||||||
that checks out all inputs of the build and then invokes Nix to
|
that checks out all inputs of the build and then invokes Nix to
|
||||||
|
@@ -7,7 +7,7 @@ href="http://nixos.org/">Nix package manager</a>. If you have Nix
 installed, you can subscribe to this channel by once executing</p>
 
 <div class="card bg-light"><div class="card-body"><pre>
-<span class="shell-prompt">$ </span>nix-channel --add [% HTML.escape(curUri) +%]
+<span class="shell-prompt">$ </span>nix-channel --add [% curUri +%]
 <span class="shell-prompt">$ </span>nix-channel --update
 </pre></div></div>
 
@@ -49,9 +49,9 @@ installed, you can subscribe to this channel by once executing</p>
|
|||||||
[% b = pkg.build %]
|
[% b = pkg.build %]
|
||||||
|
|
||||||
<tr>
|
<tr>
|
||||||
<td><a [% HTML.attributes(href => c.uri_for('/build' b.id)) %]>[% HTML.escape(b.id) %]</a></td>
|
<td><a href="[% c.uri_for('/build' b.id) %]">[% b.id %]</a></td>
|
||||||
<td><tt>[% b.get_column('releasename') || b.nixname | html %]</tt></td>
|
<td><tt>[% b.get_column('releasename') || b.nixname %]</tt></td>
|
||||||
<td><tt>[% b.system | html %]</tt></td>
|
<td><tt>[% b.system %]</tt></td>
|
||||||
<td>
|
<td>
|
||||||
[% IF b.homepage %]
|
[% IF b.homepage %]
|
||||||
<a [% HTML.attributes(href => b.homepage) %]>[% HTML.escape(b.description) %]</a>
|
<a [% HTML.attributes(href => b.homepage) %]>[% HTML.escape(b.description) %]</a>
|
||||||
|
@@ -55,17 +55,17 @@ BLOCK renderRelativeDate %]
|
|||||||
[% END;
|
[% END;
|
||||||
|
|
||||||
BLOCK renderProjectName %]
|
BLOCK renderProjectName %]
|
||||||
<a [% IF inRow %]class="row-link"[% END %] [% HTML.attributes(href => c.uri_for('/project' project)) %]><tt>[% project | html %]</tt></a>
|
<a [% IF inRow %]class="row-link"[% END %] href="[% c.uri_for('/project' project) %]"><tt>[% project %]</tt></a>
|
||||||
[% END;
|
[% END;
|
||||||
|
|
||||||
|
|
||||||
BLOCK renderJobsetName %]
|
BLOCK renderJobsetName %]
|
||||||
<a [% IF inRow %]class="row-link"[% END %] [% HTML.attributes(href => c.uri_for('/jobset' project jobset)) %]><tt>[% jobset | html %]</tt></a>
|
<a [% IF inRow %]class="row-link"[% END %] href="[% c.uri_for('/jobset' project jobset) %]"><tt>[% jobset %]</tt></a>
|
||||||
[% END;
|
[% END;
|
||||||
|
|
||||||
|
|
||||||
BLOCK renderJobName %]
|
BLOCK renderJobName %]
|
||||||
<a [% IF inRow %]class="row-link"[% END %] [% HTML.attributes(href => c.uri_for('/job' project jobset job)) %]>[% job | html %]</a>
|
<a [% IF inRow %]class="row-link"[% END %] href="[% c.uri_for('/job' project jobset job) %]">[% job %]</a>
|
||||||
[% END;
|
[% END;
|
||||||
|
|
||||||
|
|
||||||
@@ -91,17 +91,6 @@ BLOCK renderDuration;
 duration % 60 %]s[%
 END;
 
-BLOCK renderDrvInfo;
-  drvname = step.drvpath
-    .substr(11) # strip `/nix/store/`
-    .split('-').slice(1).join("-") # strip hash part
-    .substr(0, -4); # strip `.drv`
-  IF drvname != releasename;
-    IF step.type == 0; action = "Build"; ELSE; action = "Substitution"; END;
-    IF drvname; %]<em> ([% HTML.escape(action) %] of [% HTML.escape(drvname) %])</em>[% END;
-  END;
-END;
-
 
 BLOCK renderBuildListHeader %]
 <table class="table table-striped table-condensed clickable-rows">
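The renderDrvInfo block removed above turns a derivation store path into a short package name by stripping the /nix/store/ prefix, the hash component, and the .drv suffix. The same transformation in plain Perl, as a minimal sketch (the example path is made up):

```perl
use strict;
use warnings;

my $drvpath = '/nix/store/abc123xyz-hello-2.12.1.drv';   # illustrative path

my $drvname = substr($drvpath, 11);   # strip "/nix/store/" (11 characters)
$drvname =~ s/^[^-]+-//;              # strip the hash part up to the first "-"
$drvname =~ s/\.drv$//;               # strip the ".drv" suffix

print "$drvname\n";                   # -> "hello-2.12.1"
```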
@@ -140,25 +129,20 @@ BLOCK renderBuildListBody;
 [% IF showSchedulingInfo %]
 <td>[% IF busy %]<span class="badge badge-success">Started</span>[% ELSE %]<span class="badge badge-secondary">Queued</span>[% END %]</td>
 [% END %]
-<td><a class="row-link" [% HTML.attributes(href => link) %]>[% HTML.escape(build.id) %]</a></td>
+<td><a class="row-link" href="[% link %]">[% build.id %]</a></td>
 [% IF !hideJobName %]
-<td>
-  <a [% HTML.attributes(href => link) %]>[% IF !hideJobsetName %][% HTML.escape(build.jobset.get_column("project")) %]:[% HTML.escape(build.jobset.get_column("name")) %]:[% END %][% HTML.escape(build.get_column("job")) %]</a>
-  [% IF showStepName %]
-    [% INCLUDE renderDrvInfo step=build.buildsteps releasename=build.nixname %]
-  [% END %]
-</td>
+<td><a href="[%link%]">[% IF !hideJobsetName %][%build.jobset.get_column("project")%]:[%build.jobset.get_column("name")%]:[% END %][%build.get_column("job")%]</td>
 [% END %]
 <td class="nowrap">[% t = showSchedulingInfo ? build.timestamp : build.stoptime; IF t; INCLUDE renderRelativeDate timestamp=(showSchedulingInfo ? build.timestamp : build.stoptime); ELSE; "-"; END %]</td>
-<td>[% !showSchedulingInfo and build.get_column('releasename') ? HTML.escape(build.get_column('releasename')) : HTML.escape(build.nixname) %]</td>
-<td class="nowrap"><tt>[% build.system | html %]</tt></td>
+<td>[% !showSchedulingInfo and build.get_column('releasename') ? build.get_column('releasename') : build.nixname %]</td>
+<td class="nowrap"><tt>[% build.system %]</tt></td>
 [% IF showDescription %]
-<td>[% HTML.escape(build.description) %]</td>
+<td>[% build.description %]</td>
 [% END %]
 </tr>
 [% END;
 IF linkToAll %]
-<tr><td class="centered" colspan="5"><a [% HTML.attributes(href => linkToAll) %]><em>More...</em></a></td></tr>
+<tr><td class="centered" colspan="5"><a href="[% linkToAll %]"><em>More...</em></a></td></tr>
 [% END;
 END;
 
@@ -176,11 +160,11 @@ BLOCK renderBuildList;
|
|||||||
END;
|
END;
|
||||||
|
|
||||||
|
|
||||||
BLOCK renderLink %]<a [% HTML.attributes(href => uri) %]>[% HTML.escape(title) %]</a>[% END;
|
BLOCK renderLink %]<a href="[% uri %]">[% title %]</a>[% END;
|
||||||
|
|
||||||
|
|
||||||
BLOCK maybeLink;
|
BLOCK maybeLink;
|
||||||
IF uri %]<a [% HTML.attributes(href => uri, class => class); IF confirmmsg +%] onclick="javascript:return confirm('[% confirmmsg %]')"[% END %]>[% HTML.escape(content) %]</a>[% ELSE; HTML.escape(content); END;
|
IF uri %]<a [% HTML.attributes(href => uri, class => class); IF confirmmsg +%] onclick="javascript:return confirm('[% confirmmsg %]')"[% END %]>[% content %]</a>[% ELSE; content; END;
|
||||||
END;
|
END;
|
||||||
|
|
||||||
|
|
||||||
@@ -192,7 +176,7 @@ BLOCK renderSelection;
|
|||||||
<label class="radio inline">
|
<label class="radio inline">
|
||||||
<input type="radio" [% HTML.attributes(id => param, name => param, value => name) %]
|
<input type="radio" [% HTML.attributes(id => param, name => param, value => name) %]
|
||||||
[% IF name == curValue; "checked='1'"; END %]>
|
[% IF name == curValue; "checked='1'"; END %]>
|
||||||
[% HTML.escape(options.$name) %]
|
[% options.$name %]
|
||||||
</input>
|
</input>
|
||||||
</label>
|
</label>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -200,7 +184,7 @@ BLOCK renderSelection;
|
|||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
<select class="custom-select" [% HTML.attributes(id => param, name => param) %]>
|
<select class="custom-select" [% HTML.attributes(id => param, name => param) %]>
|
||||||
[% FOREACH name IN options.keys.sort %]
|
[% FOREACH name IN options.keys.sort %]
|
||||||
<option [% IF name == curValue; "selected='selected'"; END; " "; HTML.attributes(value => name) %]>[% HTML.escape(options.$name) %]</option>
|
<option [% IF name == curValue; "selected='selected'"; END; " "; HTML.attributes(value => name) %]>[% options.$name %]</option>
|
||||||
[% END %]
|
[% END %]
|
||||||
</select>
|
</select>
|
||||||
[% END;
|
[% END;
|
||||||
@@ -216,12 +200,12 @@ BLOCK editString; %]
|
|||||||
|
|
||||||
|
|
||||||
BLOCK renderFullBuildLink;
|
BLOCK renderFullBuildLink;
|
||||||
INCLUDE renderFullJobNameOfBuild build=build %] <a [% HTML.attributes(href => c.uri_for('/build' build.id)) %]>build [% HTML.escape(build.id) %]</a>[%
|
INCLUDE renderFullJobNameOfBuild build=build %] <a href="[% c.uri_for('/build' build.id) %]">build [% build.id %]</a>[%
|
||||||
END;
|
END;
|
||||||
|
|
||||||
|
|
||||||
BLOCK renderBuildIdLink; %]
|
BLOCK renderBuildIdLink; %]
|
||||||
<a [% HTML.attributes(href => c.uri_for('/build' id)) %]>build [% HTML.escape(id) %]</a>
|
<a href="[% c.uri_for('/build' id) %]">build [% id %]</a>
|
||||||
[% END;
|
[% END;
|
||||||
|
|
||||||
|
|
||||||
@@ -261,27 +245,6 @@ BLOCK renderBuildStatusIcon;
 END;
 
 
-BLOCK renderBusyStatus;
-  IF step.busy == 1 %]
-    <strong>Preparing</strong>
-  [% ELSIF step.busy == 10 %]
-    <strong>Connecting</strong>
-  [% ELSIF step.busy == 20 %]
-    <strong>Sending inputs</strong>
-  [% ELSIF step.busy == 30 %]
-    <strong>Building</strong>
-  [% ELSIF step.busy == 35 %]
-    <strong>Waiting to receive outputs</strong>
-  [% ELSIF step.busy == 40 %]
-    <strong>Receiving outputs</strong>
-  [% ELSIF step.busy == 50 %]
-    <strong>Post-processing</strong>
-  [% ELSE %]
-    <strong>Unknown state</strong>
-  [% END;
-END;
-
-
 BLOCK renderStatus;
 IF build.finished;
 buildstatus = build.buildstatus;
@@ -320,7 +283,7 @@ END;
|
|||||||
|
|
||||||
BLOCK renderShortInputValue;
|
BLOCK renderShortInputValue;
|
||||||
IF input.type == "build" || input.type == "sysbuild" %]
|
IF input.type == "build" || input.type == "sysbuild" %]
|
||||||
<a [% HTML.attributes(href => c.uri_for('/build' input.dependency.id)) %]>[% HTML.escape(input.dependency.id) %]</a>
|
<a href="[% c.uri_for('/build' input.dependency.id) %]">[% input.dependency.id %]</a>
|
||||||
[% ELSIF input.type == "string" %]
|
[% ELSIF input.type == "string" %]
|
||||||
<tt>"[% HTML.escape(input.value) %]"</tt>
|
<tt>"[% HTML.escape(input.value) %]"</tt>
|
||||||
[% ELSIF input.type == "nix" || input.type == "boolean" %]
|
[% ELSIF input.type == "nix" || input.type == "boolean" %]
|
||||||
@@ -338,7 +301,7 @@ BLOCK renderDiffUri;
|
|||||||
url = bi1.uri;
|
url = bi1.uri;
|
||||||
path = url.replace(base, '');
|
path = url.replace(base, '');
|
||||||
IF url.match(base) %]
|
IF url.match(base) %]
|
||||||
<a target="_blank" [% HTML.attributes(href => m.uri.replace('_path_', path).replace('_1_', bi1.revision).replace('_2_', bi2.revision)) %]>[% HTML.escape(contents) %]</a>
|
<a target="_blank" href="[% m.uri.replace('_path_', path).replace('_1_', bi1.revision).replace('_2_', bi2.revision) %]">[% contents %]</a>
|
||||||
[% nouri = 0;
|
[% nouri = 0;
|
||||||
END;
|
END;
|
||||||
END;
|
END;
|
||||||
@@ -347,13 +310,13 @@ BLOCK renderDiffUri;
|
|||||||
url = res.0;
|
url = res.0;
|
||||||
branch = res.1;
|
branch = res.1;
|
||||||
IF bi1.type == "hg" || bi1.type == "git" %]
|
IF bi1.type == "hg" || bi1.type == "git" %]
|
||||||
<a target="_blank" [% HTML.attributes(href => c.uri_for('/api/scmdiff', {
|
<a target="_blank" href="[% HTML.escape(c.uri_for('/api/scmdiff', {
|
||||||
uri = url,
|
uri = url,
|
||||||
rev1 = bi1.revision,
|
rev1 = bi1.revision,
|
||||||
rev2 = bi2.revision,
|
rev2 = bi2.revision,
|
||||||
type = bi1.type,
|
type = bi1.type,
|
||||||
branch = branch
|
branch = branch
|
||||||
})) %]>[% HTML.escape(contents) %]</a>
|
})) %]">[% contents %]</a>
|
||||||
[% ELSE;
|
[% ELSE;
|
||||||
contents;
|
contents;
|
||||||
END;
|
END;
|
||||||
@@ -369,8 +332,8 @@ BLOCK renderInputs; %]
|
|||||||
<tbody>
|
<tbody>
|
||||||
[% FOREACH input IN inputs %]
|
[% FOREACH input IN inputs %]
|
||||||
<tr>
|
<tr>
|
||||||
<td><tt>[% input.name | html %]</tt></td>
|
<td><tt>[% input.name %]</tt></td>
|
||||||
<td>[% type = input.type; HTML.escape(inputTypes.$type) %]</td>
|
<td>[% type = input.type; inputTypes.$type %]</td>
|
||||||
<td>
|
<td>
|
||||||
[% IF input.type == "build" || input.type == "sysbuild" %]
|
[% IF input.type == "build" || input.type == "sysbuild" %]
|
||||||
[% INCLUDE renderFullBuildLink build=input.dependency %]
|
[% INCLUDE renderFullBuildLink build=input.dependency %]
|
||||||
@@ -383,7 +346,7 @@ BLOCK renderInputs; %]
|
|||||||
[% END %]
|
[% END %]
|
||||||
</td>
|
</td>
|
||||||
<td>[% IF input.revision %][% HTML.escape(input.revision) %][% END %]</td>
|
<td>[% IF input.revision %][% HTML.escape(input.revision) %][% END %]</td>
|
||||||
<td><tt>[% input.path | html %]</tt></td>
|
<td><tt>[% input.path %]</tt></td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
</tbody>
|
</tbody>
|
||||||
@@ -407,33 +370,33 @@ BLOCK renderInputDiff; %]
|
|||||||
IF bi1.name == bi2.name;
|
IF bi1.name == bi2.name;
|
||||||
IF bi1.type == bi2.type;
|
IF bi1.type == bi2.type;
|
||||||
IF bi1.value != bi2.value || bi1.uri != bi2.uri %]
|
IF bi1.value != bi2.value || bi1.uri != bi2.uri %]
|
||||||
<tr><td><b>[% HTML.escape(bi1.name) %]</b></td><td><tt>[% INCLUDE renderShortInputValue input=bi1 %]</tt> to <tt>[% INCLUDE renderShortInputValue input=bi2 %]</tt></td></tr>
|
<tr><td><b>[% bi1.name %]</b></td><td><tt>[% INCLUDE renderShortInputValue input=bi1 %]</tt> to <tt>[% INCLUDE renderShortInputValue input=bi2 %]</tt></td></tr>
|
||||||
[% ELSIF bi1.uri == bi2.uri && bi1.revision != bi2.revision %]
|
[% ELSIF bi1.uri == bi2.uri && bi1.revision != bi2.revision %]
|
||||||
[% IF bi1.type == "git" %]
|
[% IF bi1.type == "git" %]
|
||||||
<tr><td>
|
<tr><td>
|
||||||
<b>[% HTML.escape(bi1.name) %]</b></td><td><tt>[% INCLUDE renderDiffUri contents=(bi1.revision.substr(0, 12) _ ' to ' _ bi2.revision.substr(0, 12)) %]</tt>
|
<b>[% bi1.name %]</b></td><td><tt>[% INCLUDE renderDiffUri contents=(bi1.revision.substr(0, 12) _ ' to ' _ bi2.revision.substr(0, 12)) %]</tt>
|
||||||
</td></tr>
|
</td></tr>
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
<tr><td>
|
<tr><td>
|
||||||
<b>[% HTML.escape(bi1.name) %]</b></td><td><tt>[% INCLUDE renderDiffUri contents=(bi1.revision _ ' to ' _ bi2.revision) %]</tt>
|
<b>[% bi1.name %]</b></td><td><tt>[% INCLUDE renderDiffUri contents=(bi1.revision _ ' to ' _ bi2.revision) %]</tt>
|
||||||
</td></tr>
|
</td></tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% ELSIF bi1.dependency.id != bi2.dependency.id || bi1.path != bi2.path %]
|
[% ELSIF bi1.dependency.id != bi2.dependency.id || bi1.path != bi2.path %]
|
||||||
<tr><td>
|
<tr><td>
|
||||||
<b>[% HTML.escape(bi1.name) %]</b></td><td><tt>[% INCLUDE renderShortInputValue input=bi1 %]</tt> to <tt>[% INCLUDE renderShortInputValue input=bi2 %]</tt>
|
<b>[% bi1.name %]</b></td><td><tt>[% INCLUDE renderShortInputValue input=bi1 %]</tt> to <tt>[% INCLUDE renderShortInputValue input=bi2 %]</tt>
|
||||||
<br/>
|
<br/>
|
||||||
<br/>
|
<br/>
|
||||||
[% INCLUDE renderInputDiff inputs1=bi1.dependency.inputs inputs2=bi2.dependency.inputs nestedDiff=1 nestLevel=nestLevel+1 %]
|
[% INCLUDE renderInputDiff inputs1=bi1.dependency.inputs inputs2=bi2.dependency.inputs nestedDiff=1 nestLevel=nestLevel+1 %]
|
||||||
</td></tr>
|
</td></tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
<tr><td><b>[% HTML.escape(bi1.name) %]</b></td><td>Changed input type from '[% type = bi1.type; HTML.escape(inputTypes.$type) %]' to '[% type = bi2.type; HTML.escape(inputTypes.$type) %]'</td></tr>
|
<tr><td><b>[% bi1.name %]</b></td><td>Changed input type from '[% type = bi1.type; inputTypes.$type %]' to '[% type = bi2.type; inputTypes.$type %]'</td></tr>
|
||||||
[% END;
|
[% END;
|
||||||
deletedInput = 0;
|
deletedInput = 0;
|
||||||
END;
|
END;
|
||||||
END;
|
END;
|
||||||
IF deletedInput == 1 %]
|
IF deletedInput == 1 %]
|
||||||
<tr><td><b>[% HTML.escape(bi1.name) %]</b></td><td>Input not present in this build.</td></tr>
|
<tr><td><b>[% bi1.name %]</b></td><td>Input not present in this build.</td></tr>
|
||||||
[% END;
|
[% END;
|
||||||
END;
|
END;
|
||||||
END %]
|
END %]
|
||||||
@@ -443,10 +406,10 @@ BLOCK renderInputDiff; %]
|
|||||||
|
|
||||||
BLOCK renderPager %]
|
BLOCK renderPager %]
|
||||||
<ul class="pagination">
|
<ul class="pagination">
|
||||||
<li class="page-item[% IF page == 1 %] disabled[% END %]"><a class="page-link" [% HTML.attributes(href => "$baseUri?page=1") %]>« First</a></li>
|
<li class="page-item[% IF page == 1 %] disabled[% END %]"><a class="page-link" href="[% "$baseUri?page=1" %]">« First</a></li>
|
||||||
<li class="page-item[% IF page == 1 %] disabled[% END %]"><a class="page-link" [% HTML.attributes(href => "$baseUri?page=" _ (page - 1)) %]>‹ Previous</a></li>
|
<li class="page-item[% IF page == 1 %] disabled[% END %]"><a class="page-link" href="[% "$baseUri?page="; (page - 1) %]">‹ Previous</a></li>
|
||||||
<li class="page-item[% IF page * resultsPerPage >= total %] disabled[% END %]"><a class="page-link" [% HTML.attributes(href => "$baseUri?page=" _ (page + 1)) %]>Next ›</a></li>
|
<li class="page-item[% IF page * resultsPerPage >= total %] disabled[% END %]"><a class="page-link" href="[% "$baseUri?page="; (page + 1) %]">Next ›</a></li>
|
||||||
<li class="page-item[% IF page * resultsPerPage >= total %] disabled[% END %]"><a class="page-link" [% HTML.attributes(href => "$baseUri?page=" _ ((total - 1) div resultsPerPage + 1)) %]>Last »</a></li>
|
<li class="page-item[% IF page * resultsPerPage >= total %] disabled[% END %]"><a class="page-link" href="[% "$baseUri?page="; (total - 1) div resultsPerPage + 1 %]">Last »</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
[% END;
|
[% END;
|
||||||
|
|
||||||
@@ -455,13 +418,13 @@ BLOCK renderShortEvalInput;
|
|||||||
IF input.type == "svn" || input.type == "svn-checkout" || input.type == "bzr" || input.type == "bzr-checkout" %]
|
IF input.type == "svn" || input.type == "svn-checkout" || input.type == "bzr" || input.type == "bzr-checkout" %]
|
||||||
r[% input.revision %]
|
r[% input.revision %]
|
||||||
[% ELSIF input.type == "git" %]
|
[% ELSIF input.type == "git" %]
|
||||||
<tt>[% input.revision.substr(0, 7) | html %]</tt>
|
<tt>[% input.revision.substr(0, 7) %]</tt>
|
||||||
[% ELSIF input.type == "hg" %]
|
[% ELSIF input.type == "hg" %]
|
||||||
<tt>[% input.revision.substr(0, 12) | html %]</tt>
|
<tt>[% input.revision.substr(0, 12) %]</tt>
|
||||||
[% ELSIF input.type == "build" || input.type == "sysbuild" %]
|
[% ELSIF input.type == "build" || input.type == "sysbuild" %]
|
||||||
<a [% HTML.attributes(href => c.uri_for('/build' input.get_column('dependency'))) %]>[% HTML.escape(input.get_column('dependency')) %]</a>
|
<a href="[% c.uri_for('/build' input.get_column('dependency')) %]">[% input.get_column('dependency') %]</a>
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
<tt>[% input.revision | html %]</tt>
|
<tt>[% input.revision %]</tt>
|
||||||
[% END;
|
[% END;
|
||||||
END;
|
END;
|
||||||
|
|
||||||
@@ -498,7 +461,7 @@ BLOCK renderEvals %]
|
|||||||
eval = e.eval;
|
eval = e.eval;
|
||||||
link = c.uri_for(c.controller('JobsetEval').action_for('view'), [eval.id]) %]
|
link = c.uri_for(c.controller('JobsetEval').action_for('view'), [eval.id]) %]
|
||||||
<tr>
|
<tr>
|
||||||
<td><a class="row-link" [% HTML.attributes(href => link) %]>[% HTML.escape(eval.id) %]</a></td>
|
<td><a class="row-link" href="[% link %]">[% eval.id %]</a></td>
|
||||||
[% IF !jobset && !build %]
|
[% IF !jobset && !build %]
|
||||||
<td>[% INCLUDE renderFullJobsetName project=eval.jobset.project.name jobset=eval.jobset.name %]</td>
|
<td>[% INCLUDE renderFullJobsetName project=eval.jobset.project.name jobset=eval.jobset.name %]</td>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -507,40 +470,40 @@ BLOCK renderEvals %]
|
|||||||
[% IF e.changedInputs.size > 0;
|
[% IF e.changedInputs.size > 0;
|
||||||
sep='';
|
sep='';
|
||||||
FOREACH input IN e.changedInputs;
|
FOREACH input IN e.changedInputs;
|
||||||
sep; %] [% HTML.escape(input.name) %] → [% INCLUDE renderShortEvalInput input=input;
|
sep; %] [% input.name %] → [% INCLUDE renderShortEvalInput input=input;
|
||||||
sep=', ';
|
sep=', ';
|
||||||
END;
|
END;
|
||||||
ELSE %]
|
ELSE %]
|
||||||
-
|
-
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF eval.evaluationerror.has_error %]
|
[% IF eval.evaluationerror.errormsg %]
|
||||||
<span class="badge badge-warning">Eval Errors</span>
|
<span class="badge badge-warning">Eval Errors</span>
|
||||||
[% END %]
|
[% END %]
|
||||||
</td>
|
</td>
|
||||||
<td align='right' class="nowrap">
|
<td align='right' class="nowrap">
|
||||||
<span class="badge badge-success">[% HTML.escape(e.nrSucceeded) %]</span>
|
<span class="badge badge-success">[% e.nrSucceeded %]</span>
|
||||||
</td>
|
</td>
|
||||||
<td align="right" class="nowrap">
|
<td align="right" class="nowrap">
|
||||||
[% IF e.nrFailed > 0 %]
|
[% IF e.nrFailed > 0 %]
|
||||||
<span class="badge badge-danger">[% HTML.escape(e.nrFailed) %]</span>
|
<span class="badge badge-danger">[% e.nrFailed %]</span>
|
||||||
[% END %]
|
[% END %]
|
||||||
</td>
|
</td>
|
||||||
<td align="right" class="nowrap">
|
<td align="right" class="nowrap">
|
||||||
[% IF e.nrScheduled > 0 %]
|
[% IF e.nrScheduled > 0 %]
|
||||||
<span class="badge badge-secondary">[% HTML.escape(e.nrScheduled) %]</span>
|
<span class="badge badge-secondary">[% e.nrScheduled %]</span>
|
||||||
[% END %]
|
[% END %]
|
||||||
</td>
|
</td>
|
||||||
<td align='right' class="nowrap">
|
<td align='right' class="nowrap">
|
||||||
[% IF e.diff > 0 %]
|
[% IF e.diff > 0 %]
|
||||||
<span class='badge badge-success'><strong>+[% HTML.escape(e.diff) %]</strong></span>
|
<span class='badge badge-success'><strong>+[% e.diff %]</strong></span>
|
||||||
[% ELSIF e.diff < 0 && e.nrScheduled == 0 %]
|
[% ELSIF e.diff < 0 && e.nrScheduled == 0 %]
|
||||||
<span class='badge badge-danger'><strong>[% HTML.escape(e.diff) %]</strong></span>
|
<span class='badge badge-danger'><strong>[% e.diff %]</strong></span>
|
||||||
[% END %]
|
[% END %]
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END;
|
[% END;
|
||||||
IF linkToAll %]
|
IF linkToAll %]
|
||||||
<tr><td class="centered" colspan="7"><a [% HTML.attributes(href => linkToAll) %]><em>More...</em></a></td></tr>
|
<tr><td class="centered" colspan="7"><a href="[% linkToAll %]"><em>More...</em></a></td></tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
@@ -548,19 +511,19 @@ BLOCK renderEvals %]
|
|||||||
|
|
||||||
|
|
||||||
BLOCK renderLogLinks %]
|
BLOCK renderLogLinks %]
|
||||||
(<a [% IF inRow %]class="row-link"[% END %] [% HTML.attributes(href => url) %]>log</a>, <a [% HTML.attributes(href => "$url/raw") %]>raw</a>, <a [% HTML.attributes(href => "$url/tail") %]>tail</a>)
|
(<a [% IF inRow %]class="row-link"[% END %] href="[% url %]">log</a>, <a href="[% "$url/raw" %]">raw</a>, <a href="[% "$url/tail" %]">tail</a>)
|
||||||
[% END;
|
[% END;
|
||||||
|
|
||||||
|
|
||||||
BLOCK makeLazyTab %]
|
BLOCK makeLazyTab %]
|
||||||
<div [% HTML.attributes(id => tabName) %] class="tab-pane">
|
<div id="[% tabName %]" class="tab-pane">
|
||||||
<center><span class="spinner-border spinner-border-sm"/></center>
|
<center><span class="spinner-border spinner-border-sm"/></center>
|
||||||
</div>
|
</div>
|
||||||
<script>
|
<script>
|
||||||
[% IF callback.defined %]
|
[% IF callback.defined %]
|
||||||
$(function() { makeLazyTab("[% HTML.escape(tabName) %]", "[% uri %]", [% callback %] ); });
|
$(function() { makeLazyTab("[% tabName %]", "[% uri %]", [% callback %] ); });
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
$(function() { makeLazyTab("[% HTML.escape(tabName) %]", "[% uri %]", null ); });
|
$(function() { makeLazyTab("[% tabName %]", "[% uri %]", null ); });
|
||||||
[% END %]
|
[% END %]
|
||||||
</script>
|
</script>
|
||||||
[% END;
|
[% END;
|
||||||
@@ -587,7 +550,7 @@ BLOCK navItem %]
|
|||||||
<li class="nav-item">
|
<li class="nav-item">
|
||||||
<a class="nav-link[% IF "${root}${curUri}" == uri %] active[% END %]"
|
<a class="nav-link[% IF "${root}${curUri}" == uri %] active[% END %]"
|
||||||
[% HTML.attributes(href => uri) %]>
|
[% HTML.attributes(href => uri) %]>
|
||||||
[% HTML.escape(title) %]
|
[% title %]
|
||||||
</a>
|
</a>
|
||||||
</li>
|
</li>
|
||||||
[% END;
|
[% END;
|
||||||
@@ -639,7 +602,7 @@ BLOCK renderJobsetOverview %]
|
|||||||
<td>[% HTML.escape(j.description) %]</td>
|
<td>[% HTML.escape(j.description) %]</td>
|
||||||
<td>[% IF j.lastcheckedtime;
|
<td>[% IF j.lastcheckedtime;
|
||||||
INCLUDE renderDateTime timestamp = j.lastcheckedtime;
|
INCLUDE renderDateTime timestamp = j.lastcheckedtime;
|
||||||
IF j.has_error || j.fetcherrormsg; %] <span class = 'badge badge-warning'>Error</span>[% END;
|
IF j.errormsg || j.fetcherrormsg; %] <span class = 'badge badge-warning'>Error</span>[% END;
|
||||||
ELSE; "-";
|
ELSE; "-";
|
||||||
END %]</td>
|
END %]</td>
|
||||||
[% IF j.get_column('nrtotal') > 0 %]
|
[% IF j.get_column('nrtotal') > 0 %]
|
||||||
@@ -657,17 +620,17 @@ BLOCK renderJobsetOverview %]
|
|||||||
<td><span class="[% class %]">[% successrate FILTER format('%d') %]%</span></td>
|
<td><span class="[% class %]">[% successrate FILTER format('%d') %]%</span></td>
|
||||||
<td>
|
<td>
|
||||||
[% IF j.get_column('nrsucceeded') > 0 %]
|
[% IF j.get_column('nrsucceeded') > 0 %]
|
||||||
<span class="badge badge-success">[% HTML.escape(j.get_column('nrsucceeded')) %]</span>
|
<span class="badge badge-success">[% j.get_column('nrsucceeded') %]</span>
|
||||||
[% END %]
|
[% END %]
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
[% IF j.get_column('nrfailed') > 0 %]
|
[% IF j.get_column('nrfailed') > 0 %]
|
||||||
<span class="badge badge-danger">[% HTML.escape(j.get_column('nrfailed')) %]</span>
|
<span class="badge badge-danger">[% j.get_column('nrfailed') %]</span>
|
||||||
[% END %]
|
[% END %]
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
[% IF j.get_column('nrscheduled') > 0 %]
|
[% IF j.get_column('nrscheduled') > 0 %]
|
||||||
<span class="badge badge-secondary">[% HTML.escape(j.get_column('nrscheduled')) %]</span>
|
<span class="badge badge-secondary">[% j.get_column('nrscheduled') %]</span>
|
||||||
[% END %]
|
[% END %]
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
@@ -685,22 +648,14 @@ BLOCK includeFlot %]
 [% END;
 
 
-BLOCK renderYesNo %]
-  [% IF value %]
-    <span class="text-success">Yes</span>
-  [% ELSE %]
-    <span class="text-danger">No</span>
-  [% END %]
-[% END;
-
 BLOCK createChart %]
 
-<div id="[% id %]-chart" style="width: 1000px; height: 400px;"></div>
-<div id="[% id %]-overview" style="margin-top: 20px; margin-left: 50px; margin-right: 50px; width: 900px; height: 100px"></div>
+<div id="[%id%]-chart" style="width: 1000px; height: 400px;"></div>
+<div id="[%id%]-overview" style="margin-top: 20px; margin-left: 50px; margin-right: 50px; width: 900px; height: 100px"></div>
 
 <script type="text/javascript">
 $(function() {
-showChart("[% HTML.escape(id) %]", "[% dataUrl %]", "[% yaxis %]");
+showChart("[%id%]", "[%dataUrl%]", "[%yaxis%]");
 });
 </script>
 
|
@@ -9,7 +9,7 @@
|
|||||||
|
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
|
|
||||||
<p>Below are the most recent builds of the [% HTML.escape(builds.size) %] jobs of which you
|
<p>Below are the most recent builds of the [% builds.size %] jobs of which you
|
||||||
(<tt>[% HTML.escape(user.emailaddress) %]</tt>) are a maintainer.</p>
|
(<tt>[% HTML.escape(user.emailaddress) %]</tt>) are a maintainer.</p>
|
||||||
|
|
||||||
[% INCLUDE renderBuildList %]
|
[% INCLUDE renderBuildList %]
|
||||||
|
@@ -24,7 +24,7 @@
|
|||||||
<tr>
|
<tr>
|
||||||
<td><span class="[% IF !jobExists(j.job.jobset j.job.job) %]disabled-job[% END %]">[% INCLUDE renderFullJobName project=j.job.get_column('project') jobset=j.job.get_column('jobset') job=j.job.job %]</span></td>
|
<td><span class="[% IF !jobExists(j.job.jobset j.job.job) %]disabled-job[% END %]">[% INCLUDE renderFullJobName project=j.job.get_column('project') jobset=j.job.get_column('jobset') job=j.job.job %]</span></td>
|
||||||
[% FOREACH b IN j.builds %]
|
[% FOREACH b IN j.builds %]
|
||||||
<td><a [% HTML.attributes(href => c.uri_for('/build' b.id)) %]>[% INCLUDE renderBuildStatusIcon size=16 build=b %]</a></td>
|
<td><a href="[% c.uri_for('/build' b.id) %]">[% INCLUDE renderBuildStatusIcon size=16 build=b %]</a></td>
|
||||||
[% END %]
|
[% END %]
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
|
@@ -3,20 +3,20 @@
|
|||||||
[% BLOCK renderNode %]
|
[% BLOCK renderNode %]
|
||||||
<li>
|
<li>
|
||||||
[% IF done.${node.path} %]
|
[% IF done.${node.path} %]
|
||||||
<tt>[% node.name | html %]</tt> (<a [% HTML.attributes(href => "#" _ done.${node.path}) %]><em>repeated</em></a>)
|
<tt>[% node.name %]</tt> (<a href="#[% done.${node.path} %]"><em>repeated</em></a>)
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
[% done.${node.path} = global.nodeId; global.nodeId = global.nodeId + 1; %]
|
[% done.${node.path} = global.nodeId; global.nodeId = global.nodeId + 1; %]
|
||||||
[% IF node.refs.size > 0 %]
|
[% IF node.refs.size > 0 %]
|
||||||
<a href="javascript:" class="tree-toggle"></a>
|
<a href="javascript:" class="tree-toggle"></a>
|
||||||
[% END %]
|
[% END %]
|
||||||
<span [% HTML.attributes(id => done.${node.path}) %]><span class="dep-tree-line">
|
<span id="[% done.${node.path} %]"><span class="dep-tree-line">
|
||||||
[% IF node.buildStep %]
|
[% IF node.buildStep %]
|
||||||
<a [% HTML.attributes(href => c.uri_for('/build' node.buildStep.get_column('build'))) %]><tt>[% node.name %]</tt></a> [%
|
<a href="[% c.uri_for('/build' node.buildStep.get_column('build')) %]"><tt>[% node.name %]</tt></a> [%
|
||||||
IF buildStepLogExists(node.buildStep);
|
IF buildStepLogExists(node.buildStep);
|
||||||
INCLUDE renderLogLinks url=c.uri_for('/build' node.buildStep.get_column('build') 'nixlog' node.buildStep.stepnr);
|
INCLUDE renderLogLinks url=c.uri_for('/build' node.buildStep.get_column('build') 'nixlog' node.buildStep.stepnr);
|
||||||
END %]
|
END %]
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
<tt>[% node.name | html %]</tt> (<em>no info</em>)
|
<tt>[% node.name %]</tt> (<em>no info</em>)
|
||||||
[% END %]
|
[% END %]
|
||||||
</span></span>
|
</span></span>
|
||||||
[% IF isRoot %]
|
[% IF isRoot %]
|
||||||
|
@@ -7,17 +7,17 @@
|
|||||||
[% USE format %]
|
[% USE format %]
|
||||||
|
|
||||||
[% BLOCK renderJobsetInput %]
|
[% BLOCK renderJobsetInput %]
|
||||||
<tr class="input [% extraClass %]" [% IF id %][% HTML.attributes(id => id) %][% END %]>
|
<tr class="input [% extraClass %]" [% IF id %]id="[% id %]"[% END %]>
|
||||||
<td>
|
<td>
|
||||||
<button type="button" class="btn btn-warning" onclick='$(this).parents(".input").remove()'><i class="fas fa-trash"></i></button>
|
<button type="button" class="btn btn-warning" onclick='$(this).parents(".input").remove()'><i class="fas fa-trash"></i></button>
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
<input type="text" [% HTML.attributes(id => baseName _ "-name", name => baseName _ "-name", value => input.name) %] />
|
<input type="text" id="[% baseName %]-name" name="[% baseName %]-name" [% HTML.attributes(value => input.name) %]/>
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
[% INCLUDE renderSelection curValue=input.type param="$baseName-type" options=inputTypes edit=1 %]
|
[% INCLUDE renderSelection curValue=input.type param="$baseName-type" options=inputTypes edit=1 %]
|
||||||
</td>
|
</td>
|
||||||
<td [% HTML.attributes(id => baseName) %]>
|
<td id="[% baseName %]">
|
||||||
[% IF createFromEval %]
|
[% IF createFromEval %]
|
||||||
[% value = (input.uri or input.value); IF input.revision; value = value _ " " _ input.revision; END;
|
[% value = (input.uri or input.value); IF input.revision; value = value _ " " _ input.revision; END;
|
||||||
warn = input.altnr != 0;
|
warn = input.altnr != 0;
|
||||||
@@ -36,7 +36,7 @@
|
|||||||
<input style="width: 95%" type="text" [% HTML.attributes(value => value, id => "$baseName-value", name => "$baseName-value") %]/>
|
<input style="width: 95%" type="text" [% HTML.attributes(value => value, id => "$baseName-value", name => "$baseName-value") %]/>
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
<input type="checkbox" [% HTML.attributes(id => "$baseName-emailresponsible", name => "$baseName-emailresponsible") %] [% IF input.emailresponsible; 'checked="checked"'; END %]/>
|
<input type="checkbox" id="[% baseName %]-emailresponsible" name="[% baseName %]-emailresponsible" [% IF input.emailresponsible; 'checked="checked"'; END %]/>
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -149,7 +149,7 @@
|
|||||||
<label class="col-sm-3" for="editjobsetschedulingshares">
|
<label class="col-sm-3" for="editjobsetschedulingshares">
|
||||||
Scheduling shares
|
Scheduling shares
|
||||||
[% IF totalShares %]
|
[% IF totalShares %]
|
||||||
<small class="form-text text-muted">([% f = format("%.2f"); f(jobset.schedulingshares / totalShares * 100) %]% out of [% HTML.escape(totalShares) %] shares)</small>
|
<small class="form-text text-muted">([% f = format("%.2f"); f(jobset.schedulingshares / totalShares * 100) %]% out of [% totalShares %] shares)</small>
|
||||||
[% END %]
|
[% END %]
|
||||||
</label>
|
</label>
|
||||||
<div class="col-sm-9">
|
<div class="col-sm-9">
|
||||||
@@ -195,7 +195,7 @@
|
|||||||
|
|
||||||
[% INCLUDE renderJobsetInputs %]
|
[% INCLUDE renderJobsetInputs %]
|
||||||
|
|
||||||
<button id="submit-jobset" type="submit" class="btn btn-primary"><i class="fas fa-check"></i> [% IF !edit %]Create jobset[% ELSE %]Apply changes[% END %]</button>
|
<button id="submit-jobset" type="submit" class="btn btn-primary"><i class="fas fa-check"></i> [%IF !edit %]Create jobset[% ELSE %]Apply changes[% END %]</button>
|
||||||
|
|
||||||
<table style="display: none">
|
<table style="display: none">
|
||||||
[% INCLUDE renderJobsetInput input="" extraClass="template" id="input-template" baseName="input-template" %]
|
[% INCLUDE renderJobsetInput input="" extraClass="template" id="input-template" baseName="input-template" %]
|
||||||
|
@@ -86,7 +86,7 @@
|
|||||||
|
|
||||||
<button id="submit-project" type="submit" class="btn btn-primary">
|
<button id="submit-project" type="submit" class="btn btn-primary">
|
||||||
<i class="fas fa-check"></i>
|
<i class="fas fa-check"></i>
|
||||||
[% IF create %]Create project[% ELSE %]Apply changes[% END %]
|
[%IF create %]Create project[% ELSE %]Apply changes[% END %]
|
||||||
</button>
|
</button>
|
||||||
|
|
||||||
</form>
|
</form>
|
||||||
|
@@ -1,26 +0,0 @@
[% PROCESS common.tt %]
|
<!DOCTYPE html>
|
|
<html lang="en">
|
|
<head>
|
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
<meta http-equiv="X-UA-Compatible" content="IE=Edge" />
|
[% INCLUDE style.tt %]
|
</head>
|
|
<body>
|
|
<div class="tab-content tab-pane">
|
<div id="tabs-errors" class="">
|
[% IF eval %]
|
<p>Errors occurred at [% INCLUDE renderDateTime timestamp=(eval.evaluationerror.errortime || eval.timestamp) %].</p>
|
<div class="card bg-light"><div class="card-body"><pre>[% HTML.escape(eval.evaluationerror.errormsg) %]</pre></div></div>
|
[% ELSIF jobset %]
|
<p>Errors occurred at [% INCLUDE renderDateTime timestamp=(jobset.errortime || jobset.lastcheckedtime) %].</p>
|
<div class="card bg-light"><div class="card-body"><pre>[% HTML.escape(jobset.fetcherrormsg || jobset.errormsg) %]</pre></div></div>
|
[% END %]
|
</div>
|
</div>
|
</body>
|
</html>
|
@@ -10,7 +10,7 @@
[% PROCESS common.tt %]
[% PROCESS common.tt %]

<p>Showing evaluations [% (page - 1) * resultsPerPage + 1 %] - [%
<p>Showing evaluations [% (page - 1) * resultsPerPage + 1 %] - [%
(page - 1) * resultsPerPage + evals.size %] out of [% HTML.escape(total) %].</p>
(page - 1) * resultsPerPage + evals.size %] out of [% total %].</p>

[% INCLUDE renderEvals %]
[% INCLUDE renderEvals %]

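The "Showing evaluations ... out of ..." line is plain offset arithmetic on a 1-based page number: the window starts at (page - 1) * resultsPerPage + 1 and ends at that offset plus however many rows the page actually returned. A small sketch of the same calculation, with made-up numbers:

#!/usr/bin/env perl
# Sketch of the evals.tt range arithmetic with hypothetical values.
use strict;
use warnings;

my $page           = 3;          # hypothetical current page (1-based)
my $resultsPerPage = 20;         # hypothetical page size
my @evals          = (1) x 17;   # pretend the last page returned 17 rows
my $total          = 57;         # hypothetical total number of evaluations

my $first = ($page - 1) * $resultsPerPage + 1;        # 41
my $last  = ($page - 1) * $resultsPerPage + @evals;   # 57 (@evals in numeric context)

print "Showing evaluations $first - $last out of $total.\n";
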
@@ -16,7 +16,7 @@
|
|||||||
|
|
||||||
[% FOREACH metric IN metrics %]
|
[% FOREACH metric IN metrics %]
|
||||||
|
|
||||||
<h3>Metric: <a [% HTML.attributes(href => c.uri_for('/job' project.name jobset.name job 'metric' metric.name)) %]><tt>[% HTML.escape(metric.name) %]</tt></a></h3>
|
<h3>Metric: <a [% HTML.attributes(href => c.uri_for('/job' project.name jobset.name job 'metric' metric.name)) %]><tt>[%HTML.escape(metric.name)%]</tt></a></h3>
|
||||||
|
|
||||||
[% id = metricDivId(metric.name);
|
[% id = metricDivId(metric.name);
|
||||||
INCLUDE createChart dataUrl=c.uri_for('/job' project.name jobset.name job 'metric' metric.name); %]
|
INCLUDE createChart dataUrl=c.uri_for('/job' project.name jobset.name job 'metric' metric.name); %]
|
||||||
|
@@ -10,8 +10,8 @@
|
|||||||
|
|
||||||
[% IF !jobExists(jobset, job) %]
|
[% IF !jobExists(jobset, job) %]
|
||||||
<div class="alert alert-warning">This job is not a member of the <a
|
<div class="alert alert-warning">This job is not a member of the <a
|
||||||
[% HTML.attributes(href => c.uri_for('/jobset' project.name jobset.name
|
href="[%c.uri_for('/jobset' project.name jobset.name
|
||||||
'evals')) %]>latest evaluation</a> of its jobset. This means it was
|
'evals')%]">latest evaluation</a> of its jobset. This means it was
|
||||||
removed or had an evaluation error.</div>
|
removed or had an evaluation error.</div>
|
||||||
[% END %]
|
[% END %]
|
||||||
|
|
||||||
@@ -46,7 +46,7 @@ removed or had an evaluation error.</div>
|
|||||||
its success or failure is determined entirely by the result of
|
its success or failure is determined entirely by the result of
|
||||||
building its <em>constituent jobs</em>. The table below shows
|
building its <em>constituent jobs</em>. The table below shows
|
||||||
the status of each constituent job for the [%
|
the status of each constituent job for the [%
|
||||||
HTML.escape(aggregates.keys.size) %] most recent builds of the
|
aggregates.keys.size %] most recent builds of the
|
||||||
aggregate.</div>
|
aggregate.</div>
|
||||||
|
|
||||||
[% aggs = aggregates.keys.nsort.reverse %]
|
[% aggs = aggregates.keys.nsort.reverse %]
|
||||||
@@ -58,7 +58,7 @@ removed or had an evaluation error.</div>
|
|||||||
<th class="rotate-45">
|
<th class="rotate-45">
|
||||||
[% agg_ = aggregates.$agg %]
|
[% agg_ = aggregates.$agg %]
|
||||||
<div><span class="[% agg_.build.finished == 0 ? "text-info" : (agg_.build.buildstatus == 0 ? "text-success" : "text-warning") %] override-link">
|
<div><span class="[% agg_.build.finished == 0 ? "text-info" : (agg_.build.buildstatus == 0 ? "text-success" : "text-warning") %] override-link">
|
||||||
<a [% HTML.attributes(href => c.uri_for('/build' agg)) %]>[% agg %]</a>
|
<a href="[% c.uri_for('/build' agg) %]">[% agg %]</a>
|
||||||
</span></div></th>
|
</span></div></th>
|
||||||
[% END %]
|
[% END %]
|
||||||
</tr>
|
</tr>
|
||||||
@@ -70,7 +70,7 @@ removed or had an evaluation error.</div>
|
|||||||
[% FOREACH agg IN aggs %]
|
[% FOREACH agg IN aggs %]
|
||||||
<td>
|
<td>
|
||||||
[% r = aggregates.$agg.constituents.$j; IF r.id %]
|
[% r = aggregates.$agg.constituents.$j; IF r.id %]
|
||||||
<a [% HTML.attributes(href => c.uri_for('/build' r.id)) %]>
|
<a href="[% c.uri_for('/build' r.id) %]">
|
||||||
[% INCLUDE renderBuildStatusIcon size=16 build=r %]
|
[% INCLUDE renderBuildStatusIcon size=16 build=r %]
|
||||||
</a>
|
</a>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -89,8 +89,8 @@ removed or had an evaluation error.</div>
|
|||||||
|
|
||||||
<div id="tabs-links" class="tab-pane">
|
<div id="tabs-links" class="tab-pane">
|
||||||
<ul>
|
<ul>
|
||||||
<li><a [% HTML.attributes(href => c.uri_for('/job' project.name jobset.name job 'latest')) %]>Latest successful build</a></li>
|
<li><a href="[% c.uri_for('/job' project.name jobset.name job 'latest') %]">Latest successful build</a></li>
|
||||||
<li><a [% HTML.attributes(href => c.uri_for('/job' project.name jobset.name job 'latest-finished')) %]>Latest successful build from a finished evaluation</a></li>
|
<li><a href="[% c.uri_for('/job' project.name jobset.name job 'latest-finished') %]">Latest successful build from a finished evaluation</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@@ -14,7 +14,7 @@
|
|||||||
[% FOREACH eval IN evalIds %]
|
[% FOREACH eval IN evalIds %]
|
||||||
<th class="rotate-45">
|
<th class="rotate-45">
|
||||||
<div><span>
|
<div><span>
|
||||||
<a [% HTML.attributes(href => c.uri_for('/eval' eval)) %]>[% INCLUDE renderRelativeDate timestamp=evals.$eval.timestamp %]</a>
|
<a href="[% c.uri_for('/eval' eval) %]">[% INCLUDE renderRelativeDate timestamp=evals.$eval.timestamp %]</a>
|
||||||
</span></div></th>
|
</span></div></th>
|
||||||
[% END %]
|
[% END %]
|
||||||
</tr>
|
</tr>
|
||||||
@@ -22,9 +22,9 @@
|
|||||||
<tbody>
|
<tbody>
|
||||||
[% FOREACH chan IN channels-%]
|
[% FOREACH chan IN channels-%]
|
||||||
<tr>
|
<tr>
|
||||||
<th><span><a [% HTML.attributes(href => c.uri_for('/channel/custom' project.name jobset.name chan)) %]>[% HTML.escape(chan) %]</a></span></th>
|
<th><span><a href="[% c.uri_for('/channel/custom' project.name jobset.name chan) %]">[% chan %]</a></span></th>
|
||||||
[% FOREACH eval IN evalIds %]
|
[% FOREACH eval IN evalIds %]
|
||||||
<td>[% r = evals.$eval.builds.$chan; IF r.id %]<a [% HTML.attributes(href => c.uri_for('/build' r.id)) %]>[% INCLUDE renderBuildStatusIcon size=16 build=r %]</a>[% END %]</td>
|
<td>[% r = evals.$eval.builds.$chan; IF r.id %]<a href="[% c.uri_for('/build' r.id) %]">[% INCLUDE renderBuildStatusIcon size=16 build=r %]</a>[% END %]</td>
|
||||||
[% END %]
|
[% END %]
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
|
@@ -13,23 +13,25 @@
|
|||||||
<a class="dropdown-item" href="?compare=-[% 31 * 24 * 60 * 60 %]&full=[% full ? 1 : 0 %]">This jobset <strong>one month</strong> earlier</a>
|
<a class="dropdown-item" href="?compare=-[% 31 * 24 * 60 * 60 %]&full=[% full ? 1 : 0 %]">This jobset <strong>one month</strong> earlier</a>
|
||||||
[% IF project.jobsets_rs.count > 1 %]
|
[% IF project.jobsets_rs.count > 1 %]
|
||||||
<div class="dropdown-divider"></div>
|
<div class="dropdown-divider"></div>
|
||||||
[% FOREACH j IN project.jobsets.sort('name'); IF j.name != jobset.name && j.enabled == 1 %]
|
[% FOREACH j IN project.jobsets.sort('name'); IF j.name != jobset.name %]
|
||||||
<a class="dropdown-item" href="?compare=[% j.name | uri %]&full=[% full ? 1 : 0 %]">Jobset <tt>[% project.name | html %]:[% j.name | html %]</tt></a>
|
<a class="dropdown-item" href="?compare=[% j.name %]&full=[% full ? 1 : 0 %]">Jobset <tt>[% project.name %]:[% j.name %]</tt></a>
|
||||||
[% END; END %]
|
[% END; END %]
|
||||||
[% END %]
|
[% END %]
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<p>This evaluation was performed [% IF eval.flake %]from the flake
|
<p>This evaluation was performed [% IF eval.flake %]from the flake
|
||||||
<tt>[% HTML.escape(eval.flake) %]</tt>[% END %] on [% INCLUDE renderDateTime
|
<tt>[%HTML.escape(eval.flake)%]</tt>[%END%] on [% INCLUDE renderDateTime
|
||||||
timestamp=eval.timestamp %]. Fetching the dependencies took [%
|
timestamp=eval.timestamp %]. Fetching the dependencies took [%
|
||||||
eval.checkouttime %]s and evaluation took [% HTML.escape(eval.evaltime) %]s.</p>
|
eval.checkouttime %]s and evaluation took [% eval.evaltime %]s.</p>
|
||||||
|
|
||||||
[% IF otherEval %]
|
[% IF otherEval %]
|
||||||
<p>Comparisons are relative to [% INCLUDE renderFullJobsetName
|
<p>Comparisons are relative to [% INCLUDE renderFullJobsetName
|
||||||
project=otherEval.jobset.project.name jobset=otherEval.jobset.name %] evaluation <a [%
|
project=otherEval.jobset.project.name jobset=otherEval.jobset.name %] evaluation <a href="[%
|
||||||
HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('view'),
|
c.uri_for(c.controller('JobsetEval').action_for('view'),
|
||||||
[otherEval.id])) %]>[% HTML.escape(otherEval.id) %]</a>.</p>
|
[otherEval.id]) %]">[% otherEval.id %]</a>.</p>
|
||||||
|
[% ELSE %]
|
||||||
|
<div class="alert alert-danger">Couldn't find an evaluation to compare to.</div>
|
||||||
[% END %]
|
[% END %]
|
||||||
|
|
||||||
<form>
|
<form>
|
||||||
@@ -45,50 +47,50 @@ HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('view'),
|
|||||||
<li class="nav-item dropdown">
|
<li class="nav-item dropdown">
|
||||||
<a class="nav-link dropdown-toggle" data-toggle="dropdown" href="#">Actions</a>
|
<a class="nav-link dropdown-toggle" data-toggle="dropdown" href="#">Actions</a>
|
||||||
<div class="dropdown-menu">
|
<div class="dropdown-menu">
|
||||||
<a class="dropdown-item" [% HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('create_jobset'), [eval.id])) %]>Create a jobset from this evaluation</a>
|
<a class="dropdown-item" href="[% c.uri_for(c.controller('JobsetEval').action_for('create_jobset'), [eval.id]) %]">Create a jobset from this evaluation</a>
|
||||||
[% IF totalQueued > 0 %]
|
[% IF unfinished.size > 0 %]
|
||||||
<a class="dropdown-item" [% HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('cancel'), [eval.id])) %]>Cancel all scheduled builds</a>
|
<a class="dropdown-item" href="[% c.uri_for(c.controller('JobsetEval').action_for('cancel'), [eval.id]) %]">Cancel all scheduled builds</a>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF totalFailed > 0 %]
|
[% IF aborted.size > 0 || stillFail.size > 0 || nowFail.size > 0 || failed.size > 0 %]
|
||||||
<a class="dropdown-item" [% HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('restart_failed'), [eval.id])) %]>Restart all failed builds</a>
|
<a class="dropdown-item" href="[% c.uri_for(c.controller('JobsetEval').action_for('restart_failed'), [eval.id]) %]">Restart all failed builds</a>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF totalAborted > 0 %]
|
[% IF aborted.size > 0 %]
|
||||||
<a class="dropdown-item" [% HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('restart_aborted'), [eval.id])) %]>Restart all aborted builds</a>
|
<a class="dropdown-item" href="[% c.uri_for(c.controller('JobsetEval').action_for('restart_aborted'), [eval.id]) %]">Restart all aborted builds</a>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF totalQueued > 0 %]
|
[% IF unfinished.size > 0 %]
|
||||||
<a class="dropdown-item" [% HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('bump'), [eval.id])) %]>Bump builds to front of queue</a>
|
<a class="dropdown-item" href="[% c.uri_for(c.controller('JobsetEval').action_for('bump'), [eval.id]) %]">Bump builds to front of queue</a>
|
||||||
[% END %]
|
[% END %]
|
||||||
</div>
|
</div>
|
||||||
</li>
|
</li>
|
||||||
[% END %]
|
[% END %]
|
||||||
|
|
||||||
[% IF aborted.size > 0 %]
|
[% IF aborted.size > 0 %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-aborted" data-toggle="tab"><span class="text-warning">Aborted / Timed out Jobs ([% HTML.escape(aborted.size) %])</span></a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-aborted" data-toggle="tab"><span class="text-warning">Aborted Jobs ([% aborted.size %])</span></a></li>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF nowFail.size > 0 %]
|
[% IF nowFail.size > 0 %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-now-fail" data-toggle="tab"><span class="text-warning">Newly Failing Jobs ([% HTML.escape(nowFail.size) %])</span></a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-now-fail" data-toggle="tab"><span class="text-warning">Newly Failing Jobs ([% nowFail.size %])</span></a></li>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF nowSucceed.size > 0 %]
|
[% IF nowSucceed.size > 0 %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-now-succeed" data-toggle="tab"><span class="text-success">Newly Succeeding Jobs ([% HTML.escape(nowSucceed.size) %])</span></a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-now-succeed" data-toggle="tab"><span class="text-success">Newly Succeeding Jobs ([% nowSucceed.size %])</span></a></li>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF new.size > 0 %]
|
[% IF new.size > 0 %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-new" data-toggle="tab">New Jobs ([% HTML.escape(new.size) %])</a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-new" data-toggle="tab">New Jobs ([% new.size %])</a></li>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF removed.size > 0 %]
|
[% IF removed.size > 0 %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-removed" data-toggle="tab">Removed Jobs ([% HTML.escape(removed.size) %])</a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-removed" data-toggle="tab">Removed Jobs ([% removed.size %])</a></li>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF stillFail.size > 0 %]
|
[% IF stillFail.size > 0 %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-still-fail" data-toggle="tab">Still Failing Jobs ([% HTML.escape(stillFail.size) %])</a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-still-fail" data-toggle="tab">Still Failing Jobs ([% stillFail.size %])</a></li>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF stillSucceed.size > 0 %]
|
[% IF stillSucceed.size > 0 %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-still-succeed" data-toggle="tab">Still Succeeding Jobs ([% HTML.escape(stillSucceed.size) %])</a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-still-succeed" data-toggle="tab">Still Succeeding Jobs ([% stillSucceed.size %])</a></li>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF unfinished.size > 0 %]
|
[% IF unfinished.size > 0 %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-unfinished" data-toggle="tab">Queued Jobs ([% HTML.escape(unfinished.size) %])</a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-unfinished" data-toggle="tab">Queued Jobs ([% unfinished.size %])</a></li>
|
||||||
[% END %]
|
[% END %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-inputs" data-toggle="tab">Inputs</a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-inputs" data-toggle="tab">Inputs</a></li>
|
||||||
|
|
||||||
[% IF eval.evaluationerror.has_error %]
|
[% IF eval.evaluationerror.errormsg %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-errors" data-toggle="tab"><span class="text-warning">Evaluation Errors</span></a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-errors" data-toggle="tab"><span class="text-warning">Evaluation Errors</span></a></li>
|
||||||
[% END %]
|
[% END %]
|
||||||
</ul>
|
</ul>
|
||||||
@@ -99,13 +101,20 @@ HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('view'),
[% INCLUDE renderBuildListBody builds=builds.slice(0, (size > max ? max : size) - 1)
[% INCLUDE renderBuildListBody builds=builds.slice(0, (size > max ? max : size) - 1)
hideProjectName=1 hideJobsetName=1 busy=0 %]
hideProjectName=1 hideJobsetName=1 busy=0 %]
[% IF size > max; params = c.req.params; params.full = 1 %]
[% IF size > max; params = c.req.params; params.full = 1 %]
<tr><td class="centered" colspan="6"><a [% HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('view'), [eval.id], params) _ tabname) %]><em>([% size - max %] more builds omitted)</em></a></td></tr>
<tr><td class="centered" colspan="6"><a href="[% c.uri_for(c.controller('JobsetEval').action_for('view'), [eval.id], params) %][% tabname %]"><em>([% size - max %] more builds omitted)</em></a></td></tr>
[% END %]
[% END %]
[% INCLUDE renderBuildListFooter %]
[% INCLUDE renderBuildListFooter %]
[% END %]
[% END %]

<div class="tab-content">
<div class="tab-content">

|
[% IF eval.evaluationerror.errormsg %]
|
<div id="tabs-errors" class="tab-pane">
|
<p>Errors occurred at [% INCLUDE renderDateTime timestamp=(eval.evaluationerror.errortime || eval.timestamp) %].</p>
|
<div class="card bg-light"><div class="card-body"><pre>[% HTML.escape(eval.evaluationerror.errormsg) %]</pre></div></div>
|
</div>
|
[% END %]

<div id="tabs-aborted" class="tab-pane">
<div id="tabs-aborted" class="tab-pane">
[% INCLUDE renderSome builds=aborted tabname="#tabs-aborted" %]
[% INCLUDE renderSome builds=aborted tabname="#tabs-aborted" %]
</div>
</div>

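The renderSome block above caps each tab at max rows: Template Toolkit's slice(from, to) is inclusive, so slicing to (size > max ? max : size) - 1 keeps min(size, max) builds, and size - max is the figure shown in the "(... more builds omitted)" link. A short sketch of the same capping, with hypothetical numbers:

#!/usr/bin/env perl
# Sketch of the renderSome capping logic with made-up numbers.
use strict;
use warnings;
use List::Util qw(min);

my @builds = (1 .. 137);   # hypothetical builds in this tab
my $max    = 30;           # hypothetical display cap

my $size  = scalar @builds;
# Same effect as builds.slice(0, (size > max ? max : size) - 1) in the template.
my @shown = @builds[0 .. min($size, $max) - 1];

printf "showing %d builds\n", scalar @shown;
printf "(%d more builds omitted)\n", $size - $max if $size > $max;
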
@@ -132,11 +141,11 @@ HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('view'),
|
|||||||
[% FOREACH j IN removed.slice(0,(size > max ? max : size) - 1) %]
|
[% FOREACH j IN removed.slice(0,(size > max ? max : size) - 1) %]
|
||||||
<tr>
|
<tr>
|
||||||
<td>[% INCLUDE renderJobName project=project.name jobset=jobset.name job=j.job %]</td>
|
<td>[% INCLUDE renderJobName project=project.name jobset=jobset.name job=j.job %]</td>
|
||||||
<td><tt>[% j.system | html %]</tt></td>
|
<td><tt>[% j.system %]</tt></td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF size > max; params = c.req.params; params.full = 1 %]
|
[% IF size > max; params = c.req.params; params.full = 1 %]
|
||||||
<tr><td class="centered" colspan="2"><a [% HTML.attributes(c.uri_for(c.controller('JobsetEval').action_for('view'), [eval.id], params) _ "#tabs-removed") %]><em>([% size - max %] more jobs omitted)</em></a></td></tr>
|
<tr><td class="centered" colspan="2"><a href="[% c.uri_for(c.controller('JobsetEval').action_for('view'), [eval.id], params) %]#tabs-removed"><em>([% size - max %] more jobs omitted)</em></a></td></tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
@@ -163,9 +172,10 @@ HTML.attributes(href => c.uri_for(c.controller('JobsetEval').action_for('view'),
[% END %]
[% END %]
</div>
</div>

[% IF eval.evaluationerror.has_error %]
[% IF eval.evaluationerror.errormsg %]
<div id="tabs-errors" class="tab-pane">
<div id="tabs-errors" class="tab-pane">
<iframe src="[% c.uri_for(c.controller('JobsetEval').action_for('errors'), [eval.id], params) %]" loading="lazy" frameBorder="0" width="100%"></iframe>
<p>Errors occurred at [% INCLUDE renderDateTime timestamp=(eval.evaluationerror.errortime || eval.timestamp) %].</p>
|
<div class="card bg-light"><div class="card-body"><pre>[% HTML.escape(eval.evaluationerror.errormsg) %]</pre></div></div>
</div>
</div>
[% END %]
[% END %]
</div>
</div>

@@ -41,7 +41,7 @@
|
|||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
|
|
||||||
[% IF nrJobs > jobs.size %]
|
[% IF nrJobs > jobs.size %]
|
||||||
<div class="alert alert-info">Showing the first [% HTML.escape(jobs.size) %] jobs. <a href="javascript:setFilter('filter=%')">Show all [% HTML.escape(nrJobs) %] jobs...</a></div>
|
<div class="alert alert-info">Showing the first [% jobs.size %] jobs. <a href="javascript:setFilter('filter=%')">Show all [% nrJobs %] jobs...</a></div>
|
||||||
[% END %]
|
[% END %]
|
||||||
|
|
||||||
[% evalIds = evals.keys.nsort.reverse %]
|
[% evalIds = evals.keys.nsort.reverse %]
|
||||||
@@ -52,7 +52,7 @@
|
|||||||
[% FOREACH eval IN evalIds %]
|
[% FOREACH eval IN evalIds %]
|
||||||
<th class="rotate-45">
|
<th class="rotate-45">
|
||||||
<div><span>
|
<div><span>
|
||||||
<a [% HTML.attributes(href => c.uri_for('/eval' eval)) %]>[% INCLUDE renderRelativeDate timestamp=evals.$eval.timestamp %]</a>
|
<a href="[% c.uri_for('/eval' eval) %]">[% INCLUDE renderRelativeDate timestamp=evals.$eval.timestamp %]</a>
|
||||||
</span></div></th>
|
</span></div></th>
|
||||||
[% END %]
|
[% END %]
|
||||||
</tr>
|
</tr>
|
||||||
@@ -62,7 +62,7 @@
|
|||||||
<tr>
|
<tr>
|
||||||
<th><span [% IF inactiveJobs.$j %]class="muted override-link"[% END %]>[% INCLUDE renderJobName project=project.name jobset=jobset.name job=j %]</span></th>
|
<th><span [% IF inactiveJobs.$j %]class="muted override-link"[% END %]>[% INCLUDE renderJobName project=project.name jobset=jobset.name job=j %]</span></th>
|
||||||
[% FOREACH eval IN evalIds %]
|
[% FOREACH eval IN evalIds %]
|
||||||
<td>[% r = evals.$eval.builds.$j; IF r.id %]<a [% HTML.attributes(href => c.uri_for('/build' r.id)) %]>[% INCLUDE renderBuildStatusIcon size=16 build=r %]</a>[% END %]</td>
|
<td>[% r = evals.$eval.builds.$j; IF r.id %]<a href="[% c.uri_for('/build' r.id) %]">[% INCLUDE renderBuildStatusIcon size=16 build=r %]</a>[% END %]</td>
|
||||||
[% END %]
|
[% END %]
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
|
@@ -6,14 +6,14 @@
|
|||||||
|
|
||||||
|
|
||||||
[% BLOCK renderJobsetInput %]
|
[% BLOCK renderJobsetInput %]
|
||||||
<tr class="input [% extraClass %]" [% IF id %][% HTML.attributes(id => id) %][% END %]>
|
<tr class="input [% extraClass %]" [% IF id %]id="[% id %]"[% END %]>
|
||||||
<td>
|
<td>
|
||||||
<tt>[% HTML.escape(input.name) %]</tt>
|
<tt>[% HTML.escape(input.name) %]</tt>
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
[% INCLUDE renderSelection curValue=input.type param="$baseName-type" options=inputTypes %]
|
[% INCLUDE renderSelection curValue=input.type param="$baseName-type" options=inputTypes %]
|
||||||
</td>
|
</td>
|
||||||
<td class="inputalts" [% HTML.attributes(id => baseName) %]>
|
<td class="inputalts" id="[% baseName %]">
|
||||||
[% FOREACH alt IN input.search_related('jobsetinputalts', {}, { order_by => 'altnr' }) %]
|
[% FOREACH alt IN input.search_related('jobsetinputalts', {}, { order_by => 'altnr' }) %]
|
||||||
<tt class="inputalt">
|
<tt class="inputalt">
|
||||||
[% IF input.type == "string" %]
|
[% IF input.type == "string" %]
|
||||||
@@ -61,7 +61,7 @@
|
|||||||
[% END %]
|
[% END %]
|
||||||
|
|
||||||
<li class="nav-item"><a class="nav-link active" href="#tabs-evaluations" data-toggle="tab">Evaluations</a></li>
|
<li class="nav-item"><a class="nav-link active" href="#tabs-evaluations" data-toggle="tab">Evaluations</a></li>
|
||||||
[% IF jobset.has_error || jobset.fetcherrormsg %]
|
[% IF jobset.errormsg || jobset.fetcherrormsg %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-errors" data-toggle="tab"><span class="text-warning">Evaluation Errors</span></a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-errors" data-toggle="tab"><span class="text-warning">Evaluation Errors</span></a></li>
|
||||||
[% END %]
|
[% END %]
|
||||||
<li class="nav-item"><a class="nav-link" href="#tabs-jobs" data-toggle="tab">Jobs</a></li>
|
<li class="nav-item"><a class="nav-link" href="#tabs-jobs" data-toggle="tab">Jobs</a></li>
|
||||||
@@ -79,7 +79,7 @@
|
|||||||
<th>Last checked:</th>
|
<th>Last checked:</th>
|
||||||
<td>
|
<td>
|
||||||
[% IF jobset.lastcheckedtime %]
|
[% IF jobset.lastcheckedtime %]
|
||||||
[% INCLUDE renderDateTime timestamp = jobset.lastcheckedtime %], [% IF jobset.has_error || jobset.fetcherrormsg %]<em class="text-warning">with errors!</em>[% ELSE %]<em>no errors</em>[% END %]
|
[% INCLUDE renderDateTime timestamp = jobset.lastcheckedtime %], [% IF jobset.errormsg || jobset.fetcherrormsg %]<em class="text-warning">with errors!</em>[% ELSE %]<em>no errors</em>[% END %]
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
<em>never</em>
|
<em>never</em>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -117,9 +117,10 @@
|
|||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
[% IF jobset.has_error || jobset.fetcherrormsg %]
|
[% IF jobset.errormsg || jobset.fetcherrormsg %]
|
||||||
<div id="tabs-errors" class="tab-pane">
|
<div id="tabs-errors" class="tab-pane">
|
||||||
<iframe src="[% c.uri_for('/jobset' project.name jobset.name "errors") %]" loading="lazy" frameBorder="0" width="100%"></iframe>
|
<p>Errors occurred at [% INCLUDE renderDateTime timestamp=(jobset.errortime || jobset.lastcheckedtime) %].</p>
|
||||||
|
<div class="card bg-light"><div class="card-body"><pre>[% HTML.escape(jobset.fetcherrormsg || jobset.errormsg) %]</pre></div></div>
|
||||||
</div>
|
</div>
|
||||||
[% END %]
|
[% END %]
|
||||||
|
|
||||||
@@ -153,11 +154,11 @@
|
|||||||
[% END %]
|
[% END %]
|
||||||
<tr>
|
<tr>
|
||||||
<th>Check interval:</th>
|
<th>Check interval:</th>
|
||||||
<td>[% HTML.escape(jobset.checkinterval) || "<em>disabled</em>" %]</td>
|
<td>[% jobset.checkinterval || "<em>disabled</em>" %]</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<th>Scheduling shares:</th>
|
<th>Scheduling shares:</th>
|
||||||
<td>[% HTML.escape(jobset.schedulingshares) %] [% IF totalShares %] ([% f = format("%.2f"); f(jobset.schedulingshares / totalShares * 100) %]% out of [% HTML.escape(totalShares) %] shares)[% END %]</td>
|
<td>[% jobset.schedulingshares %] [% IF totalShares %] ([% f = format("%.2f"); f(jobset.schedulingshares / totalShares * 100) %]% out of [% totalShares %] shares)[% END %]</td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<th>Enable Dynamic RunCommand Hooks:</th>
|
<th>Enable Dynamic RunCommand Hooks:</th>
|
||||||
@@ -175,7 +176,7 @@
|
|||||||
[% END %]
|
[% END %]
|
||||||
<tr>
|
<tr>
|
||||||
<th>Number of evaluations to keep:</th>
|
<th>Number of evaluations to keep:</th>
|
||||||
<td>[% HTML.escape(jobset.keepnr) %]</td>
|
<td>[% jobset.keepnr %]</td>
|
||||||
</tr>
|
</tr>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
@@ -188,7 +189,7 @@
|
|||||||
|
|
||||||
<div id="tabs-links" class="tab-pane">
|
<div id="tabs-links" class="tab-pane">
|
||||||
<ul>
|
<ul>
|
||||||
<li><a [% HTML.attributes(href => c.uri_for(c.controller('Jobset').action_for('latest_eval'), c.req.captures)) %]>Latest finished evaluation</a></li>
|
<li><a href="[% c.uri_for(c.controller('Jobset').action_for('latest_eval'), c.req.captures) %]">Latest finished evaluation</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@@ -10,7 +10,31 @@
|
|||||||
|
|
||||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=Edge" />
|
<meta http-equiv="X-UA-Compatible" content="IE=Edge" />
|
||||||
[% INCLUDE style.tt %]
|
|
||||||
|
<script type="text/javascript" src="[% c.uri_for("/static/js/jquery/jquery-3.4.1.min.js") %]"></script>
|
||||||
|
<script type="text/javascript" src="[% c.uri_for("/static/js/jquery/jquery-ui-1.10.4.min.js") %]"></script>
|
||||||
|
<script type="text/javascript" src="[% c.uri_for("/static/js/moment/moment-2.24.0.min.js") %]"></script>
|
||||||
|
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
|
|
||||||
|
<link href="[% c.uri_for("/static/fontawesome/css/all.css") %]" rel="stylesheet" />
|
||||||
|
<script type="text/javascript" src="[% c.uri_for("/static/js/popper.min.js") %]"></script>
|
||||||
|
<script type="text/javascript" src="[% c.uri_for("/static/bootstrap/js/bootstrap.min.js") %]"></script>
|
||||||
|
<link href="[% c.uri_for("/static/bootstrap/css/bootstrap.min.css") %]" rel="stylesheet" />
|
||||||
|
|
||||||
|
<!-- hydra.css may need to be moved to before boostrap to make the @media rule work. -->
|
||||||
|
<link rel="stylesheet" href="[% c.uri_for("/static/css/hydra.css") %]" type="text/css" />
|
||||||
|
<link rel="stylesheet" href="[% c.uri_for("/static/css/rotated-th.css") %]" type="text/css" />
|
||||||
|
|
||||||
|
<style>
|
||||||
|
.popover { max-width: 40%; }
|
||||||
|
</style>
|
||||||
|
|
||||||
|
<script type="text/javascript" src="[% c.uri_for("/static/js/bootbox.min.js") %]"></script>
|
||||||
|
|
||||||
|
<link rel="stylesheet" href="[% c.uri_for("/static/css/tree.css") %]" type="text/css" />
|
||||||
|
|
||||||
|
<script type="text/javascript" src="[% c.uri_for("/static/js/common.js") %]"></script>
|
||||||
|
|
||||||
[% IF c.config.enable_google_login %]
|
[% IF c.config.enable_google_login %]
|
||||||
<meta name="google-signin-client_id" content="[% c.config.google_client_id %]">
|
<meta name="google-signin-client_id" content="[% c.config.google_client_id %]">
|
||||||
@@ -24,7 +48,7 @@
|
|||||||
|
|
||||||
<nav class="navbar navbar-expand-md navbar-light bg-light">
|
<nav class="navbar navbar-expand-md navbar-light bg-light">
|
||||||
<div class="container">
|
<div class="container">
|
||||||
<a class="navbar-brand" [% HTML.attributes(href => c.uri_for(c.controller('Root').action_for('index'))) %]>
|
<a class="navbar-brand" href="[% c.uri_for(c.controller('Root').action_for('index')) %]">
|
||||||
[% IF logo == "" %]
|
[% IF logo == "" %]
|
||||||
Hydra
|
Hydra
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
|
@@ -11,14 +11,13 @@
|
|||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
is
|
is
|
||||||
[% END %]
|
[% END %]
|
||||||
the build log (<a [% HTML.attributes(href => step ? c.uri_for('/build' build.id 'nixlog' step.stepnr, 'raw')
|
the build log of derivation <tt>[% IF step; step.drvpath; ELSE; build.drvpath; END %]</tt>.
|
||||||
: c.uri_for('/build' build.id 'log', 'raw')) %]>raw</a>) of derivation <tt>[% IF step; step.drvpath; ELSE; build.drvpath; END %]</tt>.
|
|
||||||
[% IF step && step.machine %]
|
[% IF step && step.machine %]
|
||||||
It was built on <tt>[% step.machine | html %]</tt>.
|
It was built on <tt>[% step.machine %]</tt>.
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF tail %]
|
[% IF tail %]
|
||||||
The <a [% HTML.attributes(href => step ? c.uri_for('/build' build.id 'nixlog' step.stepnr)
|
The <a href="[% step ? c.uri_for('/build' build.id 'nixlog' step.stepnr)
|
||||||
: c.uri_for('/build' build.id 'log')) %]>full log</a> is also available.
|
: c.uri_for('/build' build.id 'log') %]">full log</a> is also available.
|
||||||
[% END %]
|
[% END %]
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
@@ -37,7 +36,7 @@
|
|||||||
[% IF tail %]
|
[% IF tail %]
|
||||||
/* The server may give us a full log (e.g. if the log is in
|
/* The server may give us a full log (e.g. if the log is in
|
||||||
S3). So extract the last lines. */
|
S3). So extract the last lines. */
|
||||||
log_data = log_data.split("\n").slice(-[% HTML.escape(tail) %]).join("\n");
|
log_data = log_data.split("\n").slice(-[%tail%]).join("\n");
|
||||||
[% END %]
|
[% END %]
|
||||||
|
|
||||||
$("#contents").text(log_data);
|
$("#contents").text(log_data);
|
||||||
|
@@ -6,10 +6,10 @@
|
|||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th>Job</th>
|
<th>Job</th>
|
||||||
|
<th>System</th>
|
||||||
<th>Build</th>
|
<th>Build</th>
|
||||||
<th>Step</th>
|
<th>Step</th>
|
||||||
<th>What</th>
|
<th>What</th>
|
||||||
<th>Status</th>
|
|
||||||
<th>Since</th>
|
<th>Since</th>
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
@@ -17,48 +17,12 @@
|
|||||||
[% name = m.key ? stripSSHUser(m.key) : "localhost" %]
|
[% name = m.key ? stripSSHUser(m.key) : "localhost" %]
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th colspan="7">
|
<th colspan="6">
|
||||||
<tt [% IF m.value.disabled %]style="text-decoration: line-through;"[% END %]>[% INCLUDE renderMachineName machine=m.key %]</tt>
|
<tt [% IF m.value.disabled %]style="text-decoration: line-through;"[% END %]>[% INCLUDE renderMachineName machine=m.key %]</tt>
|
||||||
[% IF m.value.primarySystemType %]
|
[% IF m.value.systemTypes %]
|
||||||
<span class="muted" style="font-weight: normal;">
|
<span class="muted" style="font-weight: normal;">
|
||||||
(<tt>[% m.value.primarySystemType | html %]</tt>)
|
([% comma=0; FOREACH system IN m.value.systemTypes %][% IF comma; %], [% ELSE; comma = 1; END %]<tt>[% system %]</tt>[% END %])
|
||||||
</span>
|
</span>
|
||||||
|
|
||||||
[% WRAPPER makePopover title="Details" classes="btn-secondary btn-sm" %]
|
|
||||||
<ul class="list-unstyled mb-0">
|
|
||||||
<li><b>System types: </b>[% comma=0; FOREACH system IN m.value.systemTypes %][% IF comma; %], [% ELSE; comma = 1; END %]<tt>[% system | html%]</tt>[% END %]</li>
|
|
||||||
<li><b>Supported Features: </b>[% comma=0; FOREACH feat IN m.value.supportedFeatures %][% IF comma; %], [% ELSE; comma = 1; END %]<tt>[% feat| html %]</tt>[% END %]</li>
|
|
||||||
<li><b>Mandatory Features: </b>[% comma=0; FOREACH feat IN m.value.mandatoryFeatures %][% IF comma; %], [% ELSE; comma = 1; END %]<tt>[% feat| html %]</tt>[% END %]</li>
|
|
||||||
<li><b>Capacity: </b>[% INCLUDE renderYesNo value=m.value.hasCapacity %] <b>Static: </b>[% INCLUDE renderYesNo value=m.value.hasStaticCapacity %] <b>Dynamic: </b>[% INCLUDE renderYesNo value=m.value.hasDynamicCapacity %]</li>
|
|
||||||
<li><b>Scheduling Score: </b>[% HTML.escape(m.value.score) %]</li>
|
|
||||||
<li><b>Load: </b><tt>[% pretty_load(m.value.stats.load1) | html %]</tt> <tt>[% pretty_load(m.value.stats.load5) | html %]</tt> <tt>[% pretty_load(m.value.stats.load15) | html %]</tt></li>
|
|
||||||
<li><b>Memory: </b><tt>[% human_bytes(m.value.stats.memUsage) | html %]</tt> of <tt>[% human_bytes(m.value.memTotal) | html %]</tt> used (<tt>[% human_bytes(m.value.memTotal - m.value.stats.memUsage) | html %]</tt> free)</li>
|
|
||||||
[% pressure = m.value.stats.pressure %]
|
|
||||||
[% MACRO render_pressure(title, pressure) BLOCK %]
|
|
||||||
[% IF pressure %]
|
|
||||||
<tr><td><b>[% HTML.escape(title) %]:</b></td><td><tt>[% pretty_percent(pressure.avg10) %]%</tt></td><td><td><tt>[% pretty_percent(pressure.avg60) %]%</tt></td><td><td><tt>[% pretty_percent(pressure.avg300) %]%</tt></td><td>
|
|
||||||
[% END %]
|
|
||||||
[% END %]
|
|
||||||
[% IF pressure %]
|
|
||||||
<li><b>Pressure: </b>
|
|
||||||
<table class="pressureTable">
|
|
||||||
[% render_pressure('Some CPU', pressure.cpuSome) %]
|
|
||||||
[% render_pressure('Some IO', pressure.ioSome) %]
|
|
||||||
[% render_pressure('Full IO', pressure.ioFull) %]
|
|
||||||
[% render_pressure('Full IRQ', pressure.irqFull) %]
|
|
||||||
[% render_pressure('Some Memory', pressure.memSome) %]
|
|
||||||
[% render_pressure('Full Memory', pressure.memFull) %]
|
|
||||||
</table>
|
|
||||||
</li>
|
|
||||||
[% END %]
|
|
||||||
</ul>
|
|
||||||
[% END %]
|
|
||||||
[% ELSE %]
|
|
||||||
[% IF m.value.systemTypes %]
|
|
||||||
<span class="muted" style="font-weight: normal;">
|
|
||||||
([% comma=0; FOREACH system IN m.value.systemTypes %][% IF comma; %], [% ELSE; comma = 1; END %]<tt>[% system | html %]</tt>[% END %])
|
|
||||||
</span>
|
|
||||||
[% END %]
|
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF m.value.nrStepsDone %]
|
[% IF m.value.nrStepsDone %]
|
||||||
<span class="muted" style="font-weight: normal;">
|
<span class="muted" style="font-weight: normal;">
|
||||||
@@ -76,10 +40,10 @@
|
|||||||
[% idle = 0 %]
|
[% idle = 0 %]
|
||||||
<tr>
|
<tr>
|
||||||
<td><tt>[% INCLUDE renderFullJobName project=step.project jobset=step.jobset job=step.job %]</tt></td>
|
<td><tt>[% INCLUDE renderFullJobName project=step.project jobset=step.jobset job=step.job %]</tt></td>
|
||||||
<td><a [% HTML.attributes(href => c.uri_for('/build' step.build)) %]>[% HTML.escape(step.build) %]</a></td>
|
<td><tt>[% step.system %]</tt></td>
|
||||||
<td>[% IF step.busy >= 30 %]<a class="row-link" [% HTML.attributes(href => c.uri_for('/build' step.build 'nixlog' step.stepnr 'tail')) %]>[% HTML.escape(step.stepnr) %]</a>[% ELSE; HTML.escape(step.stepnr); END %]</td>
|
<td><a href="[% c.uri_for('/build' step.build) %]">[% step.build %]</a></td>
|
||||||
<td><tt>[% step.drvpath.match('-(.*)').0 | html %]</tt></td>
|
<td>[% IF step.busy >= 30 %]<a class="row-link" href="[% c.uri_for('/build' step.build 'nixlog' step.stepnr 'tail') %]">[% step.stepnr %]</a>[% ELSE; step.stepnr; END %]</td>
|
||||||
<td>[% INCLUDE renderBusyStatus %]</td>
|
<td><tt>[% step.drvpath.match('-(.*)').0 %]</tt></td>
|
||||||
<td style="width: 10em">[% INCLUDE renderDuration duration = curTime - step.starttime %] </td>
|
<td style="width: 10em">[% INCLUDE renderDuration duration = curTime - step.starttime %] </td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
|
@@ -15,11 +15,11 @@
[% FOREACH m IN machines %]
[% FOREACH m IN machines %]
<tr>
<tr>
<td><input type="checkbox" name="enabled" [% IF m.value.maxJobs > 0 %]CHECKED[% END %] disabled="true" /></td>
<td><input type="checkbox" name="enabled" [% IF m.value.maxJobs > 0 %]CHECKED[% END %] disabled="true" /></td>
<td>[% HTML.escape(m.key) %]</a></td>
<td>[% m.key %]</a></td>
<td>[% HTML.escape(m.value.maxJobs) %]</td>
<td>[% m.value.maxJobs %]</td>
<td>[% HTML.escape(m.value.speedFactor) %]</td>
<td>[% m.value.speedFactor %]</td>
<td>
<td>
[% comma=0; FOREACH system IN m.value.systemTypes %][% IF comma; %], [% ELSE; comma = 1; END; HTML.escape(system); END %]
[% comma=0; FOREACH system IN m.value.systemTypes %][% IF comma; %], [% ELSE; comma = 1; END; system; END %]
</td>
</td>
</tr>
</tr>
[% END %]
[% END %]

@@ -6,7 +6,7 @@
|
|||||||
[% FOREACH i IN newsItems %]
|
[% FOREACH i IN newsItems %]
|
||||||
<div class="news-item">
|
<div class="news-item">
|
||||||
[% contents = String.new(i.contents) %]
|
[% contents = String.new(i.contents) %]
|
||||||
<h4 class="alert-heading">[% INCLUDE renderDateTime timestamp=i.createtime %] by [% HTML.escape(i.author.fullname) %]</h4>
|
<h4 class="alert-heading">[% INCLUDE renderDateTime timestamp=i.createtime %] by [% i.author.fullname %]</h4>
|
||||||
[% contents.replace('\n','<br />\n') %]
|
[% contents.replace('\n','<br />\n') %]
|
||||||
</div>
|
</div>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -65,7 +65,7 @@
|
|||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
|
|
||||||
<div class="alert alert-warning">Hydra has no projects yet. Please
|
<div class="alert alert-warning">Hydra has no projects yet. Please
|
||||||
<a [% HTML.attributes(href => c.uri_for(c.controller('Project').action_for('create'))) %]>create a project</a>.</div>
|
<a href="[% c.uri_for(c.controller('Project').action_for('create')) %]">create a project</a>.</div>
|
||||||
|
|
||||||
[% END %]
|
[% END %]
|
||||||
|
|
||||||
|
@@ -1,17 +1,17 @@
|
|||||||
[% BLOCK renderProductLinks %]
|
[% BLOCK renderProductLinks %]
|
||||||
<tr>
|
<tr>
|
||||||
<th>URL:</th>
|
<th>URL:</th>
|
||||||
<td><a [% HTML.attributes(href => uri) %]><tt>[% uri | html %]</tt></a></td>
|
<td><a href="[% uri %]"><tt>[% uri %]</tt></a></td>
|
||||||
</tr>
|
</tr>
|
||||||
[% IF latestRoot %]
|
[% IF latestRoot %]
|
||||||
<tr>
|
<tr>
|
||||||
<th>Links to latest:</th>
|
<th>Links to latest:</th>
|
||||||
<td>
|
<td>
|
||||||
[% uri2 = "${c.uri_for(latestRoot.join('/') 'download-by-type' product.type product.subtype)}" %]
|
[% uri2 = "${c.uri_for(latestRoot.join('/') 'download-by-type' product.type product.subtype)}" %]
|
||||||
<a [% HTML.attributes(href => uri2) %]><tt>[% uri2 | html %]</tt></a>
|
<a href="[% uri2 %]"><tt>[% uri2 %]</tt></a>
|
||||||
<br />
|
<br />
|
||||||
[% uri2 = "${c.uri_for(latestRoot.join('/') 'download' product.productnr)}" %]
|
[% uri2 = "${c.uri_for(latestRoot.join('/') 'download' product.productnr)}" %]
|
||||||
<a [% HTML.attributes(href => uri2) %]><tt>[% uri2 | html %]</tt></a>
|
<a href="[% uri2 %]"><tt>[% uri2 %]</tt></a>
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -49,7 +49,7 @@
|
|||||||
Error
|
Error
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
<a [% HTML.attributes(href => contents) %]>
|
<a href="[% contents %]">
|
||||||
Failed build produced output. Click here to inspect the output.
|
Failed build produced output. Click here to inspect the output.
|
||||||
</a>
|
</a>
|
||||||
</td>
|
</td>
|
||||||
@@ -58,9 +58,9 @@
|
|||||||
<p>If you have Nix installed on your machine, this failed build output and
|
<p>If you have Nix installed on your machine, this failed build output and
|
||||||
all its dependencies can be unpacked into your local Nix store by doing:</p>
|
all its dependencies can be unpacked into your local Nix store by doing:</p>
|
||||||
|
|
||||||
<div class="card bg-light"><div class="card-body p-2"><code><span class="shell-prompt">$ </span>curl [% HTML.escape(uri) %] | gunzip | nix-store --import</code></div></div>
|
<div class="card bg-light"><div class="card-body p-2"><code><span class="shell-prompt">$ </span>curl [% uri %] | gunzip | nix-store --import</code></div></div>
|
||||||
|
|
||||||
<p>The build output can then be found in the path <tt>[% product.path | html %]</tt>.</p>
|
<p>The build output can then be found in the path <tt>[% product.path %]</tt>.</p>
|
||||||
[% END %]
|
[% END %]
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
@@ -74,17 +74,17 @@
|
|||||||
Nix package
|
Nix package
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
<tt>[% build.nixname | html %]</tt>
|
<tt>[% HTML.escape(build.nixname) %]</tt>
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
[% WRAPPER makePopover title="Help" classes="btn-secondary btn-sm"
|
[% WRAPPER makePopover title="Help" classes="btn-secondary btn-sm"
|
||||||
%] <p>You can install this package using the Nix package
|
%] <p>You can install this package using the Nix package
|
||||||
manager from the command-line:</p>
|
manager from the command-line:</p>
|
||||||
|
|
||||||
<div class="card bg-light"><div class="card-body p-2"><code><span class="shell-prompt">$ </span>nix-env -i [% HTML.escape(product.path) %][% IF binaryCachePublicUri %] --option binary-caches [% HTML.escape(binaryCachePublicUri) %][% END %]</code></div></div>
|
<div class="card bg-light"><div class="card-body p-2"><code><span class="shell-prompt">$ </span>nix-env -i [%HTML.escape(product.path)%][% IF binaryCachePublicUri %] --option binary-caches [% HTML.escape(binaryCachePublicUri) %][% END %]</code></div></div>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF localStore %]
|
[% IF localStore %]
|
||||||
<a class="btn btn-secondary btn-sm" [% HTML.attributes(href => contents) %]>Contents</a>
|
<a class="btn btn-secondary btn-sm" href="[% contents %]">Contents</a>
|
||||||
[% END %]
|
[% END %]
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
@@ -100,8 +100,8 @@
|
|||||||
[% filename = build.nixname _ (product.subtype ? "-" _ product.subtype : "") _ ".closure.gz" %]
|
[% filename = build.nixname _ (product.subtype ? "-" _ product.subtype : "") _ ".closure.gz" %]
|
||||||
[% uri = c.uri_for('/build' build.id 'nix' 'closure' filename ) %]
|
[% uri = c.uri_for('/build' build.id 'nix' 'closure' filename ) %]
|
||||||
|
|
||||||
<a [% HTML.attributes(href => uri) %]>
|
<a href="[% uri %]">
|
||||||
<tt>[% product.path | html %]</tt>
|
<tt>[% product.path %]</tt>
|
||||||
</a>
|
</a>
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
@@ -110,16 +110,16 @@
|
|||||||
all its dependencies can be unpacked into your local Nix
|
all its dependencies can be unpacked into your local Nix
|
||||||
store by doing:</p>
|
store by doing:</p>
|
||||||
|
|
||||||
<div class="card bg-light"><div class="card-body p-2"><code><span class="shell-prompt">$ </span>gunzip < [% HTML.escape(filename) %] | nix-store --import</code></div></div>
|
<div class="card bg-light"><div class="card-body p-2"><code><span class="shell-prompt">$ </span>gunzip < [% filename %] | nix-store --import</code></div></div>
|
||||||
|
|
||||||
<p>or to download and unpack in one command:</p>
|
<p>or to download and unpack in one command:</p>
|
||||||
|
|
||||||
<div class="card bg-light"><div class="card-body p-2"><code><span class="shell-prompt">$ </span>curl [% HTML.escape(uri) %] | gunzip | nix-store --import</code></div></div>
|
<div class="card bg-light"><div class="card-body p-2"><code><span class="shell-prompt">$ </span>curl [% uri %] | gunzip | nix-store --import</code></div></div>
|
||||||
|
|
||||||
<p>The package can then be found in the path <tt>[%
|
<p>The package can then be found in the path <tt>[%
|
||||||
product.path | html %]</tt>. You’ll probably also want to do</p>
|
product.path %]</tt>. You’ll probably also want to do</p>
|
||||||
|
|
||||||
<div class="card bg-light"><div class="card-body p-2"><code><span class="shell-prompt">$ </span>nix-env -i [% HTML.escape(product.path) %]</code></div></div>
|
<div class="card bg-light"><div class="card-body p-2"><code><span class="shell-prompt">$ </span>nix-env -i [% product.path %]</code></div></div>
|
||||||
|
|
||||||
<p>to actually install the package in your Nix user environment.</p>
|
<p>to actually install the package in your Nix user environment.</p>
|
||||||
|
|
||||||
@@ -174,16 +174,16 @@
|
|||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
Channel expression tarball
|
Channel expression tarball
|
||||||
[% IF product.subtype != "-" %]for <tt>[% product.subtype | html %]</tt>[% END %]
|
[% IF product.subtype != "-" %]for <tt>[% product.subtype %]</tt>[% END %]
|
||||||
</td>
|
</td>
|
||||||
[% ELSE %]
|
[% ELSE %]
|
||||||
<td>File</td>
|
<td>File</td>
|
||||||
<td>[% HTML.escape(product.subtype) %]</td>
|
<td>[% product.subtype %]</td>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% END %]
|
[% END %]
|
||||||
<td>
|
<td>
|
||||||
<a [% HTML.attributes(href => uri) %]>
|
<a href="[% uri %]">
|
||||||
<tt>[% product.name | html %]</tt>
|
<tt>[% product.name %]</tt>
|
||||||
</a>
|
</a>
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
@@ -191,12 +191,12 @@
|
|||||||
<table class="info-table">
|
<table class="info-table">
|
||||||
[% INCLUDE renderProductLinks %]
|
[% INCLUDE renderProductLinks %]
|
||||||
<tr><th>File size:</th><td>[% product.filesize %] bytes ([% mibs(product.filesize / (1024 * 1024)) %] MiB)</td></tr>
|
<tr><th>File size:</th><td>[% product.filesize %] bytes ([% mibs(product.filesize / (1024 * 1024)) %] MiB)</td></tr>
|
||||||
<tr><th>SHA-256 hash:</th><td><tt>[% product.sha256hash | html %]</tt></td></tr>
|
<tr><th>SHA-256 hash:</th><td><tt>[% product.sha256hash %]</tt></td></tr>
|
||||||
<tr><th>Full path:</th><td><tt>[% product.path | html %]</tt></td></tr>
|
<tr><th>Full path:</th><td><tt>[% product.path %]</tt></td></tr>
|
||||||
</table>
|
</table>
|
||||||
[% END %]
|
[% END %]
|
||||||
[% IF localStore %]
|
[% IF localStore %]
|
||||||
<a class="btn btn-secondary btn-sm" [% HTML.attributes(href => contents) %]>Contents</a>
|
<a class="btn btn-secondary btn-sm" href="[% contents %]">Contents</a>
|
||||||
[% END %]
|
[% END %]
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
@@ -211,15 +211,15 @@
|
|||||||
[% CASE "coverage" %]
|
[% CASE "coverage" %]
|
||||||
<td>Code coverage</td>
|
<td>Code coverage</td>
|
||||||
<td>
|
<td>
|
||||||
<a [% HTML.attributes(href => uri) %]>
|
<a href="[% uri %]">
|
||||||
Analysis report
|
Analysis report
|
||||||
</a>
|
</a>
|
||||||
</td>
|
</td>
|
||||||
[% CASE DEFAULT %]
|
[% CASE DEFAULT %]
|
||||||
<td>Report</td>
|
<td>Report</td>
|
||||||
<td>
|
<td>
|
||||||
<a [% HTML.attributes(href => uri) %]>
|
<a href="[% uri %]">
|
||||||
<tt>[% product.subtype | html %]</tt>
|
<tt>[% product.subtype %]</tt>
|
||||||
</a>
|
</a>
|
||||||
</td>
|
</td>
|
||||||
[% END %]
|
[% END %]
|
||||||
@@ -240,7 +240,7 @@
|
|||||||
Documentation
|
Documentation
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
<a [% HTML.attributes(href => uri) %]>
|
<a href="[% uri %]">
|
||||||
[% SWITCH product.subtype %]
|
[% SWITCH product.subtype %]
|
||||||
[% CASE "readme" %]
|
[% CASE "readme" %]
|
||||||
Read Me!
|
Read Me!
|
||||||
@@ -249,7 +249,7 @@
|
|||||||
[% CASE "release-notes" %]
|
[% CASE "release-notes" %]
|
||||||
Release notes
|
Release notes
|
||||||
[% CASE DEFAULT %]
|
[% CASE DEFAULT %]
|
||||||
[% HTML.escape(product.subtype) %]
|
[% product.subtype %]
|
||||||
[% END %]
|
[% END %]
|
||||||
</a>
|
</a>
|
||||||
</td>
|
</td>
|
||||||
@@ -266,12 +266,12 @@
|
|||||||
|
|
||||||
<tr class="product">
|
<tr class="product">
|
||||||
<td>
|
<td>
|
||||||
<tt>[% product.type | html %]</tt>
|
<tt>[% product.type %]</tt>
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
[% HTML.escape(product) %]
|
[% product %]
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
</td>
|
</td>
|
||||||
|
@@ -39,7 +39,7 @@
|
|||||||
[% FOREACH s IN systems %]
|
[% FOREACH s IN systems %]
|
||||||
<tr>
|
<tr>
|
||||||
<td><tt>[% HTML.escape(s.system) %]</tt></td>
|
<td><tt>[% HTML.escape(s.system) %]</tt></td>
|
||||||
<td>[% HTML.escape(s.c) %]</td>
|
<td>[% s.c %]</td>
|
||||||
</tr>
|
</tr>
|
||||||
[% END %]
|
[% END %]
|
||||||
</tdata>
|
</tdata>
|
||||||
|
@@ -12,9 +12,9 @@
|
|||||||
is
|
is
|
||||||
[% END %]
|
[% END %]
|
||||||
the output of a RunCommand execution of the command <tt>[% HTML.escape(runcommandlog.command) %]</tt>
|
the output of a RunCommand execution of the command <tt>[% HTML.escape(runcommandlog.command) %]</tt>
|
||||||
on <a [% HTML.attributes(href => c.uri_for('/build', build.id)) %]>Build [% HTML.escape(build.id) %]</a>.
|
on <a href="[% c.uri_for('/build', build.id) %]">Build [% build.id %]</a>.
|
||||||
[% IF tail %]
|
[% IF tail %]
|
||||||
The <a [% HTML.attributes(href => c.uri_for('/build', build.id, 'runcommandlog', runcommandlog.uuid)) %]>full log</a> is also available.
|
The <a href="[% c.uri_for('/build', build.id, 'runcommandlog', runcommandlog.uuid) %]">full log</a> is also available.
|
||||||
[% END %]
|
[% END %]
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
@@ -33,7 +33,7 @@
|
|||||||
[% IF tail %]
|
[% IF tail %]
|
||||||
/* The server may give us a full log (e.g. if the log is in
|
/* The server may give us a full log (e.g. if the log is in
|
||||||
S3). So extract the last lines. */
|
S3). So extract the last lines. */
|
||||||
log_data = log_data.split("\n").slice(-[% HTML.escape(tail) %]).join("\n");
|
log_data = log_data.split("\n").slice(-[%tail%]).join("\n");
|
||||||
[% END %]
|
[% END %]
|
||||||
|
|
||||||
$("#contents").text(log_data);
|
$("#contents").text(log_data);
|
||||||
|
@@ -7,7 +7,7 @@
|
|||||||
|
|
||||||
[% IF builds.size > 0 %]
|
[% IF builds.size > 0 %]
|
||||||
|
|
||||||
<p>The following builds match your query:[% IF builds.size > limit %] <span class="text-warning">(first [% HTML.escape(limit) %] results only)</span>[% END %]</p>
|
<p>The following builds match your query:[% IF builds.size > limit %] <span class="text-warning">(first [% limit %] results only)</span>[% END %]</p>
|
||||||
|
|
||||||
[% INCLUDE renderBuildList %]
|
[% INCLUDE renderBuildList %]
|
||||||
|
|
||||||
@@ -58,7 +58,7 @@
|
|||||||
|
|
||||||
[% IF jobs.size > 0; matched = 1 %]
|
[% IF jobs.size > 0; matched = 1 %]
|
||||||
|
|
||||||
<p>The following jobs match your query:[% IF jobs.size > limit %] <span class="text-warning">(first [% HTML.escape(limit) %] results only)</span>[% END %]</p>
|
<p>The following jobs match your query:[% IF jobs.size > limit %] <span class="text-warning">(first [% limit %] results only)</span>[% END %]</p>
|
||||||
|
|
||||||
<table class="table table-striped table-condensed clickable-rows">
|
<table class="table table-striped table-condensed clickable-rows">
|
||||||
<thead>
|
<thead>
|
||||||
|
@@ -181,20 +181,12 @@ a.squiggle:hover {
|
|||||||
padding-bottom: 1px;
|
padding-bottom: 1px;
|
||||||
}
|
}
|
||||||
|
|
||||||
table.pressureTable {
|
|
||||||
margin-left: 2em;
|
|
||||||
}
|
|
||||||
|
|
||||||
table.pressureTable td {
|
|
||||||
padding: 0 .4em;
|
|
||||||
}
|
|
||||||
|
|
||||||
@media (prefers-color-scheme: dark) {
|
@media (prefers-color-scheme: dark) {
|
||||||
/* Prevent some flickering */
|
/* Prevent some flickering */
|
||||||
html {
|
html {
|
||||||
background-color: #1f1f1f;
|
background-color: #1f1f1f;
|
||||||
}
|
}
|
||||||
body, div.popover, div.popover-body {
|
body, div.popover {
|
||||||
background-color: #1f1f1f;
|
background-color: #1f1f1f;
|
||||||
color: #fafafa !important;
|
color: #fafafa !important;
|
||||||
}
|
}
|
||||||
|
@@ -129,12 +129,6 @@ $(document).ready(function() {
el.addClass("is-local");
el.addClass("is-local");
}
}
});
});

[...document.getElementsByTagName("iframe")].forEach((element) => {
|
element.contentWindow.addEventListener("DOMContentLoaded", (_) => {
|
element.style.height = element.contentWindow.document.body.scrollHeight + 'px';
|
})
|
})
|
});
});

var tabsLoaded = {};
var tabsLoaded = {};

@@ -7,7 +7,7 @@
 
 [% ELSE %]
 
-[% INCLUDE renderBuildList builds=resource showSchedulingInfo=1 hideResultInfo=1 busy=1 showStepName=1 %]
+[% INCLUDE renderBuildList builds=resource showSchedulingInfo=1 hideResultInfo=1 busy=1 %]
 
 [% END %]
 
@@ -2,7 +2,7 @@
 [% PROCESS common.tt %]
 
 <p>Showing steps [% (page - 1) * resultsPerPage + 1 %] - [% (page - 1)
-* resultsPerPage + steps.size %] of about [% HTML.escape(total) %] in
+* resultsPerPage + steps.size %] of about [% total %] in
 order of descending finish time.</p>
 
 <table class="table table-striped table-condensed clickable-rows">
@@ -24,8 +24,8 @@ order of descending finish time.</p>
 <td>[% INCLUDE renderBuildStatusIcon buildstatus=step.status size=16 %]</td>
 <td><tt>[% step.drvpath.match('-(.*).drv').0 %]</tt></td>
 <td><tt>[% INCLUDE renderFullJobNameOfBuild build=step.build %]</tt></td>
-<td><a [% HTML.attributes(href => c.uri_for('/build' step.build.id)) %]>[% HTML.escape(step.build.id) %]</a></td>
-<td><a class="row-link" [% HTML.attributes(href => c.uri_for('/build' step.build.id 'nixlog' step.stepnr 'tail')) %]>[% HTML.escape(step.stepnr) %]</a></td>
+<td><a href="[% c.uri_for('/build' step.build.id) %]">[% step.build.id %]</a></td>
+<td><a class="row-link" href="[% c.uri_for('/build' step.build.id 'nixlog' step.stepnr 'tail') %]">[% step.stepnr %]</a></td>
 <td>[% INCLUDE renderRelativeDate timestamp=step.stoptime %]</td>
 <td style="width: 10em">[% INCLUDE renderDuration duration = step.stoptime - step.starttime %] </td>
 <td><tt>[% INCLUDE renderMachineName machine=step.machine %]</tt></td>
@@ -1,24 +0,0 @@
-<script type="text/javascript" src="[% c.uri_for("/static/js/jquery/jquery-3.4.1.min.js") %]"></script>
-<script type="text/javascript" src="[% c.uri_for("/static/js/jquery/jquery-ui-1.10.4.min.js") %]"></script>
-<script type="text/javascript" src="[% c.uri_for("/static/js/moment/moment-2.24.0.min.js") %]"></script>
-
-<meta name="viewport" content="width=device-width, initial-scale=1.0" />
-
-<link [% HTML.attributes(href => c.uri_for("/static/fontawesome/css/all.css")) %] rel="stylesheet" />
-<script type="text/javascript" src="[% c.uri_for("/static/js/popper.min.js") %]"></script>
-<script type="text/javascript" src="[% c.uri_for("/static/bootstrap/js/bootstrap.min.js") %]"></script>
-<link [% HTML.attributes(href => c.uri_for("/static/bootstrap/css/bootstrap.min.css")) %] rel="stylesheet" />
-
-<!-- hydra.css may need to be moved to before boostrap to make the @media rule work. -->
-<link rel="stylesheet" [% HTML.attributes(href => c.uri_for("/static/css/hydra.css")) %] type="text/css" />
-<link rel="stylesheet" [% HTML.attributes(href => c.uri_for("/static/css/rotated-th.css")) %] type="text/css" />
-
-<style>
-.popover { max-width: 40%; }
-</style>
-
-<script type="text/javascript" src="[% c.uri_for("/static/js/bootbox.min.js") %]"></script>
-
-<link rel="stylesheet" [% HTML.attributes(href => c.uri_for("/static/css/tree.css")) %] type="text/css" />
-
-<script type="text/javascript" src="[% c.uri_for("/static/js/common.js") %]"></script>
@@ -1,6 +1,6 @@
 [% BLOCK makeSubMenu %]
 <li class="nav-item dropdown" [% IF id; HTML.attributes(id => id); END %] >
-<a class="nav-link dropdown-toggle" href="#" data-toggle="dropdown">[% HTML.escape(title) %]<b class="caret"></b></a>
+<a class="nav-link dropdown-toggle" href="#" data-toggle="dropdown">[% title %]<b class="caret"></b></a>
 <div class="dropdown-menu[% IF align == 'right' %] dropdown-menu-right[% END %]">
 [% content %]
 </div>
@@ -34,9 +34,6 @@
 [% INCLUDE menuItem
 uri = c.uri_for(c.controller('Root').action_for('steps'))
 title = "Latest steps" %]
-[% INCLUDE menuItem
-uri = c.uri_for(c.controller('Root').action_for('queue_runner_status'))
-title = "Queue Runner Status" %]
 [% END %]
 
 [% IF project %]
@@ -45,7 +42,7 @@
 <div class="dropdown-divider"></div>
 [% INCLUDE menuItem uri = c.uri_for(c.controller('Project').action_for('project'), [project.name]) title = "Overview" %]
 [% INCLUDE menuItem uri = c.uri_for(c.controller('Project').action_for('all'), [project.name]) title = "Latest builds" %]
-[% IF localStore %][% INCLUDE menuItem uri = c.uri_for('/project' project.name 'channel' 'latest') title = "Channel" %][% END %]
+[% INCLUDE menuItem uri = c.uri_for('/project' project.name 'channel' 'latest') title = "Channel" %]
 [% END %]
 [% END %]
 
@@ -62,7 +59,7 @@
 [% INCLUDE menuItem
 uri = c.uri_for(c.controller('Jobset').action_for('all'), [project.name, jobset.name])
 title = "Latest builds" %]
-[% IF localStore %][% INCLUDE menuItem uri = c.uri_for('/jobset' project.name jobset.name 'channel' 'latest') title = "Channel" %][% END %]
+[% INCLUDE menuItem uri = c.uri_for('/jobset' project.name jobset.name 'channel' 'latest') title = "Channel" %]
 [% END %]
 [% END %]
 
@@ -76,7 +73,7 @@
 [% INCLUDE menuItem
 uri = c.uri_for(c.controller('Job').action_for('all'), [project.name, jobset.name, job])
 title = "Latest builds" %]
-[% IF localStore %][% INCLUDE menuItem uri = c.uri_for('/job' project.name jobset.name job 'channel' 'latest') title = "Channel" %][% END %]
+[% INCLUDE menuItem uri = c.uri_for('/job' project.name jobset.name job 'channel' 'latest') title = "Channel" %]
 [% END %]
 [% END %]
 
@@ -143,7 +140,7 @@
 <div class="dropdown-divider"></div>
 [% END %]
 [% IF c.config.github_client_id %]
-<a class="dropdown-item" href="/github-redirect?after=[% c.req.path | uri %]">Sign in with GitHub</a>
+<a class="dropdown-item" href="/github-redirect?after=[% c.req.path %]">Sign in with GitHub</a>
 <div class="dropdown-divider"></div>
 [% END %]
 <a class="dropdown-item" href="#hydra-signin" data-toggle="modal">Sign in with a Hydra account</a>
@@ -17,7 +17,7 @@
 disabled="disabled"
 [% END %]
 [% HTML.attributes(id => "role-${role}", value => role) %] />
-<label [% HTML.attributes(for => "role-${role}") %]> [% HTML.escape(role) %]</label><br />
+<label [% HTML.attributes(for => "role-${role}") %]> [% role %]</label><br />
 [% END %]
 
 <form>
@@ -14,17 +14,17 @@
 <tbody>
 [% FOREACH u IN users %]
 <tr>
-<td><a class="row-link" [% HTML.attributes(href => c.uri_for(c.controller('User').action_for('edit'), [u.username])) %]>[% HTML.escape(u.username) %]</a></td>
+<td><a class="row-link" href="[% c.uri_for(c.controller('User').action_for('edit'), [u.username]) %]">[% HTML.escape(u.username) %]</a></td>
 <td>[% HTML.escape(u.fullname) %]</td>
 <td>[% HTML.escape(u.emailaddress) %]</td>
-<td>[% FOREACH r IN u.userroles %]<i>[% HTML.escape(r.role) %]</i> [% END %]</td>
+<td>[% FOREACH r IN u.userroles %]<i>[% r.role %]</i> [% END %]</td>
 <td>[% IF u.emailonerror %]Yes[% ELSE %]No[% END %]</td>
 </tr>
 [% END %]
 </tbody>
 </table>
 
-<a class="btn btn-primary" [% HTML.attributes(href => c.uri_for(c.controller('Root').action_for('register'))) %]>
+<a class="btn btn-primary" href="[% c.uri_for(c.controller('Root').action_for('register')) %]">
 <i class="fas fa-plus"></i> Add a new user
 </a>
 
@@ -366,19 +366,12 @@ sub evalJobs {
 "or flake.checks " .
 "or (throw \"flake '$flakeRef' does not provide any Hydra jobs or checks\")";
 
-@cmd = ("nix-eval-jobs",
-# Disable the eval cache to prevent SQLite database contention.
-# Since Hydra typically evaluates each revision only once,
-# parallel workers would compete for database locks without
-# providing any benefit from caching.
-"--option", "eval-cache", "false",
-"--expr", $nix_expr);
+@cmd = ("nix-eval-jobs", "--expr", $nix_expr);
 } else {
 my $nixExprInput = $inputInfo->{$nixExprInputName}->[0]
 or die "cannot find the input containing the job expression\n";
 
 @cmd = ("nix-eval-jobs",
-"--option", "restrict-eval", "true",
 "<" . $nixExprInputName . "/" . $nixExprPath . ">",
 inputsToArgs($inputInfo));
 }
@@ -388,7 +381,7 @@ sub evalJobs {
 push @cmd, "--meta";
 push @cmd, "--constituents";
 push @cmd, "--force-recurse";
-push @cmd, ("--option", "allow-import-from-derivation", "false") if ($config->{allow_import_from_derivation} // "false") ne "true";
+push @cmd, ("--option", "allow-import-from-derivation", "false") if $config->{allow_import_from_derivation} // "true" ne "true";
 push @cmd, ("--workers", $config->{evaluator_workers} // 1);
 push @cmd, ("--max-memory-size", $config->{evaluator_max_memory_size} // 4096);
 
@@ -402,14 +395,9 @@ sub evalJobs {
 print STDERR "evaluator: @escaped\n";
 }
 
-# Unset NIX_PATH for nix-eval-jobs to ensure reproducible evaluations
-my %env = %ENV;
-delete $env{'NIX_PATH'};
-
 my $evalProc = IPC::Run::start \@cmd,
 '>', IPC::Run::new_chunker, \my $out,
-'2>', \my $err,
-init => sub { %ENV = %env; };
+'2>', \my $err;
 
 return sub {
 while (1) {
@@ -9,7 +9,6 @@ use Net::Statsd;
 use File::Slurper qw(read_text);
 use JSON::MaybeXS;
 use Getopt::Long qw(:config gnu_getopt);
-use IPC::Run3;
 
 STDERR->autoflush(1);
 binmode STDERR, ":encoding(utf8)";
@@ -26,11 +25,10 @@ sub gauge {
 }
 
 sub sendQueueRunnerStats {
-my ($stdout, $stderr);
-run3(['hydra-queue-runner', '--status'], \undef, \$stdout, \$stderr);
-die "cannot get queue runner stats: $stderr\n" if $? != 0;
+my $s = `hydra-queue-runner --status`;
+die "cannot get queue runner stats\n" if $? != 0;
 
-my $json = decode_json($stdout) or die "cannot decode queue runner status";
+my $json = decode_json($s) or die "cannot decode queue runner status";
 
 gauge("hydra.queue.up", $json->{status} eq "up" ? 1 : 0);
 
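For context on the IPC::Run3 call shape used on the removed (-) side of this hunk, a minimal standalone sketch follows; it reuses the same command purely for illustration and is not part of the diff:

use strict;
use warnings;
use IPC::Run3;  # exports run3()

# run3(\@cmd, \$stdin, \$stdout, \$stderr) runs the command without invoking a shell.
# Passing \undef as stdin feeds the child from the null device, and the child's
# exit status is left in $?, so it can still be checked as with system() or backticks.
my ($stdout, $stderr);
run3(['hydra-queue-runner', '--status'], \undef, \$stdout, \$stderr);
die "hydra-queue-runner failed: $stderr\n" if $? != 0;
print $stdout;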
@@ -6,7 +6,6 @@ use Catalyst::Test ();
 use HTTP::Request;
 use HTTP::Request::Common;
 use JSON::MaybeXS qw(decode_json encode_json);
-use Digest::SHA qw(hmac_sha256_hex);
 
 sub is_json {
 my ($response, $message) = @_;
@@ -22,13 +21,7 @@ sub is_json {
 return $data;
 }
 
-my $ctx = test_context(hydra_config => qq|
-<webhooks>
-<github>
-secret = test
-</github>
-</webhooks>
-|);
+my $ctx = test_context();
 Catalyst::Test->import('Hydra');
 
 # Create a user to log in to
@@ -195,20 +188,16 @@ subtest "/api/push-github" => sub {
 my $jobsetinput = $jobset->jobsetinputs->create({name => "src", type => "git"});
 $jobsetinput->jobsetinputalts->create({altnr => 0, value => "https://github.com/OWNER/LEGACY-REPO.git"});
 
-my $payload = encode_json({
-repository => {
-owner => {
-name => "OWNER",
-},
-name => "LEGACY-REPO",
-}
-});
-my $signature = "sha256=" . hmac_sha256_hex($payload, 'test');
-
 my $req = POST '/api/push-github',
 "Content-Type" => "application/json",
-"X-Hub-Signature-256" => $signature,
-"Content" => $payload;
+"Content" => encode_json({
+repository => {
+owner => {
+name => "OWNER",
+},
+name => "LEGACY-REPO",
+}
+});
 
 my $response = request($req);
 ok($response->is_success, "The API enpdoint for triggering jobsets returns 200.");
@@ -225,20 +214,16 @@ subtest "/api/push-github" => sub {
 emailoverride => ""
 });
 
-my $payload = encode_json({
-repository => {
-owner => {
-name => "OWNER",
-},
-name => "FLAKE-REPO",
-}
-});
-my $signature = "sha256=" . hmac_sha256_hex($payload, 'test');
-
 my $req = POST '/api/push-github',
 "Content-Type" => "application/json",
-"X-Hub-Signature-256" => $signature,
-"Content" => $payload;
+"Content" => encode_json({
+repository => {
+owner => {
+name => "OWNER",
+},
+name => "FLAKE-REPO",
+}
+});
 
 my $response = request($req);
 ok($response->is_success, "The API enpdoint for triggering jobsets returns 200.");
@@ -1,209 +0,0 @@
-use strict;
-use warnings;
-use Setup;
-use Test2::V0;
-use Test2::Tools::Subtest qw(subtest_streamed);
-use HTTP::Request;
-use HTTP::Request::Common;
-use JSON::MaybeXS qw(decode_json encode_json);
-use Digest::SHA qw(hmac_sha256_hex);
-
-# Create webhook configuration
-my $github_secret = "github-test-secret-12345";
-my $github_secret_alt = "github-alternative-secret";
-my $gitea_secret = "gitea-test-secret-abcdef";
-
-# Create a temporary directory first to get the path
-use File::Temp;
-my $tmpdir = File::Temp->newdir(CLEANUP => 0);
-my $tmpdir_path = $tmpdir->dirname;
-
-# Write webhook secrets configuration before creating test context
-mkdir "$tmpdir_path/hydra-data";
-
-# Create webhook secrets configuration file
-my $webhook_config = qq|
-<github>
-secret = $github_secret
-secret = $github_secret_alt
-</github>
-<gitea>
-secret = $gitea_secret
-</gitea>
-|;
-write_file("$tmpdir_path/hydra-data/webhook-secrets.conf", $webhook_config);
-chmod 0600, "$tmpdir_path/hydra-data/webhook-secrets.conf";
-
-# Create test context with webhook configuration using include
-my $ctx = test_context(
-tmpdir => $tmpdir,
-hydra_config => qq|
-<webhooks>
-Include $tmpdir_path/hydra-data/webhook-secrets.conf
-</webhooks>
-|
-);
-
-# Import Catalyst::Test after test context is set up
-require Catalyst::Test;
-Catalyst::Test->import('Hydra');
-
-# Create a project and jobset for testing
-my $user = $ctx->db()->resultset('Users')->create({
-username => "webhook-test",
-emailaddress => 'webhook-test@example.org',
-password => ''
-});
-
-my $project = $ctx->db()->resultset('Projects')->create({
-name => "webhook-test",
-displayname => "webhook-test",
-owner => $user->username
-});
-
-my $jobset = $project->jobsets->create({
-name => "test-jobset",
-nixexprinput => "src",
-nixexprpath => "default.nix",
-emailoverride => ""
-});
-
-my $jobsetinput = $jobset->jobsetinputs->create({name => "src", type => "git"});
-$jobsetinput->jobsetinputalts->create({altnr => 0, value => "https://github.com/owner/repo.git"});
-
-# Create another jobset for Gitea
-my $jobset_gitea = $project->jobsets->create({
-name => "test-jobset-gitea",
-nixexprinput => "src",
-nixexprpath => "default.nix",
-emailoverride => ""
-});
-
-my $jobsetinput_gitea = $jobset_gitea->jobsetinputs->create({name => "src", type => "git"});
-$jobsetinput_gitea->jobsetinputalts->create({altnr => 0, value => "https://gitea.example.com/owner/repo.git"});
-
-subtest "GitHub webhook authentication" => sub {
-my $payload = encode_json({
-repository => {
-owner => { name => "owner" },
-name => "repo"
-}
-});
-
-subtest "without authentication - properly rejects" => sub {
-my $req = POST '/api/push-github',
-"Content-Type" => "application/json",
-"Content" => $payload;
-
-my $response = request($req);
-is($response->code, 401, "Unauthenticated request is rejected");
-
-my $data = decode_json($response->content);
-is($data->{error}, "Missing webhook signature", "Proper error message for missing signature");
-};
-
-subtest "with valid signature" => sub {
-my $signature = "sha256=" . hmac_sha256_hex($payload, $github_secret);
-
-my $req = POST '/api/push-github',
-"Content-Type" => "application/json",
-"X-Hub-Signature-256" => $signature,
-"Content" => $payload;
-
-my $response = request($req);
-is($response->code, 200, "Valid signature is accepted");
-
-if ($response->code != 200) {
-diag("Error response: " . $response->content);
-}
-
-my $data = decode_json($response->content);
-is($data->{jobsetsTriggered}, ["webhook-test:test-jobset"], "Jobset was triggered with valid authentication");
-};
-
-subtest "with invalid signature" => sub {
-my $signature = "sha256=" . hmac_sha256_hex($payload, "wrong-secret");
-
-my $req = POST '/api/push-github',
-"Content-Type" => "application/json",
-"X-Hub-Signature-256" => $signature,
-"Content" => $payload;
-
-my $response = request($req);
-is($response->code, 401, "Invalid signature is rejected");
-
-my $data = decode_json($response->content);
-is($data->{error}, "Invalid webhook signature", "Proper error message for invalid signature");
-};
-
-subtest "with second valid secret (multiple secrets configured)" => sub {
-my $signature = "sha256=" . hmac_sha256_hex($payload, $github_secret_alt);
-
-my $req = POST '/api/push-github',
-"Content-Type" => "application/json",
-"X-Hub-Signature-256" => $signature,
-"Content" => $payload;
-
-my $response = request($req);
-is($response->code, 200, "Second valid secret is accepted");
-};
-};
-
-subtest "Gitea webhook authentication" => sub {
-my $payload = encode_json({
-repository => {
-owner => { username => "owner" },
-name => "repo",
-clone_url => "https://gitea.example.com/owner/repo.git"
-}
-});
-
-subtest "without authentication - properly rejects" => sub {
-my $req = POST '/api/push-gitea',
-"Content-Type" => "application/json",
-"Content" => $payload;
-
-my $response = request($req);
-is($response->code, 401, "Unauthenticated request is rejected");
-
-my $data = decode_json($response->content);
-is($data->{error}, "Missing webhook signature", "Proper error message for missing signature");
-};
-
-subtest "with valid signature" => sub {
-# Note: Gitea doesn't use sha256= prefix
-my $signature = hmac_sha256_hex($payload, $gitea_secret);
-
-my $req = POST '/api/push-gitea',
-"Content-Type" => "application/json",
-"X-Gitea-Signature" => $signature,
-"Content" => $payload;
-
-my $response = request($req);
-is($response->code, 200, "Valid signature is accepted");
-
-if ($response->code != 200) {
-diag("Error response: " . $response->content);
-}
-
-my $data = decode_json($response->content);
-is($data->{jobsetsTriggered}, ["webhook-test:test-jobset-gitea"], "Jobset was triggered with valid authentication");
-};
-
-subtest "with invalid signature" => sub {
-my $signature = hmac_sha256_hex($payload, "wrong-secret");
-
-my $req = POST '/api/push-gitea',
-"Content-Type" => "application/json",
-"X-Gitea-Signature" => $signature,
-"Content" => $payload;
-
-my $response = request($req);
-is($response->code, 401, "Invalid signature is rejected");
-
-my $data = decode_json($response->content);
-is($data->{error}, "Invalid webhook signature", "Proper error message for invalid signature");
-};
-};
-
-done_testing;
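The deleted test above drives HMAC-SHA256 webhook authentication: GitHub sends an X-Hub-Signature-256 header of the form "sha256=<hex>", Gitea sends the bare hex digest in X-Gitea-Signature, and more than one secret may be configured at a time. A verification helper matching what the test expects could look like the sketch below; this is an illustrative assumption only, not the handler Hydra ships:

use Digest::SHA qw(hmac_sha256_hex);

# Hypothetical helper (not from the diff): check a GitHub-style signature header
# against a list of candidate secrets.
sub verify_github_signature {
    my ($payload, $header, @secrets) = @_;
    return 0 unless defined $header && $header =~ /^sha256=([0-9a-f]{64})$/;
    my $given = $1;
    for my $secret (@secrets) {
        my $expected = hmac_sha256_hex($payload, $secret);
        # Constant-time comparison: XOR the equal-length hex strings and
        # look for any non-NUL byte.
        next unless length($given) == length($expected);
        return 1 if ($given ^ $expected) !~ /[^\0]/;
    }
    return 0;
}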
@@ -32,9 +32,4 @@ subtest "/jobset/PROJECT/JOBSET/evals" => sub {
 ok($jobsetevals->is_success, "The page showing the jobset evals returns 200.");
 };
 
-subtest "/jobset/PROJECT/JOBSET/errors" => sub {
-my $jobsetevals = request(GET '/jobset/' . $project->name . '/' . $jobset->name . '/errors');
-ok($jobsetevals->is_success, "The page showing the jobset eval errors returns 200.");
-};
-
 done_testing;
Some files were not shown because too many files have changed in this diff.