feature/waybar #15

Merged
ahuston-0 merged 8 commits from feature/waybar into main 2025-03-15 22:43:03 -04:00
19 changed files with 522 additions and 545 deletions
Showing only changes of commit 02b8a6bc6c

.github/settings.yml vendored
View File

@ -1,204 +1,173 @@
# Have borrowed this config from nix-community/infra
repository:
  # See https://developer.github.com/v3/repos/#edit for all available settings.
  # The name of the repository. Changing this will rename the repository
  name: nix-dotfiles
  # A short description of the repository that will show up on GitHub
  description: RAD-Dev Infra
  # A URL with more information about the repository
  # homepage: "https://nix-community.org"
  # A comma-separated list of topics to set on the repository
  topics: "nixos"
  # Either `true` to make the repository private, or `false` to make it public.
  private: false
  # Either `true` to enable issues for this repository, `false` to disable them.
  has_issues: true
  # Either `true` to enable projects for this repository, or `false` to disable them.
  # If projects are disabled for the organization, passing `true` will cause an API error.
  has_projects: true
  # Either `true` to enable the wiki for this repository, `false` to disable it.
  has_wiki: false
  # Either `true` to enable downloads for this repository, `false` to disable them.
  has_downloads: false
  # Updates the default branch for this repository.
  default_branch: main
  # Either `true` to allow squash-merging pull requests, or `false` to prevent
  # squash-merging.
  allow_squash_merge: true
  # Either `true` to allow merging pull requests with a merge commit, or `false`
  # to prevent merging pull requests with merge commits.
  allow_merge_commit: false
  # Either `true` to allow rebase-merging pull requests, or `false` to prevent
  # rebase-merging.
  allow_rebase_merge: true
  # Either `true` to enable automatic deletion of branches on merge, or `false` to disable
  delete_branch_on_merge: true
  # Either `true` to enable automated security fixes, or `false` to disable
  # automated security fixes.
  enable_automated_security_fixes: true
  # Either `true` to enable vulnerability alerts, or `false` to disable
  # vulnerability alerts.
  enable_vulnerability_alerts: true
  allow_auto_merge: true
# Labels: define labels for Issues and Pull Requests
#
labels:
  - name: bug
    color: '#d73a4a'
    description: Something isn't working
  - name: CI/CD
    # If including a `#`, make sure to wrap it with quotes!
    color: '#0e8a16'
    description: Related to GH Actions or Hydra
  - name: documentation
    color: '#0075ca'
    description: Improvements or additions to documentation
  - name: duplicate
    color: '#cfd3d7'
    description: This issue or pull request already exists
  - name: enhancement
    color: '#a2eeef'
    description: New feature or request
  - name: good first issue
    color: '#7057ff'
    description: Good for newcomers
  - name: help wanted
    color: '#008672'
    description: Extra attention is needed
  - name: high priority
    color: '#BF480A'
    description: A major vulnerability was detected
  - name: invalid
    color: '#e4e669'
    description: This doesn't seem right
  - name: new user
    color: '#C302A1'
    description: A new user was added to the Flake
  - name: question
    color: '#d876e3'
    description: Further information is requested
  - name: wontfix
    color: '#ffffff'
    description: This will not be worked on
  - name: dependencies
    color: '#cb4ed5'
    description: Used for PRs related to flake.lock updates
  - name: automated
    color: '#42b528'
    description: PR was automatically generated (through a bot or CI/CD)
# Milestones: define milestones for Issues and Pull Requests
milestones:
  - title: Go-Live
    description: >-
      All requirements for official go-live: - Automated testing via Hydra/Actions - Automated deployments via Hydra/Actions - 90+% testing coverage - Functional formatter with custom rules - palatine-hill is fully stable, enough so that jeeves can be migrated
    # The state of the milestone. Either `open` or `closed`
    state: open
  - title: Jeeves Migration
    description: >-
      Test common use-cases for Jeeves - Quadro GPU support - Multi-GPU support - Plex support - Docker support - ZFS support
# Collaborators: give specific users access to this repository.
# See https://docs.github.com/en/rest/reference/repos#add-a-repository-collaborator for available options
collaborators:
  # - username: numtide-bot
  # Note: `permission` is only valid on organization-owned repositories.
  # The permission to grant the collaborator. Can be one of:
  # * `pull` - can pull, but not push to or administer this repository.
  # * `push` - can pull and push, but not administer this repository.
  # * `admin` - can pull, push and administer this repository.
  # * `maintain` - Recommended for project managers who need to manage the repository without access to sensitive or destructive actions.
  # * `triage` - Recommended for contributors who need to proactively manage issues and pull requests without write access.
  # permission: push
# See https://docs.github.com/en/rest/reference/teams#add-or-update-team-repository-permissions for available options
teams:
  # - name: admin
  # The permission to grant the team. Can be one of:
  # * `pull` - can pull, but not push to or administer this repository.
  # * `push` - can pull and push, but not administer this repository.
  # * `admin` - can pull, push and administer this repository.
  # * `maintain` - Recommended for project managers who need to manage the repository without access to sensitive or destructive actions.
  # * `triage` - Recommended for contributors who need to proactively manage issues and pull requests without write access.
  # permission: admin
branches:
  # gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/nix-community/infra/branches/master/protection
  # not available in the api yet
  # `Require merge queue`: true
  # `Merge method`: Rebase and merge
  # `Maximum pull requests to build`: 1
  # `Maximum pull requests to merge`: 1
  # defaults:
  # `Maximum pull requests to build`: 5
  # `Minimum pull requests to merge`: 1 or 5 minutes
  # `Maximum pull requests to merge`: 5
  # `Only merge non-failing pull requests`: true
  # `Consider check failed after`: 60 minutes
  - name: main
    # https://docs.github.com/en/rest/reference/repos#update-branch-protection
    # Branch Protection settings. Set to null to disable
    protection:
      # Required. Require at least one approving review on a pull request, before merging. Set to null to disable.
      # these settings are the same as manually enabling "Require a pull request before merging" but not setting any other restrictions
      required_pull_request_reviews:
        # # The number of approvals required. (1-6)
        required_approving_review_count: 1
        # # Dismiss approved reviews automatically when a new commit is pushed.
        dismiss_stale_reviews: true
        # # Blocks merge until code owners have reviewed.
        require_code_owner_reviews: false
        # # Specify which users and teams can dismiss pull request reviews. Pass an empty dismissal_restrictions object to disable. User and team dismissal_restrictions are only available for organization-owned repositories. Omit this parameter for personal repositories.
        # dismissal_restrictions:
        #   users: []
        #   teams: []
        require_last_push_approval: false
      # Required. Require status checks to pass before merging. Set to null to disable
      # required_status_checks:
        # Required. Require branches to be up to date before merging.
        # strict: false
        # Required. The list of status checks to require in order to merge into this branch
        # contexts:
        # - buildbot/nix-eval
      # Required. Enforce all configured restrictions for administrators. Set to true to enforce required status checks for repository administrators. Set to null to disable.
      enforce_admins: true
      # Disabled for bors to work
      required_linear_history: true
      # Required. Restrict who can push to this branch. Team and user restrictions are only available for organization-owned repositories. Set to null to disable.
      restrictions:
        apps: []
        # TODO: make a buildbot instance
        # users: ["nix-infra-bot"]
        teams: []

View File

@ -1,48 +1,47 @@
name: "Check Nix flake" name: "Check Nix flake"
on: on:
push: push:
branches: ["main"] branches: ["main"]
pull_request: pull_request:
branches: ["main"] branches: ["main"]
merge_group: merge_group:
jobs: jobs:
health-check: health-check:
name: "Perform Nix flake checks" name: "Perform Nix flake checks"
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
strategy: strategy:
matrix: matrix:
os: [ubuntu-latest] os: [ubuntu-latest]
steps: steps:
- uses: DeterminateSystems/nix-installer-action@main - uses: DeterminateSystems/nix-installer-action@main
- name: Setup Attic cache - name: Setup Attic cache
uses: ryanccn/attic-action@v0 uses: ryanccn/attic-action@v0
with: with:
endpoint: ${{ secrets.ATTIC_ENDPOINT }} endpoint: ${{ secrets.ATTIC_ENDPOINT }}
cache: ${{ secrets.ATTIC_CACHE }} cache: ${{ secrets.ATTIC_CACHE }}
token: ${{ secrets.ATTIC_TOKEN }} token: ${{ secrets.ATTIC_TOKEN }}
skip-push: "true" skip-push: "true"
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- run: nix flake check --accept-flake-config - run: nix flake check --accept-flake-config
- run: nix ./utils/attic-push.bash - run: nix ./utils/attic-push.bash
build-checks: build-checks:
name: "Build nix outputs" name: "Build nix outputs"
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
strategy: strategy:
matrix: matrix:
os: [ubuntu-latest] os: [ubuntu-latest]
steps: steps:
- uses: DeterminateSystems/nix-installer-action@main - uses: DeterminateSystems/nix-installer-action@main
- name: Setup Attic cache - name: Setup Attic cache
uses: ryanccn/attic-action@v0 uses: ryanccn/attic-action@v0
with: with:
endpoint: ${{ secrets.ATTIC_ENDPOINT }} endpoint: ${{ secrets.ATTIC_ENDPOINT }}
cache: ${{ secrets.ATTIC_CACHE }} cache: ${{ secrets.ATTIC_CACHE }}
token: ${{ secrets.ATTIC_TOKEN }} token: ${{ secrets.ATTIC_TOKEN }}
skip-push: "true" skip-push: "true"
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Build all outputs - name: Build all outputs
run: nix run git+https://nayeonie.com/ahuston-0/flake-update-diff -- --build . run: nix run git+https://nayeonie.com/ahuston-0/flake-update-diff -- --build .
- name: Push to Attic - name: Push to Attic
run: nix ./utils/attic-push.bash run: nix ./utils/attic-push.bash
continue-on-error: true continue-on-error: true

View File

@ -1,116 +1,112 @@
name: "Update flakes" name: "Update flakes"
on: on:
repository_dispatch: repository_dispatch:
workflow_dispatch: workflow_dispatch:
schedule: schedule:
- cron: "00 12 * * *" - cron: "00 12 * * *"
jobs: jobs:
update_lockfile: update_lockfile:
runs-on: ubuntu-latest runs-on: ubuntu-latest
#if: github.ref == 'refs/heads/main' # ensure workflow_dispatch only runs on main #if: github.ref == 'refs/heads/main' # ensure workflow_dispatch only runs on main
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Install nix - name: Install nix
uses: https://github.com/DeterminateSystems/nix-installer-action@main uses: https://github.com/DeterminateSystems/nix-installer-action@main
- name: Setup Attic cache - name: Setup Attic cache
uses: ryanccn/attic-action@v0 uses: ryanccn/attic-action@v0
with: with:
endpoint: ${{ secrets.ATTIC_ENDPOINT }} endpoint: ${{ secrets.ATTIC_ENDPOINT }}
cache: ${{ secrets.ATTIC_CACHE }} cache: ${{ secrets.ATTIC_CACHE }}
token: ${{ secrets.ATTIC_TOKEN }} token: ${{ secrets.ATTIC_TOKEN }}
skip-push: "true" skip-push: "true"
- name: Get pre-snapshot of evaluations - name: Get pre-snapshot of evaluations
run: nix ./utils/eval-to-drv.sh pre run: nix ./utils/eval-to-drv.sh pre
- name: Update flake.lock - name: Update flake.lock
id: update id: update
run: | run: |
nix flake update 2> >(tee /dev/stderr) | awk ' nix flake update 2> >(tee /dev/stderr) | awk '
/^• Updated input/ {in_update = 1; print; next} /^• Updated input/ {in_update = 1; print; next}
in_update && !/^warning:/ {print} in_update && !/^warning:/ {print}
/^$/ {in_update = 0} /^$/ {in_update = 0}
' > update.log ' > update.log
echo "UPDATE_LOG<<EOF" >> $GITHUB_ENV echo "UPDATE_LOG<<EOF" >> $GITHUB_ENV
cat update.log >> $GITHUB_ENV cat update.log >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV echo "EOF" >> $GITHUB_ENV
rm update.log rm update.log
- name: Get post-snapshot of evaluations - name: Get post-snapshot of evaluations
run: nix ./utils/eval-to-drv.sh post run: nix ./utils/eval-to-drv.sh post
- name: Calculate diff - name: Calculate diff
run: nix ./utils/diff-evals.sh run: nix ./utils/diff-evals.sh
- name: Read file contents - name: Read file contents
id: read_file id: read_file
uses: guibranco/github-file-reader-action-v2@latest uses: guibranco/github-file-reader-action-v2@latest
with: with:
path: "post-diff" path: "post-diff"
- name: Write PR body template - name: Write PR body template
uses: https://github.com/DamianReeves/write-file-action@v1.3 uses: https://github.com/DamianReeves/write-file-action@v1.3
with: with:
path: pr_body.template path: pr_body.template
contents: | contents: |
- The following Nix Flake inputs were updated: - The following Nix Flake inputs were updated:
``` ```
${{ env.UPDATE_LOG }} ${{ env.UPDATE_LOG }}
``` ```
``` ```
${{ steps.read_file.outputs.contents }} ${{ steps.read_file.outputs.contents }}
``` ```
Auto-generated by [update.yml][1] with the help of Auto-generated by [update.yml][1] with the help of
[create-pull-request][2]. [create-pull-request][2].
[1]: https://nayeonie.com/ahuston-0/nix-dotfiles/src/branch/main/.github/workflows/flake-update.yml [1]: https://nayeonie.com/ahuston-0/nix-dotfiles/src/branch/main/.github/workflows/flake-update.yml
[2]: https://forgejo.stefka.eu/jiriks74/create-pull-request [2]: https://forgejo.stefka.eu/jiriks74/create-pull-request
- name: Generate PR body
uses: pedrolamas/handlebars-action@v2.4.0 # v2.4.0
with:
files: "pr_body.template"
output-filename: "pr_body.md"
- name: Save PR body
id: pr_body
uses: juliangruber/read-file-action@v1
with:
path: "pr_body.md"
- name: Remove temporary files
run: |
rm pr_body.template
rm pr_body.md
rm pre.json
rm post.json
rm post-diff
- name: Create Pull Request
id: create-pull-request
# uses: https://forgejo.stefka.eu/jiriks74/create-pull-request@7174d368c2e4450dea17b297819eb28ae93ee645
uses: https://nayeonie.com/ahuston-0/create-pull-request@main
with:
token: ${{ secrets.GH_TOKEN_FOR_UPDATES }}
body: ${{ steps.pr_body.outputs.content }}
author: '"github-actions[bot]" <github-actions[bot]@users.noreply.github.com>'
title: 'automated: Update `flake.lock`'
commit-message: |
automated: Update `flake.lock`
- name: Generate PR body ${{ steps.pr_body.outputs.content }}
uses: pedrolamas/handlebars-action@v2.4.0 # v2.4.0 branch: update-flake-lock
with: delete-branch: true
files: "pr_body.template" pr-labels: | # Labels to be set on the PR
output-filename: "pr_body.md" dependencies
- name: Save PR body automated
id: pr_body - name: Push to Attic
uses: juliangruber/read-file-action@v1 run: nix ./utils/attic-push.bash
with: continue-on-error: true
path: "pr_body.md" - name: Print PR number
run: |
- name: Remove temporary files echo "Pull request number is ${{ steps.create-pull-request.outputs.pull-request-number }}."
run: | echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}"
rm pr_body.template
rm pr_body.md
rm pre.json
rm post.json
rm post-diff
- name: Create Pull Request
id: create-pull-request
# uses: https://forgejo.stefka.eu/jiriks74/create-pull-request@7174d368c2e4450dea17b297819eb28ae93ee645
uses: https://nayeonie.com/ahuston-0/create-pull-request@main
with:
token: ${{ secrets.GH_TOKEN_FOR_UPDATES }}
body: ${{ steps.pr_body.outputs.content }}
author: '"github-actions[bot]" <github-actions[bot]@users.noreply.github.com>'
title: 'automated: Update `flake.lock`'
commit-message: |
automated: Update `flake.lock`
${{ steps.pr_body.outputs.content }}
branch: update-flake-lock
delete-branch: true
pr-labels: | # Labels to be set on the PR
dependencies
automated
- name: Push to Attic
run: nix ./utils/attic-push.bash
continue-on-error: true
- name: Print PR number
run: |
echo "Pull request number is ${{ steps.create-pull-request.outputs.pull-request-number }}."
echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}"
permissions: permissions:
pull-requests: write pull-requests: write
contents: write contents: write
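The "Update flake.lock" step above combines two tricks: nix prints its update report on stderr, which the `2> >(tee /dev/stderr)` redirection keeps visible while feeding it to awk, and the filtered result is handed to later steps as a multi-line value using the `NAME<<EOF` delimiter syntax for `$GITHUB_ENV`. A minimal local sketch of that step, assuming bash and a flake in the working directory (outside of Actions, a temp file stands in for `GITHUB_ENV`):

#!/usr/bin/env bash
# Sketch of the workflow's "Update flake.lock" step, runnable outside Actions.
set -euo pipefail
GITHUB_ENV="${GITHUB_ENV:-$(mktemp)}"

# Keep only the "Updated input ..." blocks from nix's stderr, dropping warnings.
nix flake update 2> >(tee /dev/stderr) | awk '
  /^• Updated input/ {in_update = 1; print; next}
  in_update && !/^warning:/ {print}
  /^$/ {in_update = 0}
' > update.log

# Expose the (possibly multi-line) log to later steps via the delimiter syntax.
{
  echo "UPDATE_LOG<<EOF"
  cat update.log
  echo "EOF"
} >> "$GITHUB_ENV"
rm update.log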

View File

@ -1,17 +1,16 @@
name: "Check flake.lock" name: "Check flake.lock"
on: on:
push: push:
branches: ["main"] branches: ["main"]
pull_request: pull_request:
branches: ["main"] branches: ["main"]
merge_group: merge_group:
jobs: jobs:
health-check: health-check:
name: "Check health of `flake.lock`" name: "Check health of `flake.lock`"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: DeterminateSystems/flake-checker-action@main - uses: DeterminateSystems/flake-checker-action@main
with: with:
fail-mode: true fail-mode: true

View File

@ -1,26 +1,25 @@
name: "Check Nix formatting" name: "Check Nix formatting"
on: on:
push: push:
branches: ["main"] branches: ["main"]
pull_request: pull_request:
branches: ["main"] branches: ["main"]
merge_group: merge_group:
jobs: jobs:
health-check: health-check:
name: "Perform Nix format checks" name: "Perform Nix format checks"
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: DeterminateSystems/nix-installer-action@main - uses: DeterminateSystems/nix-installer-action@main
- name: Setup Attic cache - name: Setup Attic cache
uses: ryanccn/attic-action@v0 uses: ryanccn/attic-action@v0
with: with:
endpoint: ${{ secrets.ATTIC_ENDPOINT }} endpoint: ${{ secrets.ATTIC_ENDPOINT }}
cache: ${{ secrets.ATTIC_CACHE }} cache: ${{ secrets.ATTIC_CACHE }}
token: ${{ secrets.ATTIC_TOKEN }} token: ${{ secrets.ATTIC_TOKEN }}
skip-push: "true" skip-push: "true"
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- run: nix fmt -- --check . - run: nix fmt -- --check .
- name: Push to Attic - name: Push to Attic
run: nix ./utils/attic-push.bash run: nix ./utils/attic-push.bash
continue-on-error: true continue-on-error: true

View File

@ -1,51 +1,46 @@
keys:
  # The PGP keys in keys/
  - &admin_alice 5EFFB75F7C9B74EAA5C4637547940175096C1330
  # Generate AGE keys from SSH keys with:
  # ssh-keygen -A
  # nix-shell -p ssh-to-age --run 'cat /etc/ssh/ssh_host_ed25519_key.pub | ssh-to-age'
  # cspell:disable
  - &artemision age1jd2dcpykagz20kpk2kkchte3augqncwfn6nywursx0dkfyze6feqdzxkq2
  - &artemision-home age1t29a6z6cfy8m3cnc8uva0ey833vhcppue8psyumts7mtyf0zufcqvfshuc
  #- &palatine-hill age1z8q02wdp0a2ep5uuffgfeqlfam4ztl95frhw5qhnn6knn0rrmcnqk5evej
  - &palatine-hill age1qw5k8h72k3fjg5gmlxx8q8gwlc2k6n6u08d8hdzpm2pk9r0fnfxsmw33nh
  # cspell:enable
servers: &servers
  - *palatine-hill
# add new users by executing: sops users/<user>/secrets.yaml
# then have someone already in the repo run the below
#
# update keys by executing: sops updatekeys secrets.yaml
# note: add .* before \.yaml if you'd like to use the mergetool config
creation_rules:
  - path_regex: users/alice/secrets.*\.yaml$
    key_groups:
      - pgp:
          - *admin_alice
        age:
          - *palatine-hill
          - *artemision
          - *artemision-home
  - path_regex: systems/palatine-hill/secrets.*\.yaml$
    key_groups:
      - pgp:
          - *admin_alice
        age:
          - *palatine-hill
  - path_regex: systems/artemision/secrets.*\.yaml$
    key_groups:
      - pgp:
          - *admin_alice
        age:
          - *artemision
  - path_regex: systems/palatine-hill/docker/wg/.*\.conf$
    key_groups:
      - pgp:
          - *admin_alice
        age:
          - *palatine-hill
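The comments in this file double as the onboarding procedure for a new machine. A rough sketch of that flow, with `newhost` as a placeholder host name (the real paths follow the `systems/<host>/secrets.*.yaml` rules above):

# 1. On the new machine, derive an age recipient from its SSH host key.
sudo ssh-keygen -A
nix-shell -p ssh-to-age --run 'cat /etc/ssh/ssh_host_ed25519_key.pub | ssh-to-age'
# 2. Add the printed key as a new anchor under `keys:` (and to `servers` if it is a server),
#    add a creation rule for the host, then create its secrets file.
sops systems/newhost/secrets.yaml
# 3. Someone whose key is already listed re-encrypts every existing file whose
#    creation rule now includes the new recipient.
sops updatekeys <affected secrets.yaml>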

View File

@ -1,5 +1,7 @@
{
  "cSpell.enableFiletypes": [
    "nix"
  ],
  "cSpell.words": [
    "aarch",
    "abmlevel",

View File

@ -40,12 +40,12 @@ and will eventually trip a check when merging to main.
| Branch Name | Use Case |
|------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| main | protected branch which all machines pull from, do not try to push directly |
| feature/\<item> | \<item> is a new feature added to the repo, for personal or common use |
| fixup/\<item> | \<item> is a non-urgent bug, PRs merging from these branches should be merged when possible, but are not considered mission-critical |
| hotfix/\<item> | \<item> is a mission-critical bug, either affecting all users or a breaking change on a user's machines. These PRs should be reviewed ASAP. This is automatically subject to the [Critical Issues](#critical-issues) process |
| urgent/\<item> | Accepted as an alias for the above, due to devs coming from multiple standards and the criticality of these issues |
| exp/\<item> | \<item> is a non-critical experiment. This is used for shipping around potential new features or fixes to multiple branches |
| merge/\<item> | \<item> is a temporary branch and should never be merged directly to main. This is solely used for addressing merge conflicts which are too complex to be merged directly on branch |
### Review Process
@ -94,11 +94,11 @@ rules.
  PR has been tested on at least one machine
- Issues which bypass the quorum process must have a second reviewer tagged
- All critical issues which bypass the approval process must have an RCA issue
  opened and the RCA logged into the `inc/` folder
  - The second reviewer has 2 weeks to retroactively review and approve the PR
  - If the retro does not happen in the given window, an issue shall be opened
    to either re-review the PR or to revert and replace the fix with a
    permanent solution
- Critical issues must be tagged to `Nix Flake Features` project, and must have
  a priority of `High` and an estimate tagged. Start and end date are not needed

View File

@ -1,9 +1,9 @@
#!/usr/bin/env nix
#! nix shell nixpkgs#bash nixpkgs#git --command bash

set -o errexit  # abort on nonzero exitstatus
set -o nounset  # abort on unbound variable
set -o pipefail # don't hide errors within pipes

PROCEED="N"
@ -50,60 +50,58 @@ GITBASE="systems"
FEATUREBRANCH="feature/adding-$MACHINENAME"

if [ $PROCEED != "Y" ]; then
  echo "PROCEED is not set correctly, please validate the below partitions and update the script accordingly"
  lsblk -ao NAME,FSTYPE,FSSIZE,FSUSED,SIZE,MOUNTPOINT
fi

if [ $CREATEPARTS = "Y" ]; then
  # Create partition table
  sudo parted "/dev/$DRIVE" -- mklabel gpt

  # Create boot part
  sudo parted "/dev/$DRIVE" -- mkpart ESP fat32 1MB 1024MB
  sudo parted "/dev/$DRIVE" -- set 1 esp on
  sudo mkfs.fat -F 32 -n NIXBOOT "/dev/${DRIVE}1"

  # Create luks part
  sudo parted "/dev/$DRIVE" -- mkpart primary ext4 1024MB 100%
  sudo parted "/dev/$DRIVE" -- set 2 lvm on

  LUKSPART="nixos-pv"
  sudo cryptsetup luksFormat "/dev/${DRIVE}p2"
  sudo cryptsetup luksOpen "/dev/${DRIVE}p2" "$LUKSPART"

  # Create lvm part
  sudo pvcreate "/dev/mapper/$LUKSPART"
  sudo pvresize "/dev/mapper/$LUKSPART"
  sudo pvdisplay

  # Create volume group
  sudo vgcreate "$VOLGROUP" "/dev/mapper/$LUKSPART"
  sudo vgchange -a y "$VOLGROUP"
  sudo vgdisplay

  # Create swap part on LVM
  if [ $SWAPSIZE != 0 ]; then
    sudo lvcreate -L "$SWAPSIZE" "$VOLGROUP" -n swap
    sudo mkswap -L NIXSWAP -c "$SWAPPATH"
  fi

  # Create home part on LVM, leaving plenty of room for snapshots
  sudo lvcreate -l 50%FREE "$VOLGROUP" -n home
  sudo mkfs.ext4 -L NIXHOME -c "$HOMEPATH"

  # Create root part on LVM, keeping in mind most data will be on /home or /nix
  sudo lvcreate -L 5G "$VOLGROUP" -n root
  sudo mkfs.ext4 -L NIXROOT -c "$ROOTPATH"

  # Create nix part on LVM
  sudo lvcreate -L 100G "$VOLGROUP" -n nix-store
  sudo mkfs.ext4 -L NIXSTORE -c "$NIXSTOREPATH"

  sudo lvdisplay
  lsblk -ao NAME,FSTYPE,FSSIZE,FSUSED,SIZE,MOUNTPOINT
fi

# Mount partitions
@ -116,7 +114,7 @@ sudo mount $BOOTPART /mnt/boot
# Enable swap if SWAPSIZE is non-zero
if [ $SWAPSIZE != 0 ]; then
  sudo swapon "/dev/$VOLGROUP/swap"
fi

# Clone the repo
@ -135,31 +133,31 @@ read -r -p "get this into github so you can check everything in, then hit enter
cat "$DOTS/id_ed25519_ghdeploy.pub" cat "$DOTS/id_ed25519_ghdeploy.pub"
if [ $SOPS == "Y" ]; then if [ $SOPS == "Y" ]; then
# Create ssh host-keys # Create ssh host-keys
sudo ssh-keygen -A sudo ssh-keygen -A
sudo mkdir -p /mnt/etc/ssh sudo mkdir -p /mnt/etc/ssh
sudo cp "/etc/ssh/ssh_host_*" /mnt/etc/ssh sudo cp "/etc/ssh/ssh_host_*" /mnt/etc/ssh
# Get line where AGE comment is and insert new AGE key two lines down # Get line where AGE comment is and insert new AGE key two lines down
AGELINE=$(grep "Generate AGE keys from SSH keys with" "$DOTS/.sops.yaml" -n | awk -F ':' '{print ($1+2)}') AGELINE=$(grep "Generate AGE keys from SSH keys with" "$DOTS/.sops.yaml" -n | awk -F ':' '{print ($1+2)}')
AGEKEY=$(nix-shell -p ssh-to-age --run 'cat /etc/ssh/ssh_host_ed25519_key.pub | ssh-to-age') AGEKEY=$(nix-shell -p ssh-to-age --run 'cat /etc/ssh/ssh_host_ed25519_key.pub | ssh-to-age')
sudo sed -i "${AGELINE}i\\ - &${MACHINENAME} $AGEKEY\\" "$DOTS/.sops.yaml" sudo sed -i "${AGELINE}i\\ - &${MACHINENAME} $AGEKEY\\" "$DOTS/.sops.yaml"
# Add server name # Add server name
SERVERLINE=$(grep 'servers: &servers' "$DOTS/.sops.yaml" -n | awk -F ':' '{print ($1+1)}') SERVERLINE=$(grep 'servers: &servers' "$DOTS/.sops.yaml" -n | awk -F ':' '{print ($1+1)}')
sudo sed -i "${SERVERLINE}i\\ - *${MACHINENAME}\\" "$DOTS/.sops.yaml" sudo sed -i "${SERVERLINE}i\\ - *${MACHINENAME}\\" "$DOTS/.sops.yaml"
# Add creation rules # Add creation rules
CREATIONLINE=$(grep 'creation_rules' "$DOTS/.sops.yaml" -n | awk -F ':' '{print ($1+1)}') CREATIONLINE=$(grep 'creation_rules' "$DOTS/.sops.yaml" -n | awk -F ':' '{print ($1+1)}')
# TODO: below was not working when last attempted # TODO: below was not working when last attempted
read -r -d '' PATHRULE <<-EOF read -r -d '' PATHRULE <<-EOF
- path_regex: $GITBASE/$MACHINENAME/secrets\.yaml$ - path_regex: $GITBASE/$MACHINENAME/secrets\.yaml$
key_groups: key_groups:
- pgp: *$OWNERORADMINS - pgp: *$OWNERORADMINS
age: age:
- *$MACHINENAME - *$MACHINENAME
EOF EOF
sudo sed -i "${CREATIONLINE}i\\${PATHRULE}\\" "$DOTS/.sops.yaml" sudo sed -i "${CREATIONLINE}i\\${PATHRULE}\\" "$DOTS/.sops.yaml"
fi fi
read -r -p "press enter to continue" read -r -p "press enter to continue"

View File

@ -2,9 +2,9 @@
#! nix shell nixpkgs#bash nixpkgs#findutils nixpkgs#attic-client --command bash

sync_directories=(
  /ZFS/ZFS-primary/hydra
)

for dir in "${sync_directories[@]}"; do
  find "$dir" -regex ".*\.drv$" -exec attic push cache-nix-dot '{}' \;
done

View File

@ -6,8 +6,8 @@ outdated_msg="Project code is out of date and needs to be upgraded. To remedy th
label="$1" label="$1"
label_val="$2" label_val="$2"
if (( $# != 2 )); then if (($# != 2)); then
echo "usage: $0 label label_value" echo "usage: $0 label label_value"
fi fi
containers=$(docker ps --format '{{.Names}}' -f "label=${label}=${label_val}") containers=$(docker ps --format '{{.Names}}' -f "label=${label}=${label_val}")

View File

@ -12,3 +12,21 @@ command = "nixfmt"
#options = []
# Glob pattern of files to include
includes = [ "*.nix" ]
[formatter.jsonfmt]
command = "jsonfmt"
excludes = []
includes = ["*.json"]
options = ["-w"]
[formatter.shfmt]
command = "shfmt"
excludes = []
includes = ["*.sh", "*.bash", "*.envrc", "*.envrc.*"]
options = ["-i", "2", "-s", "-w"]
[formatter.yamlfmt]
command = "yamlfmt"
excludes = []
includes = ["*.yaml", "*.yml"]
options = ["-formatter","indent=4"]

View File

@ -11,18 +11,16 @@ set -e
# | jq 'map(.key) | join("\n")' | sed -E -e 's/\\n/\n/g;s/^"//g;s/"$//g')

# retrieve all paths
nix_paths=$(nix path-info --json --all --closure-size |
  jq 'map_values(.closureSize | select(true)) | to_entries | sort_by(.value)' |
  jq 'map(.key) | join("\n")' | sed -E -e 's/\\n/\n/g;s/^"//g;s/"$//g')

readarray -t nix_path_array < <(echo "$nix_paths")

batchsize=1000
for ((i = 0; i < ${#nix_path_array[@]}; i += batchsize)); do
  part=("${nix_path_array[@]:i:batchsize}")
  attic push nix-cache "${part[@]}"
done
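The loop relies on bash array slicing, `${array[@]:offset:length}`, so each `attic push` receives at most `batchsize` store paths and the argument list stays bounded. A toy illustration of the idiom (the paths are placeholders):

paths=(/nix/store/a /nix/store/b /nix/store/c /nix/store/d /nix/store/e)
batchsize=2
for ((i = 0; i < ${#paths[@]}; i += batchsize)); do
  chunk=("${paths[@]:i:batchsize}")
  echo "would push: ${chunk[*]}" # prints batches of 2, 2, then 1
done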

View File

@ -1,8 +1,8 @@
#!/usr/bin/env bash

if (($# != 3)); then
  echo "usage: $0 <cache/cache group> <cache pattern> <token type>"
  exit 1
fi

cache="$1"
@ -10,27 +10,27 @@ cache_pattern="$2"
token_type="$3" token_type="$3"
case $token_type in case $token_type in
"cache-creator") "cache-creator")
atticd-atticadm make-token --sub "$cache-cache-creator" --validity "1y" \ atticd-atticadm make-token --sub "$cache-cache-creator" --validity "1y" \
--pull "$cache_pattern" --push "$cache_pattern" --delete "$cache_pattern" \ --pull "$cache_pattern" --push "$cache_pattern" --delete "$cache_pattern" \
--create-cache "$cache_pattern" --configure-cache "$cache_pattern" \ --create-cache "$cache_pattern" --configure-cache "$cache_pattern" \
--configure-cache-retention "$cache_pattern" --destroy-cache "$cache_pattern" --configure-cache-retention "$cache_pattern" --destroy-cache "$cache_pattern"
;; ;;
"admin") "admin")
atticd-atticadm make-token --sub "$cache-admin" --validity "1y" --pull "$cache_pattern" \ atticd-atticadm make-token --sub "$cache-admin" --validity "1y" --pull "$cache_pattern" \
--push "$cache_pattern" --configure-cache "$cache_pattern" \ --push "$cache_pattern" --configure-cache "$cache_pattern" \
--configure-cache-retention "$cache_pattern" --configure-cache-retention "$cache_pattern"
;; ;;
"writer") "writer")
atticd-atticadm make-token --sub "$cache-writer" --validity "1y" --pull "$cache_pattern" \ atticd-atticadm make-token --sub "$cache-writer" --validity "1y" --pull "$cache_pattern" \
--push "$cache_pattern" --push "$cache_pattern"
;; ;;
"reader") "reader")
atticd-atticadm make-token --sub "$cache-reader" --validity "1y" --pull "$cache_pattern" atticd-atticadm make-token --sub "$cache-reader" --validity "1y" --pull "$cache_pattern"
;; ;;
*) *)
echo "invalid token type: $token_type" echo "invalid token type: $token_type"
echo "available options: cache-creator, admin, writer, reader" echo "available options: cache-creator, admin, writer, reader"
exit 1 exit 1
;; ;;
esac esac
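Example invocations of the token helper, with placeholder names since neither the script's path nor the production cache pattern appears in this diff (`cache-nix-dot` is borrowed from the push script above for illustration):

./make-token.sh cache-nix-dot 'cache-nix-dot*' reader # pull-only token, valid for 1y
./make-token.sh cache-nix-dot 'cache-nix-dot*' writer # pull + push token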

View File

@ -8,8 +8,8 @@ set -v
set -e

if [ "$#" -ne 1 ]; then
  echo "$0 (pre|post)"
  exit 1
fi

script_path=$(dirname "$(readlink -f $0)")

View File

@ -14,10 +14,10 @@ parent_path=$(dirname "$script_path")
# relpath is the relative path to the parent_path where you want the file written
# format: <image name>,<image tag>,<image architecture>,<os>,<relpath>
images=(
  "nextcloud,apache,amd64,linux,/systems/palatine-hill/docker/nextcloud-image/nextcloud-apache.nix"
)

IFS=","
while read -r name tag arch os relpath; do
  nix-prefetch-docker --image-name "$name" --image-tag "$tag" --arch "$arch" --os "$os" --quiet >"$parent_path/$relpath"
  git --no-pager diff "$parent_path/$relpath"
done <<<"${images[@]}"

View File

@ -2,7 +2,10 @@
# Rename CLI parameters to friendlier names
# https://git-scm.com/docs/gitattributes#_defining_a_custom_merge_driver
base="$1"
local_="$2"
remote="$3"
merged="$4"

# Load the mergetool scripts
TOOL_MODE=merge
@ -20,7 +23,7 @@ merged_decrypted="${base_decrypted/_BASE_/_MERGED_}"
backup_decrypted="${base_decrypted/_BASE_/_BACKUP_}"

# If anything goes wrong, then delete our decrypted files
handle_trap_exit() {
  rm $base_decrypted || true
  rm $local_decrypted || true
  rm $remote_decrypted || true
@ -30,12 +33,12 @@ handle_trap_exit () {
trap handle_trap_exit EXIT

# Decrypt our file contents
sops --decrypt --show-master-keys "$base" >"$base_decrypted"
sops --decrypt --show-master-keys "$local_" >"$local_decrypted"
sops --decrypt --show-master-keys "$remote" >"$remote_decrypted"

# Create a merge-diff to compare against
git merge-file -p "$local_decrypted" "$base_decrypted" "$remote_decrypted" >"$merged_decrypted"
cp "$merged_decrypted" "$backup_decrypted"

# Set up variables for the mergetool
@ -48,7 +51,7 @@ MERGED="$merged_decrypted"
BACKUP="$backup_decrypted"

# Override `check_unchanged` with a custom script
check_unchanged() {
  # If the contents haven't changed, then fail
  if test "$MERGED" -nt "$BACKUP"; then
    return 0
@ -61,5 +64,4 @@ check_unchanged () {
run_merge_tool "${mergetool}" true

# Re-encrypt content
sops --encrypt "$merged_decrypted" >"$merged"

View File

@ -6,7 +6,10 @@ set -x
# Rename our variables to friendlier equivalents
# https://git-scm.com/docs/gitattributes#_defining_a_custom_merge_driver
base="$1"
local_="$2"
remote="$3"
merged="$4"

echo "$base"
echo "$local_"
@ -18,7 +21,7 @@ echo "$merged"
mergetool="$(git config --get merge.tool)"
GIT_DIR="$(git --exec-path)"
if test "$mergetool" = ""; then
  echo 'No default `merge.tool` was set for `git`. Please set one via `git config --set merge.tool <tool>`' 1>&2
  exit 1
fi
@ -32,7 +35,7 @@ merged_decrypted="${base_decrypted/_BASE_/_MERGED_}"
backup_decrypted="${base_decrypted/_BASE_/_BACKUP_}"

# If anything goes wrong, then delete our decrypted files
handle_trap_exit() {
  rm $base_decrypted || true
  rm $local_decrypted || true
  rm $remote_decrypted || true
@ -42,13 +45,13 @@ handle_trap_exit () {
trap handle_trap_exit EXIT

# Decrypt our file contents
sops --decrypt --show-master-keys "$base" >"$base_decrypted"
sops --decrypt --show-master-keys "$local_" >"$local_decrypted"
sops --decrypt --show-master-keys "$remote" >"$remote_decrypted"

# Create a merge-diff to compare against
set +e
git merge-file -p "$local_decrypted" "$base_decrypted" "$remote_decrypted" >"$merged_decrypted"
set -e

cp "$merged_decrypted" "$backup_decrypted"
@ -66,7 +69,7 @@ source "$GIT_DIR/git-mergetool--lib"
source "$GIT_DIR/mergetools/$mergetool" source "$GIT_DIR/mergetools/$mergetool"
# Override `check_unchanged` with a custom script # Override `check_unchanged` with a custom script
check_unchanged () { check_unchanged() {
# If the contents haven't changed, then fail # If the contents haven't changed, then fail
if test "$MERGED" -nt "$BACKUP"; then if test "$MERGED" -nt "$BACKUP"; then
return 0 return 0
@ -82,5 +85,4 @@ merge_cmd
set -eu

# Re-encrypt content
sops --encrypt "$merged_decrypted" >"$merged"
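Both scripts reference git's custom merge driver mechanism for how they are meant to be invoked. One plausible wiring, sketched here with placeholder names and paths (none of this configuration appears in the diff itself): register the script as a merge driver in git config and map the encrypted files to it in .gitattributes.

# .git/config or ~/.gitconfig (driver name and script path are placeholders)
[merge "sops"]
	name = sops-aware three-way merge
	# %O/%A/%B/%P roughly correspond to the script's base/local_/remote/merged arguments
	driver = ./utils/sops-merge-tool.sh %O %A %B %P

# .gitattributes (pattern is a placeholder)
secrets.yaml merge=sops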