
Compare commits


20 commits

Author SHA1 Message Date
Pierre Penninckx
7f2aa36d27
flake.lock: Update (#343)
Automated changes by the
[update-flake-lock](https://github.com/DeterminateSystems/update-flake-lock)
GitHub Action.

```
Flake lock file updates:

• Updated input 'nixpkgs':
    'github:nixos/nixpkgs/807e9154dcb16384b1b765ebe9cd2bba2ac287fd?narHash=sha256-l253w0XMT8nWHGXuXqyiIC/bMvh1VRszGXgdpQlfhvU%3D' (2024-10-29)
  → 'github:nixos/nixpkgs/76612b17c0ce71689921ca12d9ffdc9c23ce40b2?narHash=sha256-IigrKK3vYRpUu%2BHEjPL/phrfh7Ox881er1UEsZvw9Q4%3D' (2024-11-09)
```

### Running GitHub Actions on this PR

GitHub Actions will not run workflows on pull requests which are opened
by a GitHub Action.

To run GitHub Actions workflows on this PR, run:

```sh
git branch -D update_flake_lock_action
git fetch origin
git checkout update_flake_lock_action
git commit --amend --no-edit
git push origin update_flake_lock_action --force
```

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2024-11-11 01:41:46 +00:00
Pierre Penninckx
575c310ee6
flake.lock: Update (#342)
Automated changes by the
[update-flake-lock](https://github.com/DeterminateSystems/update-flake-lock)
GitHub Action.

```
Flake lock file updates:

• Updated input 'nixpkgs':
    'github:nixos/nixpkgs/18536bf04cd71abd345f9579158841376fdd0c5a?narHash=sha256-RP%2BOQ6koQQLX5nw0NmcDrzvGL8HDLnyXt/jHhL1jwjM%3D' (2024-10-25)
  → 'github:nixos/nixpkgs/807e9154dcb16384b1b765ebe9cd2bba2ac287fd?narHash=sha256-l253w0XMT8nWHGXuXqyiIC/bMvh1VRszGXgdpQlfhvU%3D' (2024-10-29)
```

### Running GitHub Actions on this PR

GitHub Actions will not run workflows on pull requests which are opened
by a GitHub Action.

To run GitHub Actions workflows on this PR, run:

```sh
git branch -D update_flake_lock_action
git fetch origin
git checkout update_flake_lock_action
git commit --amend --no-edit
git push origin update_flake_lock_action --force
```

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2024-11-01 01:55:00 +00:00
Pierre Penninckx
bd63389bad
flake.lock: Update (#336)
Automated changes by the
[update-flake-lock](https://github.com/DeterminateSystems/update-flake-lock)
GitHub Action.

```
Flake lock file updates:

• Updated input 'nixpkgs':
    'github:nixos/nixpkgs/2768c7d042a37de65bb1b5b3268fc987e534c49d?narHash=sha256-AlcmCXJZPIlO5dmFzV3V2XF6x/OpNWUV8Y/FMPGd8Z4%3D' (2024-10-23)
  → 'github:nixos/nixpkgs/18536bf04cd71abd345f9579158841376fdd0c5a?narHash=sha256-RP%2BOQ6koQQLX5nw0NmcDrzvGL8HDLnyXt/jHhL1jwjM%3D' (2024-10-25)
```

### Running GitHub Actions on this PR

GitHub Actions will not run workflows on pull requests which are opened
by a GitHub Action.

To run GitHub Actions workflows on this PR, run:

```sh
git branch -D update_flake_lock_action
git fetch origin
git checkout update_flake_lock_action
git commit --amend --no-edit
git push origin update_flake_lock_action --force
```

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2024-10-29 17:29:11 +00:00
Pierre Penninckx
34efaf7292
fix cache in demo (#335) 2024-10-27 01:35:50 +00:00
Pierre Penninckx
51c90e13e6
wait on demo build to succeed (#334) 2024-10-26 21:08:18 +00:00
Pierre Penninckx
61faa8fc73
include workflow file in path filtering option (#333) 2024-10-26 16:33:26 +00:00
Pierre Penninckx
fd06c34032
use correct badge for tests (#332) 2024-10-26 16:22:54 +00:00
Pierre Penninckx
1a11dc52d1
do not use unsupported anchors in github workflow (#331) 2024-10-26 15:03:04 +00:00
Pierre Penninckx
4479a68f32
use correct cache name for pushing to cachix (#330) 2024-10-26 14:35:16 +00:00
Pierre Penninckx
8700b76403
flake.lock: Update (#324)
Automated changes by the
[update-flake-lock](https://github.com/DeterminateSystems/update-flake-lock)
GitHub Action.

```
Flake lock file updates:

• Updated input 'nixpkgs':
    'github:nixos/nixpkgs/1997e4aa514312c1af7e2bda7fad1644e778ff26?narHash=sha256-I4tuhRpZFa6Fu6dcH9Dlo5LlH17peT79vx1y1SpeKt0%3D' (2024-10-20)
  → 'github:nixos/nixpkgs/2768c7d042a37de65bb1b5b3268fc987e534c49d?narHash=sha256-AlcmCXJZPIlO5dmFzV3V2XF6x/OpNWUV8Y/FMPGd8Z4%3D' (2024-10-23)
```

### Running GitHub Actions on this PR

GitHub Actions will not run workflows on pull requests which are opened
by a GitHub Action.

To run GitHub Actions workflows on this PR, run:

```sh
git branch -D update_flake_lock_action
git fetch origin
git checkout update_flake_lock_action
git commit --amend --no-edit
git push origin update_flake_lock_action --force
```

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2024-10-25 01:41:28 +00:00
ibizaman
238de568d5 update changelog 2024-10-24 22:27:47 +02:00
ibizaman
52fc7e29a9 fix tests in lib 2024-10-24 22:27:47 +02:00
ibizaman
a589a9fe00 add contract test for secret contract 2024-10-24 22:27:47 +02:00
ibizaman
9d81a72d51 use hardcodedsecret in restic test 2024-10-24 22:27:47 +02:00
ibizaman
b134abeb6d switch forgejo to new secrets contract 2024-10-24 22:27:47 +02:00
ibizaman
b85705ab74 switch authelia to new secrets contract 2024-10-24 22:27:47 +02:00
ibizaman
fa87855ee5 switch jellyfin to new secrets contract
This rabbit hole of a task led me to:
- Introduce a hardcoded secret module that is a secret provider
  for tests.
- Update LDAP and SSO modules to use the secret contract.
- Refactor the replaceSecrets library function to correctly fail
  when a secret file could not be read.
2024-10-24 22:27:47 +02:00
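In practice, a test can request a secret through the new contract and have the hardcoded provider fulfill it. A minimal sketch following the pattern used in the test changes further down (the attribute name `myJwtSecret` is illustrative):

```nix
{ config, ... }:
{
  imports = [ ../../modules/blocks/hardcodedsecret.nix ];

  # The service module emits a request (owner, mode, restartUnits) for each secret;
  # the test-only hardcodedsecret block satisfies it with an inline value.
  shb.hardcodedsecret.myJwtSecret = config.shb.authelia.secrets.jwtSecret.request // {
    content = "jwtSecret";
  };

  # The service is then pointed at the path where the provider writes the secret.
  shb.authelia.secrets.jwtSecret.result.path = config.shb.hardcodedsecret.myJwtSecret.path;
}
```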
Pierre Penninckx
b405988e60
fix lldap backup (#323)
closes #319
2024-10-24 21:21:01 +02:00
Pierre Penninckx
b23ce56955
flake.lock: Update (#318)
Automated changes by the
[update-flake-lock](https://github.com/DeterminateSystems/update-flake-lock)
GitHub Action.

```
Flake lock file updates:

• Updated input 'nixpkgs':
    'github:nixos/nixpkgs/5633bcff0c6162b9e4b5f1264264611e950c8ec7?narHash=sha256-9UTxR8eukdg%2BXZeHgxW5hQA9fIKHsKCdOIUycTryeVw%3D' (2024-10-09)
  → 'github:nixos/nixpkgs/1997e4aa514312c1af7e2bda7fad1644e778ff26?narHash=sha256-I4tuhRpZFa6Fu6dcH9Dlo5LlH17peT79vx1y1SpeKt0%3D' (2024-10-20)
```

### Running GitHub Actions on this PR

GitHub Actions will not run workflows on pull requests which are opened
by a GitHub Action.

To run GitHub Actions workflows on this PR, run:

```sh
git branch -D update_flake_lock_action
git fetch origin
git checkout update_flake_lock_action
git commit --amend --no-edit
git push origin update_flake_lock_action --force
```

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2024-10-24 15:52:41 +00:00
Pierre Penninckx
2dd3f4cff1
fix builds in CI (#322) 2024-10-24 08:27:33 -07:00
24 changed files with 666 additions and 200 deletions

.github/workflows/build.yaml

@ -0,0 +1,105 @@
# name: build
# on: push
# jobs:
#   checks:
#     uses: nixbuild/nixbuild-action/.github/workflows/ci-workflow.yml@v19
#     with:
#       nix_conf: |
#         allow-import-from-derivation = true
#     secrets:
#       nixbuild_token: ${{ secrets.nixbuild_token }}
name: "build"
on:
  pull_request:
  push:
    branches: [ "main" ]
jobs:
  build-matrix:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Install Nix
        uses: DeterminateSystems/nix-installer-action@main
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          extra-conf: "system-features = nixos-test benchmark big-parallel kvm"
      - name: Setup Caching
        uses: cachix/cachix-action@v14
        with:
          name: selfhostblocks
          authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
      - name: Generate Matrix
        id: generate-matrix
        run: |
          nix flake show --allow-import-from-derivation --json \
            | jq -c '.["checks"]["x86_64-linux"] | keys' > .output
          cat .output
          echo dynamic_list="$(cat .output)" >> "$GITHUB_OUTPUT"
    outputs:
      check: ${{ steps.generate-matrix.outputs.dynamic_list }}
  manual:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Install Nix
        uses: DeterminateSystems/nix-installer-action@main
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          extra-conf: "system-features = nixos-test benchmark big-parallel kvm"
      - name: Setup Caching
        uses: cachix/cachix-action@v14
        with:
          name: selfhostblocks
          authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
      - name: Build
        run: |
          nix \
            --print-build-logs \
            --option keep-going true \
            --show-trace \
            build .#manualHtml
  tests:
    runs-on: ubuntu-latest
    needs: [ "build-matrix" ]
    strategy:
      matrix:
        check: ${{ fromJson(needs.build-matrix.outputs.check) }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Install Nix
        uses: DeterminateSystems/nix-installer-action@main
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          extra-conf: "system-features = nixos-test benchmark big-parallel kvm"
      - name: Setup Caching
        uses: cachix/cachix-action@v14
        with:
          name: selfhostblocks
          authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
      - name: Build
        run: |
          nix build --print-build-logs --show-trace .#checks.x86_64-linux.${{ matrix.check }}
  results:
    if: ${{ always() }}
    runs-on: ubuntu-latest
    name: Final Results
    needs: [ manual, tests ]
    steps:
      - run: |
          result="${{ needs.tests.result }}"
          if [[ $result == "success" || $result == "skipped" ]]; then
            exit 0
          else
            exit 1
          fi


@ -3,15 +3,31 @@ name: Demo
on: on:
workflow_dispatch: workflow_dispatch:
pull_request: pull_request:
paths: &paths
- 'demo/**'
push: push:
branches: branches:
- main - main
paths: *paths
jobs: jobs:
path-filter:
runs-on: ubuntu-latest
outputs:
changed: ${{ steps.filter.outputs.changed }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- uses: dorny/paths-filter@v3
id: filter
with:
filters: |
changed:
- '.github/workflows/demo.yml'
- 'demo/**'
build: build:
needs: [ "path-filter" ]
if: needs.path-filter.outputs.changed == 'true'
strategy: strategy:
matrix: matrix:
demo: demo:
@ -38,7 +54,7 @@ jobs:
- uses: cachix/cachix-action@v14 - uses: cachix/cachix-action@v14
with: with:
name: mycache name: selfhostblocks
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- name: Build ${{ matrix.demo.name }} .#${{ matrix.demo.flake }} - name: Build ${{ matrix.demo.name }} .#${{ matrix.demo.flake }}
@ -50,4 +66,15 @@ jobs:
--show-trace \ --show-trace \
build .#nixosConfigurations.${{ matrix.demo.flake }}.config.system.build.vm build .#nixosConfigurations.${{ matrix.demo.flake }}.config.system.build.vm
result:
runs-on: ubuntu-latest
needs: [ "build" ]
if: '!cancelled()'
steps:
- run: |
result="${{ needs.build.result }}"
if [[ $result == "success" || $result == "skipped" ]]; then
exit 0
else
exit 1
fi


@ -1,22 +0,0 @@
# Leaving commented because it does not work.
#
# name: "Final Results"
#
# on:
# check_suite:
# types: [completed]
#
# jobs:
# results:
# name: Final Results
# runs-on: ubuntu-latest
# steps:
# - run: echo
# - run: exit 1
# # see https://stackoverflow.com/a/67532120/4907315
# if: >-
# ${{
# contains(needs.*.result, 'failure')
# || contains(needs.*.result, 'cancelled')
# || contains(needs.*.result, 'skipped')
# }}


@ -2,14 +2,8 @@
name: Deploy docs name: Deploy docs
on: on:
# Runs on pushes targeting the default branch
push: push:
branches: ["main"] branches: ["main"]
# TODO: needed ?
# schedule:
# - cron: 0 0 * * 1
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch: workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
@ -40,9 +34,10 @@ jobs:
- name: Install nix - name: Install nix
uses: cachix/install-nix-action@v20 uses: cachix/install-nix-action@v20
- uses: cachix/cachix-action@v14 - name: Setup Caching
uses: cachix/cachix-action@v14
with: with:
name: mycache name: selfhostblocks
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- name: Build docs - name: Build docs


@ -14,9 +14,26 @@
- `shb.authelia.oidcClients.description` -> `shb.authelia.oidcClients.client_name` - `shb.authelia.oidcClients.description` -> `shb.authelia.oidcClients.client_name`
- `shb.authelia.oidcClients.secret` -> `shb.authelia.oidcClients.client_secret` - `shb.authelia.oidcClients.secret` -> `shb.authelia.oidcClients.client_secret`
- `shb.authelia.ldapEndpoint` -> `shb.authelia.ldapHostname` and `shb.authelia.ldapPort` - `shb.authelia.ldapEndpoint` -> `shb.authelia.ldapHostname` and `shb.authelia.ldapPort`
- `shb.authelia.jwtSecretFile` -> `shb.authelia.jwtSecret.result.path`
- `shb.authelia.ldapAdminPasswordFile` -> `shb.authelia.ldapAdminPassword.result.path`
- `shb.authelia.sessionSecretFile` -> `shb.authelia.sessionSecret.result.path`
- `shb.authelia.storageEncryptionKeyFile` -> `shb.authelia.storageEncryptionKey.result.path`
- `shb.authelia.identityProvidersOIDCIssuerPrivateKeyFile` -> `shb.authelia.identityProvidersOIDCIssuerPrivateKey.result.path`
- `shb.authelia.smtp.passwordFile` -> `shb.authelia.smtp.password.result.path`
- Make Nextcloud automatically disable maintenance mode upon service restart. - Make Nextcloud automatically disable maintenance mode upon service restart.
- `shb.ldap.ldapUserPasswordFile` -> `shb.ldap.ldapUserPassword.result.path` - `shb.ldap.ldapUserPasswordFile` -> `shb.ldap.ldapUserPassword.result.path`
- `shb.ldap.jwtSecretFile` -> `shb.ldap.jwtSecret.result.path` - `shb.ldap.jwtSecretFile` -> `shb.ldap.jwtSecret.result.path`
- Jellyfin changes:
- `shb.jellyfin.ldap.passwordFile` -> `shb.jellyfin.ldap.adminPassword.result.path`.
- `shb.jellyfin.sso.secretFile` -> `shb.jellyfin.ldap.sharedSecret.result.path`.
- + `shb.jellyfin.ldap.sharedSecretForAuthelia`.
- Forgejo changes:
- `shb.forgejo.ldap.adminPasswordFile` -> `shb.forgejo.ldap.adminPassword.result.path`.
- `shb.forgejo.sso.secretFile` -> `shb.forgejo.ldap.sharedSecret.result.path`.
- `shb.forgejo.sso.secretFileForAuthelia` -> `shb.forgejo.ldap.sharedSecretForAuthelia.result.path`.
- `shb.forgejo.adminPasswordFile` -> `shb.forgejo.adminPassword.result.path`.
- `shb.forgejo.databasePasswordFile` -> `shb.forgejo.databasePassword.result.path`.
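For example, a Forgejo deployment migrates from the old file option to the new contract roughly as follows (the secret path is illustrative):

```nix
{
  # Before: the option took a file path directly.
  # shb.forgejo.adminPasswordFile = "/run/secrets/forgejo/adminPassword";

  # After: the option follows the secret contract and reads the path from result.path.
  shb.forgejo.adminPassword.result.path = "/run/secrets/forgejo/adminPassword";
}
```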
## User Facing Backwards Compatible Changes ## User Facing Backwards Compatible Changes


@ -3,7 +3,7 @@
*Modular server management based on NixOS modules and focused on best practices.* *Modular server management based on NixOS modules and focused on best practices.*
[![Documentation](https://github.com/ibizaman/selfhostblocks/actions/workflows/pages.yml/badge.svg)](https://github.com/ibizaman/selfhostblocks/actions/workflows/pages.yml) [![Documentation](https://github.com/ibizaman/selfhostblocks/actions/workflows/pages.yml/badge.svg)](https://github.com/ibizaman/selfhostblocks/actions/workflows/pages.yml)
[![Tests](https://img.shields.io/endpoint.svg?url=https%3A%2F%2Fgarnix.io%2Fapi%2Fbadges%2Fibizaman%2Fselfhostblocks%3Fbranch%3Dmain)](https://garnix.io) (using Garnix) [![Tests](https://github.com/ibizaman/selfhostblocks/actions/workflows/build.yaml/badge.svg)](https://github.com/ibizaman/selfhostblocks/actions/workflows/build.yaml)
[![Demo](https://github.com/ibizaman/selfhostblocks/actions/workflows/demo.yml/badge.svg)](https://github.com/ibizaman/selfhostblocks/actions/workflows/demo.yml) [![Demo](https://github.com/ibizaman/selfhostblocks/actions/workflows/demo.yml/badge.svg)](https://github.com/ibizaman/selfhostblocks/actions/workflows/demo.yml)
SHB (Self Host Blocks) is yet another server management tool SHB (Self Host Blocks) is yet another server management tool


@ -35,11 +35,11 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1728492678, "lastModified": 1731139594,
"narHash": "sha256-9UTxR8eukdg+XZeHgxW5hQA9fIKHsKCdOIUycTryeVw=", "narHash": "sha256-IigrKK3vYRpUu+HEjPL/phrfh7Ox881er1UEsZvw9Q4=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "5633bcff0c6162b9e4b5f1264264611e950c8ec7", "rev": "76612b17c0ce71689921ca12d9ffdc9c23ce40b2",
"type": "github" "type": "github"
}, },
"original": { "original": {


@ -26,7 +26,6 @@
src = nixpkgs; src = nixpkgs;
inherit patches; inherit patches;
}; };
pkgs = import patchedNixpkgs { pkgs = import patchedNixpkgs {
inherit system; inherit system;
}; };
@ -34,6 +33,7 @@
allModules = [ allModules = [
modules/blocks/authelia.nix modules/blocks/authelia.nix
modules/blocks/davfs.nix modules/blocks/davfs.nix
modules/blocks/hardcodedsecret.nix
modules/blocks/ldap.nix modules/blocks/ldap.nix
modules/blocks/monitoring.nix modules/blocks/monitoring.nix
modules/blocks/nginx.nix modules/blocks/nginx.nix
@ -79,12 +79,14 @@
checks = checks =
let let
inherit (pkgs.lib) foldl foldlAttrs mergeAttrs optionalAttrs;
importFiles = files: importFiles = files:
map (m: pkgs.callPackage m {}) files; map (m: pkgs.callPackage m {}) files;
mergeTests = pkgs.lib.lists.foldl pkgs.lib.trivial.mergeAttrs {}; mergeTests = foldl mergeAttrs {};
flattenAttrs = root: attrset: pkgs.lib.attrsets.foldlAttrs (acc: name: value: acc // { flattenAttrs = root: attrset: foldlAttrs (acc: name: value: acc // {
"${root}_${name}" = value; "${root}_${name}" = value;
}) {} attrset; }) {} attrset;
@ -96,19 +98,21 @@
); );
shblib = pkgs.callPackage ./lib {}; shblib = pkgs.callPackage ./lib {};
in (rec { in (optionalAttrs (system == "x86_64-linux") ({
modules = shblib.check { modules = shblib.check {
inherit pkgs; inherit pkgs;
tests = tests =
mergeTests (importFiles [ mergeTests (importFiles [
./test/modules/arr.nix ./test/modules/arr.nix
./test/modules/davfs.nix ./test/modules/davfs.nix
# TODO: Make this not use IFD
./test/modules/lib.nix ./test/modules/lib.nix
./test/modules/nginx.nix ./test/modules/nginx.nix
./test/modules/postgresql.nix ./test/modules/postgresql.nix
]); ]);
}; };
# TODO: Make this not use IFD
lib = nix-flake-tests.lib.check { lib = nix-flake-tests.lib.check {
inherit pkgs; inherit pkgs;
tests = pkgs.callPackage ./test/modules/lib.nix {}; tests = pkgs.callPackage ./test/modules/lib.nix {};
@ -119,7 +123,7 @@
// (vm_test "deluge" ./test/services/deluge.nix) // (vm_test "deluge" ./test/services/deluge.nix)
// (vm_test "forgejo" ./test/services/forgejo.nix) // (vm_test "forgejo" ./test/services/forgejo.nix)
// (vm_test "grocy" ./test/services/grocy.nix) // (vm_test "grocy" ./test/services/grocy.nix)
// (vm_test "home-assistant" ./test/services/home-assistant.nix) // (vm_test "homeassistant" ./test/services/home-assistant.nix)
// (vm_test "jellyfin" ./test/services/jellyfin.nix) // (vm_test "jellyfin" ./test/services/jellyfin.nix)
// (vm_test "monitoring" ./test/services/monitoring.nix) // (vm_test "monitoring" ./test/services/monitoring.nix)
// (vm_test "nextcloud" ./test/services/nextcloud.nix) // (vm_test "nextcloud" ./test/services/nextcloud.nix)
@ -131,7 +135,11 @@
// (vm_test "postgresql" ./test/blocks/postgresql.nix) // (vm_test "postgresql" ./test/blocks/postgresql.nix)
// (vm_test "restic" ./test/blocks/restic.nix) // (vm_test "restic" ./test/blocks/restic.nix)
// (vm_test "ssl" ./test/blocks/ssl.nix) // (vm_test "ssl" ./test/blocks/ssl.nix)
);
// (vm_test "contracts-secret" ./test/contracts/secret.nix)
));
} }
); ) // {
herculesCI.ciSystems = [ "x86_64-linux" ];
};
} }


@ -1,7 +1,7 @@
{ pkgs, lib }: { pkgs, lib }:
let let
inherit (builtins) isAttrs hasAttr; inherit (builtins) isAttrs hasAttr;
inherit (lib) concatStringsSep; inherit (lib) concatMapStringsSep concatStringsSep mapAttrsToList;
in in
rec { rec {
# Replace secrets in a file. # Replace secrets in a file.
@ -31,17 +31,34 @@ rec {
resultPath = newPath; resultPath = newPath;
}; };
genReplacement = secret:
let
t = { transform ? null, ... }: if isNull transform then x: x else transform;
in
lib.attrsets.nameValuePair (secretName secret.name) ((t secret) "$(cat ${toString secret.source})");
replaceSecretsScript = { file, resultPath, replacements, user ? null, permissions ? "u=r,g=r,o=" }: replaceSecretsScript = { file, resultPath, replacements, user ? null, permissions ? "u=r,g=r,o=" }:
let let
templatePath = resultPath + ".template"; templatePath = resultPath + ".template";
sedPatterns = lib.strings.concatStringsSep " " (lib.attrsets.mapAttrsToList (from: to: "-e \"s|${from}|${to}|\"") replacements);
sedCmd = if replacements == {} # We check that the files containing the secrets have the
# correct permissions for us to read them in this separate
# step. Otherwise, the $(cat ...) commands inside the sed
# replacements could fail individually but not fail the
# whole script.
checkPermissions = concatMapStringsSep "\n" (pattern: "cat ${pattern.source} > /dev/null") replacements;
sedPatterns = concatMapStringsSep " " (pattern: "-e \"s|${pattern.name}|${pattern.value}|\"") (map genReplacement replacements);
sedCmd = if replacements == []
then "cat" then "cat"
else "${pkgs.gnused}/bin/sed ${sedPatterns}"; else "${pkgs.gnused}/bin/sed ${sedPatterns}";
in in
'' ''
set -euo pipefail set -euo pipefail
${checkPermissions}
mkdir -p $(dirname ${templatePath}) mkdir -p $(dirname ${templatePath})
ln -fs ${file} ${templatePath} ln -fs ${file} ${templatePath}
rm -f ${resultPath} rm -f ${resultPath}
@ -71,8 +88,8 @@ rec {
}; };
}; };
secretName = name: secretName = names:
"%SECRET${lib.strings.toUpper (lib.strings.concatMapStrings (s: "_" + s) name)}%"; "%SECRET${lib.strings.toUpper (lib.strings.concatMapStrings (s: "_" + s) names)}%";
withReplacements = attrs: withReplacements = attrs:
let let
@ -91,15 +108,8 @@ rec {
else value // { name = name; }; else value // { name = name; };
secretsWithName = mapAttrsRecursiveCond (v: ! v ? "source") addNameField attrs; secretsWithName = mapAttrsRecursiveCond (v: ! v ? "source") addNameField attrs;
allSecrets = collect (v: builtins.isAttrs v && v ? "source") secretsWithName;
t = { transform ? null, ... }: if isNull transform then x: x else transform;
genReplacement = secret:
lib.attrsets.nameValuePair (secretName secret.name) ((t secret) "$(cat ${toString secret.source})");
in in
lib.attrsets.listToAttrs (map genReplacement allSecrets); collect (v: builtins.isAttrs v && v ? "source") secretsWithName;
# Inspired lib.attrsets.mapAttrsRecursiveCond but also recurses on lists. # Inspired lib.attrsets.mapAttrsRecursiveCond but also recurses on lists.
mapAttrsRecursiveCond = mapAttrsRecursiveCond =
@ -238,7 +248,7 @@ rec {
results = pkgs.lib.runTests tests; results = pkgs.lib.runTests tests;
in in
if results != [ ] then if results != [ ] then
builtins.throw (builtins.concatStringsSep "\n" (map resultToString (lib.traceValSeqN 3 results))) builtins.throw (concatStringsSep "\n" (map resultToString (lib.traceValSeqN 3 results)))
else else
pkgs.runCommand "nix-flake-tests-success" { } "echo > $out"; pkgs.runCommand "nix-flake-tests-success" { } "echo > $out";


@ -1,7 +1,8 @@
{ config, pkgs, lib, ... }: { config, options, pkgs, lib, ... }:
let let
cfg = config.shb.authelia; cfg = config.shb.authelia;
opt = options.shb.authelia;
contracts = pkgs.callPackage ../contracts {}; contracts = pkgs.callPackage ../contracts {};
shblib = pkgs.callPackage ../../lib {}; shblib = pkgs.callPackage ../../lib {};
@ -67,33 +68,45 @@ in
description = "Secrets needed by Authelia"; description = "Secrets needed by Authelia";
type = lib.types.submodule { type = lib.types.submodule {
options = { options = {
jwtSecretFile = lib.mkOption { jwtSecret = contracts.secret.mkOption {
type = lib.types.path; description = "JWT secret.";
description = "File containing the JWT secret."; mode = "0400";
owner = cfg.autheliaUser;
restartUnits = [ "authelia-${opt.subdomain}.${opt.domain}" ];
}; };
ldapAdminPasswordFile = lib.mkOption { ldapAdminPassword = contracts.secret.mkOption {
type = lib.types.path; description = "LDAP admin user password.";
description = "File containing the LDAP admin user password."; mode = "0400";
owner = cfg.autheliaUser;
restartUnits = [ "authelia-${opt.subdomain}.${opt.domain}" ];
}; };
sessionSecretFile = lib.mkOption { sessionSecret = contracts.secret.mkOption {
type = lib.types.path; description = "Session secret.";
description = "File containing the session secret."; mode = "0400";
owner = cfg.autheliaUser;
restartUnits = [ "authelia-${opt.subdomain}.${opt.domain}" ];
}; };
storageEncryptionKeyFile = lib.mkOption { storageEncryptionKey = contracts.secret.mkOption {
type = lib.types.path; description = "Storage encryption key.";
description = "File containing the storage encryption key."; mode = "0400";
owner = cfg.autheliaUser;
restartUnits = [ "authelia-${opt.subdomain}.${opt.domain}" ];
}; };
identityProvidersOIDCHMACSecretFile = lib.mkOption { identityProvidersOIDCHMACSecret = contracts.secret.mkOption {
type = lib.types.path; description = "Identity provider OIDC HMAC secret.";
description = "File containing the identity provider OIDC HMAC secret."; mode = "0400";
owner = cfg.autheliaUser;
restartUnits = [ "authelia-${opt.subdomain}.${opt.domain}" ];
}; };
identityProvidersOIDCIssuerPrivateKeyFile = lib.mkOption { identityProvidersOIDCIssuerPrivateKey = contracts.secret.mkOption {
type = lib.types.path;
description = '' description = ''
File containing the identity provider OIDC issuer private key. Identity provider OIDC issuer private key.
Generate one with `nix run nixpkgs#openssl -- genrsa -out keypair.pem 2048` Generate one with `nix run nixpkgs#openssl -- genrsa -out keypair.pem 2048`
''; '';
mode = "0400";
owner = cfg.autheliaUser;
restartUnits = [ "authelia-${opt.subdomain}.${opt.domain}" ];
}; };
}; };
}; };
@ -207,9 +220,11 @@ in
type = lib.types.str; type = lib.types.str;
description = "Username to connect to the SMTP host."; description = "Username to connect to the SMTP host.";
}; };
passwordFile = lib.mkOption { password = contracts.secret.mkOption {
type = lib.types.str;
description = "File containing the password to connect to the SMTP host."; description = "File containing the password to connect to the SMTP host.";
mode = "0400";
owner = cfg.autheliaUser;
restartUnits = [ "authelia-${fqdn}" ];
}; };
}; };
})) }))
@ -282,19 +297,20 @@ in
user = cfg.autheliaUser; user = cfg.autheliaUser;
secrets = { secrets = {
inherit (cfg.secrets) jwtSecretFile storageEncryptionKeyFile; jwtSecretFile = cfg.secrets.jwtSecret.result.path;
storageEncryptionKeyFile = cfg.secrets.storageEncryptionKey.result.path;
}; };
# See https://www.authelia.com/configuration/methods/secrets/ # See https://www.authelia.com/configuration/methods/secrets/
environmentVariables = { environmentVariables = {
AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE = toString cfg.secrets.ldapAdminPasswordFile; AUTHELIA_AUTHENTICATION_BACKEND_LDAP_PASSWORD_FILE = toString cfg.secrets.ldapAdminPassword.result.path;
AUTHELIA_SESSION_SECRET_FILE = toString cfg.secrets.sessionSecretFile; AUTHELIA_SESSION_SECRET_FILE = toString cfg.secrets.sessionSecret.result.path;
# Not needed since we use peer auth. # Not needed since we use peer auth.
# AUTHELIA_STORAGE_POSTGRES_PASSWORD_FILE = "/run/secrets/authelia/postgres_password"; # AUTHELIA_STORAGE_POSTGRES_PASSWORD_FILE = "/run/secrets/authelia/postgres_password";
AUTHELIA_STORAGE_ENCRYPTION_KEY_FILE = toString cfg.secrets.storageEncryptionKeyFile; AUTHELIA_STORAGE_ENCRYPTION_KEY_FILE = toString cfg.secrets.storageEncryptionKey.result.path;
AUTHELIA_IDENTITY_PROVIDERS_OIDC_HMAC_SECRET_FILE = toString cfg.secrets.identityProvidersOIDCHMACSecretFile; AUTHELIA_IDENTITY_PROVIDERS_OIDC_HMAC_SECRET_FILE = toString cfg.secrets.identityProvidersOIDCHMACSecret.result.path;
AUTHELIA_IDENTITY_PROVIDERS_OIDC_ISSUER_PRIVATE_KEY_FILE = toString cfg.secrets.identityProvidersOIDCIssuerPrivateKeyFile; AUTHELIA_IDENTITY_PROVIDERS_OIDC_ISSUER_PRIVATE_KEY_FILE = toString cfg.secrets.identityProvidersOIDCIssuerPrivateKey.result.path;
AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE = lib.mkIf (!(builtins.isString cfg.smtp)) (toString cfg.smtp.passwordFile); AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE = lib.mkIf (!(builtins.isString cfg.smtp)) (toString cfg.smtp.password.result.path);
}; };
settings = { settings = {
server.address = "tcp://127.0.0.1:9091"; server.address = "tcp://127.0.0.1:9091";


@ -0,0 +1,108 @@
{ config, options, lib, pkgs, ... }:
let
cfg = config.shb.hardcodedsecret;
opt = options.shb.hardcodedsecret;
inherit (lib) mapAttrs' mkOption nameValuePair;
inherit (lib.types) attrsOf listOf path nullOr str submodule;
inherit (pkgs) writeText;
in
{
options.shb.hardcodedsecret = mkOption {
default = {};
description = ''
Hardcoded secrets. These should only be used in tests.
'';
example = lib.literalExpression ''
{
mySecret = {
user = "me";
mode = "0400";
restartUnits = [ "myservice.service" ];
content = "My Secrets";
};
}
'';
type = attrsOf (submodule ({ name, ... }: {
options = {
mode = mkOption {
description = ''
Mode of the secret file.
'';
type = str;
default = "0400";
};
owner = mkOption {
description = ''
Linux user owning the secret file.
'';
type = str;
default = "root";
};
group = mkOption {
description = ''
Linux group owning the secret file.
'';
type = str;
default = "root";
};
restartUnits = mkOption {
description = ''
Systemd units to restart after the secret is updated.
'';
type = listOf str;
default = [];
};
path = mkOption {
type = path;
description = ''
Path to the file containing the secret generated out of band.
This path will exist after deploying to a target host,
it is not available through the nix store.
'';
default = "/run/hardcodedsecrets/hardcodedsecret_${name}";
};
content = mkOption {
type = nullOr str;
description = ''
Content of the secret.
This will be stored in the nix store and should only be used for testing or maybe in dev.
'';
default = null;
};
source = mkOption {
type = nullOr str;
description = ''
Source of the content of the secret.
'';
default = null;
};
};
}));
};
config = {
system.activationScripts = mapAttrs' (n: cfg':
let
source = if cfg'.source != null
then cfg'.source
else writeText "hardcodedsecret_${n}_content" cfg'.content;
in
nameValuePair "hardcodedsecret_${n}" ''
mkdir -p "$(dirname "${cfg'.path}")"
touch "${cfg'.path}"
chmod ${cfg'.mode} "${cfg'.path}"
chown ${cfg'.owner}:${cfg'.group} "${cfg'.path}"
cp ${source} "${cfg'.path}"
''
) cfg;
};
}


@ -112,9 +112,12 @@ in
''; '';
readOnly = true; readOnly = true;
default = { default = {
user = "lldap"; # TODO: is there a workaround that avoid needing to use root?
# root because otherwise we cannot access the private StateDiretory
user = "root";
# /private because the systemd service uses DynamicUser=true
sourceDirectories = [ sourceDirectories = [
"/var/lib/lldap" "/var/lib/private/lldap"
]; ];
}; };
}; };


@ -1,7 +1,10 @@
{ lib }: { pkgs, lib }:
{ {
backup = import ./backup.nix { inherit lib; }; backup = import ./backup.nix { inherit lib; };
mount = import ./mount.nix { inherit lib; }; mount = import ./mount.nix { inherit lib; };
secret = import ./secret.nix { inherit lib; }; secret = import ./secret.nix { inherit lib; };
ssl = import ./ssl.nix { inherit lib; }; ssl = import ./ssl.nix { inherit lib; };
test = {
secret = import ./secret/test.nix { inherit pkgs lib; };
};
} }


@ -0,0 +1,64 @@
{ pkgs, lib, ... }:
let
pkgs' = pkgs;
testLib = pkgs.callPackage ../../../test/common.nix {};
inherit (lib) getAttrFromPath setAttrByPath;
inherit (lib) mkIf;
in
{ name,
configRoot,
createContent, # config to create a secret with value "secretA".
modules ? [],
owner ? "root",
group ? "root",
mode ? "0400",
restartUnits ? [ "myunit.service" ],
}: pkgs.testers.runNixOSTest {
name = "secret_${name}_${owner}_${group}_${mode}";
nodes.machine = { config, ... }: {
imports = ( testLib.baseImports pkgs' ) ++ modules;
config = lib.mkMerge [
(setAttrByPath configRoot {
A = {
inherit owner group mode restartUnits;
} // createContent;
})
(mkIf (owner != "root") {
users.users.${owner}.isNormalUser = true;
})
(mkIf (group != "root") {
users.groups.${group} = {};
})
];
};
testScript = { nodes, ... }:
let
cfg = (getAttrFromPath configRoot nodes.machine)."A";
in
''
owner = machine.succeed("stat -c '%U' ${cfg.path}").strip()
print(f"Got owner {owner}")
if owner != "${owner}":
    raise Exception(f"Owner should be '${owner}' but got '{owner}'")
group = machine.succeed("stat -c '%G' ${cfg.path}").strip()
print(f"Got group {group}")
if group != "${group}":
    raise Exception(f"Group should be '${group}' but got '{group}'")
mode = str(int(machine.succeed("stat -c '%a' ${cfg.path}").strip()))
print(f"Got mode {mode}")
wantedMode = str(int("${mode}"))
if mode != wantedMode:
    raise Exception(f"Mode should be '{wantedMode}' but got '{mode}'")
content = machine.succeed("cat ${cfg.path}").strip()
print(f"Got content {content}")
if content != "secretA":
    raise Exception(f"Content should be 'secretA' but got '{content}'")
'';
}


@ -82,14 +82,12 @@ in
default = "admin"; default = "admin";
}; };
adminPasswordFile = lib.mkOption { adminPassword = contracts.secret.mkOption {
type = lib.types.path; description = "LDAP admin password.";
description = '' mode = "0440";
File containing the admin password of the LDAP server. owner = "forgejo";
group = "forgejo";
Must be readable by the forgejo system user. restartUnits = [ "forgejo.service" ];
'';
default = "";
}; };
userGroup = lib.mkOption { userGroup = lib.mkOption {
@ -140,37 +138,37 @@ in
default = "one_factor"; default = "one_factor";
}; };
secretFile = lib.mkOption { sharedSecret = contracts.secret.mkOption {
type = lib.types.path; description = "OIDC shared secret for Forgejo.";
description = '' mode = "0440";
File containing the secret for the OIDC endpoint. owner = "forgejo";
group = "forgejo";
Must be readable by the forgejo system user. restartUnits = [ "forgejo.service" ];
'';
}; };
secretFileForAuthelia = lib.mkOption { sharedSecretForAuthelia = contracts.secret.mkOption {
type = lib.types.path; description = "OIDC shared secret for Authelia.";
description = '' mode = "0400";
File containing the secret for the OIDC endpoint, must be readable by the Authelia user. owner = "authelia";
Must be readable by the authelia system user.
'';
}; };
}; };
}; };
}; };
adminPasswordFile = lib.mkOption { adminPassword = contracts.secret.mkOption {
type = lib.types.path;
description = "File containing the Forgejo admin user password."; description = "File containing the Forgejo admin user password.";
example = "/run/secrets/forgejo/adminPassword"; mode = "0440";
owner = "forgejo";
group = "forgejo";
restartUnits = [ "forgejo.service" ];
}; };
databasePasswordFile = lib.mkOption { databasePassword = contracts.secret.mkOption {
type = lib.types.path;
description = "File containing the Forgejo database password."; description = "File containing the Forgejo database password.";
example = "/run/secrets/forgejo/databasePassword"; mode = "0440";
owner = "forgejo";
group = "forgejo";
restartUnits = [ "forgejo.service" ];
}; };
repositoryRoot = lib.mkOption { repositoryRoot = lib.mkOption {
@ -344,7 +342,7 @@ in
services.forgejo.database = { services.forgejo.database = {
type = "postgres"; type = "postgres";
passwordFile = cfg.databasePasswordFile; passwordFile = cfg.databasePassword.result.path;
}; };
}) })
@ -380,7 +378,7 @@ in
--host ${cfg.ldap.host} \ --host ${cfg.ldap.host} \
--port ${toString cfg.ldap.port} \ --port ${toString cfg.ldap.port} \
--bind-dn uid=${cfg.ldap.adminName},ou=people,${cfg.ldap.dcdomain} \ --bind-dn uid=${cfg.ldap.adminName},ou=people,${cfg.ldap.dcdomain} \
--bind-password $(tr -d '\n' < ${cfg.ldap.adminPasswordFile}) \ --bind-password $(tr -d '\n' < ${cfg.ldap.adminPassword.result.path}) \
--security-protocol Unencrypted \ --security-protocol Unencrypted \
--user-search-base ou=people,${cfg.ldap.dcdomain} \ --user-search-base ou=people,${cfg.ldap.dcdomain} \
--user-filter '(&(memberof=cn=${cfg.ldap.userGroup},ou=groups,${cfg.ldap.dcdomain})(|(uid=%[1]s)(mail=%[1]s)))' \ --user-filter '(&(memberof=cn=${cfg.ldap.userGroup},ou=groups,${cfg.ldap.dcdomain})(|(uid=%[1]s)(mail=%[1]s)))' \
@ -399,7 +397,7 @@ in
--host ${cfg.ldap.host} \ --host ${cfg.ldap.host} \
--port ${toString cfg.ldap.port} \ --port ${toString cfg.ldap.port} \
--bind-dn uid=${cfg.ldap.adminName},ou=people,${cfg.ldap.dcdomain} \ --bind-dn uid=${cfg.ldap.adminName},ou=people,${cfg.ldap.dcdomain} \
--bind-password $(tr -d '\n' < ${cfg.ldap.adminPasswordFile}) \ --bind-password $(tr -d '\n' < ${cfg.ldap.adminPassword.result.path}) \
--security-protocol Unencrypted \ --security-protocol Unencrypted \
--user-search-base ou=people,${cfg.ldap.dcdomain} \ --user-search-base ou=people,${cfg.ldap.dcdomain} \
--user-filter '(&(memberof=cn=${cfg.ldap.userGroup},ou=groups,${cfg.ldap.dcdomain})(|(uid=%[1]s)(mail=%[1]s)))' \ --user-filter '(&(memberof=cn=${cfg.ldap.userGroup},ou=groups,${cfg.ldap.dcdomain})(|(uid=%[1]s)(mail=%[1]s)))' \
@ -456,7 +454,7 @@ in
--name ${provider} \ --name ${provider} \
--provider openidConnect \ --provider openidConnect \
--key forgejo \ --key forgejo \
--secret $(tr -d '\n' < ${cfg.sso.secretFile}) \ --secret $(tr -d '\n' < ${cfg.sso.sharedSecret.result.path}) \
--auto-discover-url ${cfg.sso.endpoint}/.well-known/openid-configuration --auto-discover-url ${cfg.sso.endpoint}/.well-known/openid-configuration
else else
echo Did not find any sso configuration, creating one with name ${provider}. echo Did not find any sso configuration, creating one with name ${provider}.
@ -464,7 +462,7 @@ in
--name ${provider} \ --name ${provider} \
--provider openidConnect \ --provider openidConnect \
--key forgejo \ --key forgejo \
--secret $(tr -d '\n' < ${cfg.sso.secretFile}) \ --secret $(tr -d '\n' < ${cfg.sso.sharedSecret.result.path}) \
--auto-discover-url ${cfg.sso.endpoint}/.well-known/openid-configuration --auto-discover-url ${cfg.sso.endpoint}/.well-known/openid-configuration
fi fi
''; '';
@ -475,7 +473,7 @@ in
in { in {
client_id = cfg.sso.clientID; client_id = cfg.sso.clientID;
client_name = "Forgejo"; client_name = "Forgejo";
client_secret.source = cfg.sso.secretFileForAuthelia; client_secret.source = cfg.sso.sharedSecretForAuthelia.result.path;
public = false; public = false;
authorization_policy = cfg.sso.authorization_policy; authorization_policy = cfg.sso.authorization_policy;
redirect_uris = [ "https://${cfg.subdomain}.${cfg.domain}/user/oauth2/${provider}/callback" ]; redirect_uris = [ "https://${cfg.subdomain}.${cfg.domain}/user/oauth2/${provider}/callback" ];
@ -486,8 +484,8 @@ in
(lib.mkIf cfg.enable { (lib.mkIf cfg.enable {
systemd.services.forgejo.preStart = '' systemd.services.forgejo.preStart = ''
admin="${lib.getExe config.services.forgejo.package} admin user" admin="${lib.getExe config.services.forgejo.package} admin user"
$admin create --admin --email "root@localhost" --username meadmin --password "$(tr -d '\n' < ${cfg.adminPasswordFile})" || true $admin create --admin --email "root@localhost" --username meadmin --password "$(tr -d '\n' < ${cfg.adminPassword.result.path})" || true
$admin change-password --username meadmin --password "$(tr -d '\n' < ${cfg.adminPasswordFile})" || true $admin change-password --username meadmin --password "$(tr -d '\n' < ${cfg.adminPassword.result.path})" || true
''; '';
}) })


@ -67,9 +67,12 @@ in
default = "jellyfin_admin"; default = "jellyfin_admin";
}; };
passwordFile = lib.mkOption { adminPassword = contracts.secret.mkOption {
type = lib.types.path; description = "LDAP admin password.";
description = "File containing the LDAP admin password."; mode = "0440";
owner = "jellyfin";
group = "jellyfin";
restartUnits = [ "jellyfin.service" ];
}; };
}; };
}; };
@ -118,9 +121,18 @@ in
default = "one_factor"; default = "one_factor";
}; };
secretFile = lib.mkOption { sharedSecret = contracts.secret.mkOption {
type = lib.types.path; description = "OIDC shared secret for Jellyfin.";
description = "File containing the OIDC shared secret."; mode = "0440";
owner = "jellyfin";
group = "jellyfin";
restartUnits = [ "jellyfin.service" ];
};
sharedSecretForAuthelia = contracts.secret.mkOption {
description = "OIDC shared secret for Authelia.";
mode = "0400";
owner = config.shb.authelia.autheliaUser;
}; };
}; };
}; };
@ -400,30 +412,35 @@ in
lib.strings.optionalString cfg.ldap.enable (shblib.replaceSecretsScript { lib.strings.optionalString cfg.ldap.enable (shblib.replaceSecretsScript {
file = ldapConfig; file = ldapConfig;
resultPath = "/var/lib/jellyfin/plugins/configurations/LDAP-Auth.xml"; resultPath = "/var/lib/jellyfin/plugins/configurations/LDAP-Auth.xml";
replacements = { replacements = [
"%LDAP_PASSWORD%" = "$(cat ${cfg.ldap.passwordFile})"; {
}; name = [ "%LDAP_PASSWORD%" ];
source = cfg.ldap.adminPassword.result.path;
}
];
}) })
+ lib.strings.optionalString cfg.sso.enable (shblib.replaceSecretsScript { + lib.strings.optionalString cfg.sso.enable (shblib.replaceSecretsScript {
file = ssoConfig; file = ssoConfig;
resultPath = "/var/lib/jellyfin/plugins/configurations/SSO-Auth.xml"; resultPath = "/var/lib/jellyfin/plugins/configurations/SSO-Auth.xml";
replacements = { replacements = [
"%SSO_SECRET%" = "$(cat ${cfg.sso.secretFile})"; {
}; name = [ "%SSO_SECRET%" ];
source = cfg.sso.sharedSecret.result.path;
}
];
}) })
+ lib.strings.optionalString cfg.sso.enable (shblib.replaceSecretsScript { + lib.strings.optionalString cfg.sso.enable (shblib.replaceSecretsScript {
file = brandingConfig; file = brandingConfig;
resultPath = "/var/lib/jellyfin/config/branding.xml"; resultPath = "/var/lib/jellyfin/config/branding.xml";
replacements = { replacements = [
"%a%" = "%a%"; ];
};
}); });
shb.authelia.oidcClients = lib.lists.optionals (!(isNull cfg.sso)) [ shb.authelia.oidcClients = lib.lists.optionals (!(isNull cfg.sso)) [
{ {
client_id = cfg.sso.clientID; client_id = cfg.sso.clientID;
client_name = "Jellyfin"; client_name = "Jellyfin";
client_secret.source = cfg.sso.secretFile; client_secret.source = cfg.sso.sharedSecretForAuthelia.result.path;
public = false; public = false;
authorization_policy = cfg.sso.authorization_policy; authorization_policy = cfg.sso.authorization_policy;
redirect_uris = [ "https://${cfg.subdomain}.${cfg.domain}/sso/OID/r/${cfg.sso.provider}" ]; redirect_uris = [ "https://${cfg.subdomain}.${cfg.domain}/sso/OID/r/${cfg.sso.provider}" ];


@ -13,6 +13,7 @@ in
(pkgs'.path + "/nixos/modules/profiles/headless.nix") (pkgs'.path + "/nixos/modules/profiles/headless.nix")
(pkgs'.path + "/nixos/modules/profiles/qemu-guest.nix") (pkgs'.path + "/nixos/modules/profiles/qemu-guest.nix")
../../modules/blocks/authelia.nix ../../modules/blocks/authelia.nix
../../modules/blocks/hardcodedsecret.nix
../../modules/blocks/ldap.nix ../../modules/blocks/ldap.nix
../../modules/blocks/postgresql.nix ../../modules/blocks/postgresql.nix
]; ];
@ -44,14 +45,12 @@ in
ldapPort = config.shb.ldap.ldapPort; ldapPort = config.shb.ldap.ldapPort;
dcdomain = config.shb.ldap.dcdomain; dcdomain = config.shb.ldap.dcdomain;
secrets = { secrets = {
jwtSecretFile = pkgs.writeText "jwtSecretFile" "jwtSecretFile"; jwtSecret.result.path = config.shb.hardcodedsecret.autheliaJwtSecret.path;
ldapAdminPasswordFile = pkgs.writeText "ldapAdminPasswordFile" ldapAdminPassword; ldapAdminPassword.result.path = config.shb.hardcodedsecret.ldapAdminPassword.path;
sessionSecretFile = pkgs.writeText "sessionSecretFile" "sessionSecretFile"; sessionSecret.result.path = config.shb.hardcodedsecret.sessionSecret.path;
storageEncryptionKeyFile = pkgs.writeText "storageEncryptionKeyFile" "storageEncryptionKeyFile"; storageEncryptionKey.result.path = config.shb.hardcodedsecret.storageEncryptionKey.path;
identityProvidersOIDCHMACSecretFile = pkgs.writeText "identityProvidersOIDCHMACSecretFile" "identityProvidersOIDCHMACSecretFile"; identityProvidersOIDCHMACSecret.result.path = config.shb.hardcodedsecret.identityProvidersOIDCHMACSecret.path;
# This needs to be of the correct shape and at least 2048 bits. Generated with: identityProvidersOIDCIssuerPrivateKey.result.path = config.shb.hardcodedsecret.identityProvidersOIDCIssuerPrivateKey.path;
# nix run nixpkgs#openssl -- genrsa -out keypair.pem 2048
identityProvidersOIDCIssuerPrivateKeyFile = pkgs.writeText "identityProvidersOIDCIssuerPrivateKeyFile" (builtins.readFile ./keypair.pem);
}; };
oidcClients = [ oidcClients = [
@ -73,6 +72,28 @@ in
} }
]; ];
}; };
shb.hardcodedsecret.autheliaJwtSecret = config.shb.authelia.secrets.jwtSecret.request // {
content = "jwtSecret";
};
shb.hardcodedsecret.ldapAdminPassword = config.shb.authelia.secrets.ldapAdminPassword.request // {
content = ldapAdminPassword;
};
shb.hardcodedsecret.sessionSecret = config.shb.authelia.secrets.sessionSecret.request // {
content = "sessionSecret";
};
shb.hardcodedsecret.storageEncryptionKey = config.shb.authelia.secrets.storageEncryptionKey.request // {
content = "storageEncryptionKey";
};
shb.hardcodedsecret.identityProvidersOIDCHMACSecret = config.shb.authelia.secrets.identityProvidersOIDCHMACSecret.request // {
content = "identityProvidersOIDCHMACSecret";
};
shb.hardcodedsecret.identityProvidersOIDCIssuerPrivateKey = config.shb.authelia.secrets.identityProvidersOIDCIssuerPrivateKey.request // {
source = (pkgs.runCommand "gen-private-key" {} ''
mkdir $out
${pkgs.openssl}/bin/openssl genrsa -out $out/private.pem 4096
'') + "/private.pem";
};
}; };
testScript = { nodes, ... }: '' testScript = { nodes, ... }: ''


@ -12,11 +12,25 @@ let
commonTest = user: pkgs.testers.runNixOSTest { commonTest = user: pkgs.testers.runNixOSTest {
name = "restic_backupAndRestore_${user}"; name = "restic_backupAndRestore_${user}";
nodes.machine = { nodes.machine = { config, ... }: {
imports = ( testLib.baseImports pkgs' ) ++ [ imports = ( testLib.baseImports pkgs' ) ++ [
../../modules/blocks/hardcodedsecret.nix
../../modules/blocks/restic.nix ../../modules/blocks/restic.nix
]; ];
shb.hardcodedsecret.A = {
owner = "root";
group = "keys";
mode = "0440";
content = "secretA";
};
shb.hardcodedsecret.B = {
owner = "root";
group = "keys";
mode = "0440";
content = "secretB";
};
shb.restic.instances."testinstance" = { shb.restic.instances."testinstance" = {
enable = true; enable = true;
@ -39,8 +53,8 @@ let
# Those are not needed by the repository but are still included # Those are not needed by the repository but are still included
# so we can test them in the hooks section. # so we can test them in the hooks section.
secrets = { secrets = {
A.source = "/run/secrets/A"; A.source = config.shb.hardcodedsecret.A.path;
B.source = "/run/secrets/B"; B.source = config.shb.hardcodedsecret.B.path;
}; };
} }
{ {
@ -97,19 +111,6 @@ let
if len(result) > 0: if len(result) > 0:
raise Exception("Unexpected files:", result) raise Exception("Unexpected files:", result)
with subtest("Create secrets"):
print(machine.succeed("""
mkdir -p /run/secrets/
echo secretA > /run/secrets/A
echo secretB > /run/secrets/B
chown root:keys -R /run/secrets
find /run/secrets -type d -exec chmod u=rwx,g=rx,o=x '{}' ';'
find /run/secrets -type f -exec chmod u=r,g=r,o= '{}' ';'
ls -l /run/secrets
"""))
with subtest("Create initial content"): with subtest("Create initial content"):
machine.succeed(""" machine.succeed("""
mkdir -p /opt/files/A mkdir -p /opt/files/A


@ -1,6 +1,4 @@
{ { lib }:
lib,
}:
let let
baseImports = pkgs: [ baseImports = pkgs: [
(pkgs.path + "/nixos/modules/profiles/headless.nix") (pkgs.path + "/nixos/modules/profiles/headless.nix")
@ -109,6 +107,7 @@ in
../modules/blocks/postgresql.nix ../modules/blocks/postgresql.nix
../modules/blocks/authelia.nix ../modules/blocks/authelia.nix
../modules/blocks/nginx.nix ../modules/blocks/nginx.nix
../modules/blocks/hardcodedsecret.nix
] ]
++ additionalModules; ++ additionalModules;
@ -138,7 +137,7 @@ in
systemd.services.nginx.requires = [ config.shb.certs.certs.selfsigned.n.systemdService ]; systemd.services.nginx.requires = [ config.shb.certs.certs.selfsigned.n.systemdService ];
}; };
ldap = domain: pkgs: { ldap = domain: pkgs: { config, ... }: {
imports = [ imports = [
../modules/blocks/ldap.nix ../modules/blocks/ldap.nix
]; ];
@ -147,6 +146,13 @@ in
"127.0.0.1" = [ "ldap.${domain}" ]; "127.0.0.1" = [ "ldap.${domain}" ];
}; };
shb.hardcodedsecret.ldapUserPassword = config.shb.ldap.ldapUserPassword.request // {
content = "ldapUserPassword";
};
shb.hardcodedsecret.jwtSecret = config.shb.ldap.ldapUserPassword.request // {
content = "jwtSecrets";
};
shb.ldap = { shb.ldap = {
enable = true; enable = true;
inherit domain; inherit domain;
@ -154,8 +160,8 @@ in
ldapPort = 3890; ldapPort = 3890;
webUIListenPort = 17170; webUIListenPort = 17170;
dcdomain = "dc=example,dc=com"; dcdomain = "dc=example,dc=com";
ldapUserPassword.result.path = pkgs.writeText "ldapUserPassword" "ldapUserPassword"; ldapUserPassword.result.path = config.shb.hardcodedsecret.ldapUserPassword.path;
jwtSecret.result.path = pkgs.writeText "jwtSecret" "jwtSecret"; jwtSecret.result.path = config.shb.hardcodedsecret.jwtSecret.path;
}; };
}; };
@ -179,17 +185,36 @@ in
dcdomain = config.shb.ldap.dcdomain; dcdomain = config.shb.ldap.dcdomain;
secrets = { secrets = {
jwtSecretFile = pkgs.writeText "jwtSecret" "jwtSecret"; jwtSecret.result.path = config.shb.hardcodedsecret.autheliaJwtSecret.path;
ldapAdminPasswordFile = pkgs.writeText "ldapUserPassword" "ldapUserPassword"; ldapAdminPassword.result.path = config.shb.hardcodedsecret.ldapAdminPassword.path;
sessionSecretFile = pkgs.writeText "sessionSecret" "sessionSecret"; sessionSecret.result.path = config.shb.hardcodedsecret.sessionSecret.path;
storageEncryptionKeyFile = pkgs.writeText "storageEncryptionKey" "storageEncryptionKey"; storageEncryptionKey.result.path = config.shb.hardcodedsecret.storageEncryptionKey.path;
identityProvidersOIDCHMACSecretFile = pkgs.writeText "identityProvidersOIDCHMACSecret" "identityProvidersOIDCHMACSecret"; identityProvidersOIDCHMACSecret.result.path = config.shb.hardcodedsecret.identityProvidersOIDCHMACSecret.path;
identityProvidersOIDCIssuerPrivateKeyFile = (pkgs.runCommand "gen-private-key" {} '' identityProvidersOIDCIssuerPrivateKey.result.path = config.shb.hardcodedsecret.identityProvidersOIDCIssuerPrivateKey.path;
};
};
shb.hardcodedsecret.autheliaJwtSecret = config.shb.authelia.secrets.jwtSecret.request // {
content = "jwtSecret";
};
shb.hardcodedsecret.ldapAdminPassword = config.shb.authelia.secrets.ldapAdminPassword.request // {
content = "ldapUserPassword";
};
shb.hardcodedsecret.sessionSecret = config.shb.authelia.secrets.sessionSecret.request // {
content = "sessionSecret";
};
shb.hardcodedsecret.storageEncryptionKey = config.shb.authelia.secrets.storageEncryptionKey.request // {
content = "storageEncryptionKey";
};
shb.hardcodedsecret.identityProvidersOIDCHMACSecret = config.shb.authelia.secrets.identityProvidersOIDCHMACSecret.request // {
content = "identityProvidersOIDCHMACSecret";
};
shb.hardcodedsecret.identityProvidersOIDCIssuerPrivateKey = config.shb.authelia.secrets.identityProvidersOIDCIssuerPrivateKey.request // {
source = (pkgs.runCommand "gen-private-key" {} ''
mkdir $out mkdir $out
${pkgs.openssl}/bin/openssl genrsa -out $out/private.pem 4096 ${pkgs.openssl}/bin/openssl genrsa -out $out/private.pem 4096
'') + "/private.pem"; '') + "/private.pem";
}; };
}; };
};
} }

test/contracts/secret.nix

@ -0,0 +1,35 @@
{ pkgs, ... }:
let
contracts = pkgs.callPackage ../../modules/contracts {};
in
{
hardcoded_root_root = contracts.test.secret {
name = "hardcoded";
modules = [ ../../modules/blocks/hardcodedsecret.nix ];
configRoot = [ "shb" "hardcodedsecret" ];
createContent = {
content = "secretA";
};
};
hardcoded_user_group = contracts.test.secret {
name = "hardcoded";
modules = [ ../../modules/blocks/hardcodedsecret.nix ];
configRoot = [ "shb" "hardcodedsecret" ];
createContent = {
content = "secretA";
};
owner = "user";
group = "group";
mode = "640";
};
# TODO: how to do this?
# sops = contracts.test.secret {
# name = "sops";
# configRoot = cfg: name: cfg.sops.secrets.${name};
# createContent = content: {
# sopsFile = ./secret/sops.yaml;
# };
# };
}


@ -1,6 +1,8 @@
{ pkgs, lib, ... }: { pkgs, lib, ... }:
let let
shblib = pkgs.callPackage ../../lib {}; shblib = pkgs.callPackage ../../lib {};
inherit (lib) nameValuePair;
in in
{ {
# Tests that withReplacements can: # Tests that withReplacements can:
@ -79,15 +81,15 @@ in
testLibGetReplacements = { testLibGetReplacements = {
expected = expected =
let let
secrets = root: { secrets = root: [
"%SECRET_${root}B%" = "$(cat /path/B)"; (nameValuePair "%SECRET_${root}B%" "$(cat /path/B)")
"%SECRET_${root}C%" = "prefix-$(cat /path/C)-suffix"; (nameValuePair "%SECRET_${root}C%" "prefix-$(cat /path/C)-suffix")
}; ];
in in
(secrets "") // (secrets "") ++
(secrets "NESTEDATTR_") // (secrets "DOUBLENESTEDLIST_0_N_") ++
(secrets "NESTEDLIST_0_") // (secrets "NESTEDATTR_") ++
(secrets "DOUBLENESTEDLIST_0_N_"); (secrets "NESTEDLIST_0_");
expr = expr =
let let
item = { item = {
@ -99,13 +101,13 @@ in
c.other = "other"; c.other = "other";
}; };
in in
shblib.getReplacements ( map shblib.genReplacement (shblib.getReplacements (
item // { item // {
nestedAttr = item; nestedAttr = item;
nestedList = [ item ]; nestedList = [ item ];
doubleNestedList = [ { n = item; } ]; doubleNestedList = [ { n = item; } ];
} }
); ));
}; };
testParseXML = { testParseXML = {


@ -29,19 +29,27 @@ let
../../modules/services/forgejo.nix ../../modules/services/forgejo.nix
]; ];
basic = { basic = { config, ... }: {
shb.forgejo = { shb.forgejo = {
enable = true; enable = true;
inherit domain subdomain; inherit domain subdomain;
adminPasswordFile = pkgs.writeText "adminPasswordFile" adminPassword; adminPassword.result.path = config.shb.hardcodedsecret.forgejoAdminPassword.path;
databasePasswordFile = pkgs.writeText "databasePassword" "databasePassword"; databasePassword.result.path = config.shb.hardcodedsecret.forgejoDatabasePassword.path;
}; };
# Needed for gitea-runner-local to be able to ping forgejo. # Needed for gitea-runner-local to be able to ping forgejo.
networking.hosts = { networking.hosts = {
"127.0.0.1" = [ "${subdomain}.${domain}" ]; "127.0.0.1" = [ "${subdomain}.${domain}" ];
}; };
shb.hardcodedsecret.forgejoAdminPassword = config.shb.forgejo.adminPassword.request // {
content = adminPassword;
};
shb.hardcodedsecret.forgejoDatabasePassword = config.shb.forgejo.databasePassword.request // {
content = "databasePassword";
};
}; };
https = { config, ... }: { https = { config, ... }: {
@ -57,9 +65,13 @@ let
host = "127.0.0.1"; host = "127.0.0.1";
port = config.shb.ldap.ldapPort; port = config.shb.ldap.ldapPort;
dcdomain = config.shb.ldap.dcdomain; dcdomain = config.shb.ldap.dcdomain;
adminPasswordFile = config.shb.ldap.ldapUserPassword.result.path; adminPassword.result.path = config.shb.hardcodedsecret.forgejoLdapUserPassword.path;
}; };
}; };
shb.hardcodedsecret.forgejoLdapUserPassword = config.shb.forgejo.ldap.adminPassword.request // {
content = "ldapUserPassword";
};
}; };
sso = { config, ... }: { sso = { config, ... }: {
@ -67,10 +79,18 @@ let
sso = { sso = {
enable = true; enable = true;
endpoint = "https://${config.shb.authelia.subdomain}.${config.shb.authelia.domain}"; endpoint = "https://${config.shb.authelia.subdomain}.${config.shb.authelia.domain}";
secretFile = pkgs.writeText "ssoSecretFile" "ssoSecretFile"; sharedSecret.result.path = config.shb.hardcodedsecret.forgejoSSOPassword.path;
secretFileForAuthelia = pkgs.writeText "ssoSecretFile" "ssoSecretFile"; sharedSecretForAuthelia.result.path = config.shb.hardcodedsecret.forgejoSSOPasswordAuthelia.path;
}; };
}; };
shb.hardcodedsecret.forgejoSSOPassword = config.shb.forgejo.sso.sharedSecret.request // {
content = "ssoPassword";
};
shb.hardcodedsecret.forgejoSSOPasswordAuthelia = config.shb.forgejo.sso.sharedSecretForAuthelia.request // {
content = "ssoPassword";
};
}; };
in in
{ {


@ -43,9 +43,13 @@ let
host = "127.0.0.1"; host = "127.0.0.1";
port = config.shb.ldap.ldapPort; port = config.shb.ldap.ldapPort;
dcdomain = config.shb.ldap.dcdomain; dcdomain = config.shb.ldap.dcdomain;
passwordFile = config.shb.ldap.ldapUserPassword.result.path; adminPassword.result.path = config.shb.hardcodedsecret.jellyfinLdapUserPassword.path;
}; };
}; };
shb.hardcodedsecret.jellyfinLdapUserPassword = config.shb.jellyfin.ldap.adminPassword.request // {
content = "ldapUserPassword";
};
}; };
sso = { config, ... }: { sso = { config, ... }: {
@ -53,9 +57,18 @@ let
sso = { sso = {
enable = true; enable = true;
endpoint = "https://${config.shb.authelia.subdomain}.${config.shb.authelia.domain}"; endpoint = "https://${config.shb.authelia.subdomain}.${config.shb.authelia.domain}";
secretFile = pkgs.writeText "ssoSecretFile" "ssoSecretFile"; sharedSecret.result.path = config.shb.hardcodedsecret.jellyfinSSOPassword.path;
sharedSecretForAuthelia.result.path = config.shb.hardcodedsecret.jellyfinSSOPasswordAuthelia.path;
}; };
}; };
shb.hardcodedsecret.jellyfinSSOPassword = config.shb.jellyfin.sso.sharedSecret.request // {
content = "ssoPassword";
};
shb.hardcodedsecret.jellyfinSSOPasswordAuthelia = config.shb.jellyfin.sso.sharedSecretForAuthelia.request // {
content = "ssoPassword";
};
}; };
in in
{ {