Compare commits

..

71 Commits

SHA1 Message Date
7fd79c9911 Enable sysrq for debugging. 2025-12-06 12:25:17 +00:00
41eacfec02 Typo fix. 2025-12-02 20:39:25 +00:00
0a0748b920 Disable byte range locking for smbfs. 2025-12-02 20:38:48 +00:00
d6e0e09e87 Update flake. 2025-11-28 13:00:17 +00:00
61c3020a5e Update flake. 2025-11-25 18:53:43 +00:00
972b973f58 Update flake. 2025-11-25 14:05:10 +00:00
8c5a7b78c6 Update flake. 2025-11-24 13:33:04 +00:00
675204816a Even more RAM for Plex. 2025-11-23 20:10:58 +00:00
3bb82dbc6b Initial config. 2025-11-23 08:55:38 +00:00
0f6233c3ec More RAM. 2025-11-23 07:54:04 +00:00
43fa56bf35 Bind on all addresses and rely on firewall for blocking public ssh.
Otherwise, sshd will try and fail to bind on the tailscale IP before
tailscale is up.
2025-11-23 07:24:09 +00:00
50c930eeaf Add flaresolverr, disable bazarr, tweak resources. 2025-11-22 19:27:37 +00:00
8dde15b8ef Add prowlarr, recyclarr, and jellyseerr. 2025-11-22 17:32:14 +00:00
6100d8dc69 Fix override. 2025-11-21 16:43:39 +00:00
a92f0fcb28 Tighten up security. 2025-11-21 16:39:45 +00:00
bd4604cdcc Auth docs. 2025-11-21 14:12:19 +00:00
31db372b43 Remove now unused authentik config. 2025-11-21 14:00:47 +00:00
360e776745 Set up ollama. 2025-11-17 22:33:44 +00:00
5a819f70bb Static port for claude code accessibility. 2025-11-17 19:05:17 +00:00
b2c055ffb2 MCP server for tiddlywiki. 2025-11-17 17:56:05 +00:00
6e0b34843b Allow claude-code to read&write. 2025-11-16 21:07:58 +00:00
e8485e3bb7 Update flake. 2025-11-12 15:10:11 +00:00
e8cd970960 Make it an exit node. 2025-11-05 16:50:05 +00:00
78b59cec4f Put PHP port back on 9000, where the rest of the stuff expects it. 2025-11-05 15:54:46 +00:00
e6d40a9f7e Set an actual password. 2025-11-04 20:26:50 +00:00
7733a1be46 yet another replication fix. 2025-11-04 19:57:52 +00:00
a5df98bc5a Update docs. 2025-11-04 19:08:27 +00:00
fb9b0dd2f5 Move NFS server to sparky. 2025-11-04 19:00:18 +00:00
0dc214069c Fix curl-induced failures. 2025-11-04 18:59:50 +00:00
a6c4be9530 Use clone source for btrfs send. 2025-11-04 17:51:34 +00:00
6e338e6d65 Stop replicating to c1. 2025-11-04 14:03:49 +00:00
41f16fa0b8 Make sparky a standby again. 2025-11-04 12:58:34 +00:00
1b05728817 Switch to Pocket ID. 2025-11-04 12:58:15 +00:00
520a417316 Pocket ID config. 2025-11-04 11:04:33 +00:00
88ed5360ca Keys for sparky reinstall. 2025-11-04 11:04:20 +00:00
392d40def3 Update flake. 2025-11-04 10:26:18 +00:00
5ef4d832fb Only keep 10 snapshots, and push metrics. 2025-11-04 10:22:11 +00:00
49afc0c084 Remove standby from sparky. 2025-11-04 09:39:45 +00:00
b2c82ceaa8 Don't replicate to sparky for now. 2025-11-04 09:39:23 +00:00
b9286d7243 More CPU. 2025-11-02 06:50:38 +00:00
22931e6747 Add some items. 2025-11-01 17:55:40 +00:00
ac030018c6 Install prusa slicer. 2025-10-31 17:54:27 +00:00
7386d3a5ee Don't try to run consul on the cloud. 2025-10-31 15:55:37 +00:00
2a5a9f2ee9 Actually make sparky a NFS replica. 2025-10-31 15:54:32 +00:00
963a7c10fa Fix include. 2025-10-31 15:45:32 +00:00
283cf9d614 Make sparky a NFS backup instead of desktop. 2025-10-31 15:41:12 +00:00
5b3b4ea2ed Make sure to keep some snapshots around even if they stop coming. 2025-10-31 15:40:19 +00:00
5a9d5de5c4 (try to) show better diffs 2025-10-31 15:40:08 +00:00
a5e3f613c2 Set correct interface name for beefy. 2025-10-30 07:46:37 +00:00
8b8fac2d89 Try to fix systemd pager errors. 2025-10-30 07:37:21 +00:00
31d79ba75b Typo fix. 2025-10-30 07:28:32 +00:00
6faf148fde Don't try to use the RSA SSH key, not supported by sops. 2025-10-30 07:24:48 +00:00
e88f1c93c5 Another attempt at thoroughly fixing tmux ssh agent. 2025-10-30 07:21:40 +00:00
51375db1e4 Passphrase from beefy. 2025-10-30 06:29:49 +00:00
9415a8ece2 Make ssh agent settings autoheal in tmux. 2025-10-29 20:55:46 +00:00
da85ee776d Post-install beefy updates. 2025-10-29 17:25:43 +00:00
e23dc7df5b Configs for beefy. 2025-10-29 17:13:23 +00:00
163b9e4c22 Fix ghostty terminfo on remote hosts. 2025-10-29 15:17:46 +00:00
d521c3b013 Fix WiFi for stinky. 2025-10-29 15:17:46 +00:00
d123400ea9 Less CPU. 2025-10-28 19:50:03 +00:00
9c64a8ec00 Fix ghostty termcap. 2025-10-28 19:06:47 +00:00
4907238726 stinky wifi 2025-10-28 17:25:15 +00:00
37aad7d951 More tmpfs impermanence fixes. 2025-10-28 16:49:39 +00:00
ac34f029ed Update flake. 2025-10-28 15:55:30 +00:00
8d04add7dc Remove code server. 2025-10-28 15:44:09 +00:00
d7a07cebf5 Cleanup old snapshots hourly. 2025-10-28 14:40:28 +00:00
2ba961bfa8 TS key. 2025-10-28 11:35:49 +00:00
765e92f9c7 Keys for stinky. 2025-10-28 11:30:57 +00:00
1bb202d017 Add nixos-hardware flake for stinky. 2025-10-28 10:59:16 +00:00
98769f59d6 Fix stinky build. 2025-10-27 16:17:26 +00:00
762037d17f (untested) config for stinky and diff script. 2025-10-27 12:21:57 +00:00
77 changed files with 2017 additions and 568 deletions

1
.envrc Normal file

@@ -0,0 +1 @@
use flake

1
.gitignore vendored

@@ -3,3 +3,4 @@
 result
 .aider*
 .claude
+.direnv/

.sops.yaml

@@ -2,7 +2,9 @@ keys:
 - &admin_ppetru age1df9ukkmg9yn9cjeheq9m6wspa420su8qarmq570rdvf2de3rl38saqauwn
 - &server_zippy age1gtyw202hd07hddac9886as2cs8pm07e4exlnrgfm72lync75ng9qc5fjac
 - &server_chilly age16yqffw4yl5jqvsr7tyd883vn98zw0attuv9g5snc329juff6dy3qw2w5wp
-- &server_sparky age10zxwwufrf5uu9cv9p9znse2ftfm74q9ce893us6cnvxjc7e3ypcqy709dy
+- &server_sparky age14aml5s3sxksa8qthnt6apl3pu6egxyn0cz7pdzzvp2yl6wncad0q56udyj
+- &server_stinky age1me78u46409q9ez6fj0qanrfffc5e9kuq7n7uuvlljfwwc2mdaezqmyzxhx
+- &server_beefy age1cs8uqj243lspyp042ueu5aes4t3azgyuaxl9au70ggrl2meulq4sgqpc7y
 - &server_alo_cloud_1 age1w5w4wfvtul3sge9mt205zvrkjaeh3qs9gsxhmq7df2g4dztnvv6qylup8z
 - &server_c1 age1wwufz86tm3auxn6pn27c47s8rvu7en58rk00nghtaxsdpw0gya6qj6qxdt
 - &server_c2 age1jy7pe4530s8w904wtvrmpxvteztqy5ewdt92a7y3lq87sg9jce5qxxuydt

@@ -15,6 +17,8 @@ creation_rules:
 - *server_zippy
 - *server_chilly
 - *server_sparky
+- *server_stinky
+- *server_beefy
 - *server_alo_cloud_1
 - *server_c1
 - *server_c2

@@ -34,6 +38,21 @@ creation_rules:
 - age:
 - *admin_ppetru
 - *server_sparky
+- path_regex: secrets/stinky\.yaml
+  key_groups:
+  - age:
+    - *admin_ppetru
+    - *server_stinky
+- path_regex: secrets/beefy\.yaml
+  key_groups:
+  - age:
+    - *admin_ppetru
+    - *server_beefy
+- path_regex: secrets/wifi\.yaml
+  key_groups:
+  - age:
+    - *admin_ppetru
+    - *server_stinky
 - path_regex: secrets/alo-cloud-1\.yaml
 key_groups:
 - age:

@@ -8,42 +8,35 @@ NixOS cluster configuration using flakes. Homelab infrastructure with Nomad/Cons
 ├── common/
 │ ├── global/ # Applied to all hosts (backup, sops, users, etc.)
 │ ├── minimal-node.nix # Base (ssh, user, boot, impermanence)
-│ ├── cluster-member.nix # Consul + storage clients (NFS/CIFS/GlusterFS)
+│ ├── cluster-member.nix # Consul agent + storage mounts (NFS/CIFS)
 │ ├── nomad-worker.nix # Nomad client (runs jobs) + Docker + NFS deps
 │ ├── nomad-server.nix # Enables Consul + Nomad server mode
 │ ├── cluster-tools.nix # Just CLI tools (nomad, wander, damon)
 │ ├── workstation-node.nix # Dev tools (wget, deploy-rs, docker, nix-ld)
 │ ├── desktop-node.nix # Hyprland + GUI environment
-│ ├── nfs-services-server.nix # NFS server + btrfs replication (zippy)
+│ ├── nfs-services-server.nix # NFS server + btrfs replication
-│ └── nfs-services-standby.nix # NFS standby + receive replication (c1)
+│ └── nfs-services-standby.nix # NFS standby + receive replication
-├── hosts/
+├── hosts/ # Host configs - check imports for roles
-│ ├── c1/, c2/, c3/ # Cattle nodes (quorum + workers)
-│ ├── zippy/ # Primary storage + NFS server + worker (not quorum)
-│ ├── chilly/ # Home Assistant VM + cluster member (Consul only)
-│ ├── sparky/ # Desktop + cluster member (Consul only)
-│ ├── fractal/ # (Proxmox, will become NixOS storage node)
-│ └── sunny/ # (Standalone ethereum node, not in cluster)
 ├── docs/
 │ ├── CLUSTER_REVAMP.md # Master plan for architecture changes
 │ ├── MIGRATION_TODO.md # Tracking checklist for migration
 │ ├── NFS_FAILOVER.md # NFS failover procedures
+│ └── AUTH_SETUP.md # Authentication (Pocket ID + Traefik OIDC)
 └── services/ # Nomad job specs (.hcl files)
 ```
 ## Current Architecture
 ### Storage Mounts
-- `/data/services` - NFS from `data-services.service.consul` (zippy primary, c1 standby)
+- `/data/services` - NFS from `data-services.service.consul` (check nfs-services-server.nix for primary)
-- `/data/media` - CIFS from fractal (existing, unchanged)
+- `/data/media` - CIFS from fractal
-- `/data/shared` - CIFS from fractal (existing, unchanged)
+- `/data/shared` - CIFS from fractal
-### Hosts
+### Cluster Roles (check hosts/*/default.nix for each host's imports)
-- **c1, c2, c3**: Cattle nodes, run most workloads, Nomad/Consul quorum members
+- **Quorum**: hosts importing `nomad-server.nix` (3 expected for consensus)
-- **zippy**: Primary NFS server, runs workloads (affinity), NOT quorum, replicates to c1 every 5min
+- **Workers**: hosts importing `nomad-worker.nix` (run Nomad jobs)
-- **chilly**: Home Assistant VM, cluster member (Consul agent + CLI tools), no workloads
+- **NFS server**: host importing `nfs-services-server.nix` (affinity for direct disk access like DBs)
-- **sparky**: Desktop/laptop, cluster member (Consul agent + CLI tools), no workloads
+- **Standby**: hosts importing `nfs-services-standby.nix` (receive replication)
-- **fractal**: Storage node (Proxmox/ZFS), will join quorum after GlusterFS removed
-- **sunny**: Standalone ethereum staking node (not in cluster)
 ## Config Architecture

@@ -58,19 +51,22 @@ NixOS cluster configuration using flakes. Homelab infrastructure with Nomad/Cons
 - `workstation-node.nix` - Dev tools (deploy-rs, docker, nix-ld, emulation)
 - `desktop-node.nix` - Extends workstation + Hyprland/GUI
-**Host composition examples**:
+**Composition patterns**:
-- c1/c2/c3: `cluster-member + nomad-worker + nomad-server` (quorum + runs jobs)
+- Quorum member: `cluster-member + nomad-worker + nomad-server`
-- zippy: `cluster-member + nomad-worker` (runs jobs, not quorum)
+- Worker only: `cluster-member + nomad-worker`
-- chilly/sparky: `cluster-member + cluster-tools` (Consul + CLI only)
+- CLI only: `cluster-member + cluster-tools` (Consul agent, no Nomad service)
+- NFS primary: `cluster-member + nomad-worker + nfs-services-server`
+- Standalone: `minimal-node` only (no cluster membership)
-**Key insight**: Profiles (workstation/desktop) no longer imply cluster membership. Hosts explicitly declare roles via imports.
+**Key insight**: Profiles (workstation/desktop) don't imply cluster roles. Check imports for actual roles.
 ## Key Patterns
 **NFS Server/Standby**:
-- Primary (zippy): imports `nfs-services-server.nix`, sets `standbys = ["c1"]`
+- Primary: imports `nfs-services-server.nix`, sets `standbys = [...]`
-- Standby (c1): imports `nfs-services-standby.nix`, sets `replicationKeys = [...]`
+- Standby: imports `nfs-services-standby.nix`, sets `replicationKeys = [...]`
 - Replication: btrfs send/receive every 5min, incremental with fallback to full
+- Check host configs for current primary/standby assignments
 **Backups**:
 - Kopia client on all nodes → Kopia server on fractal

@@ -81,6 +77,12 @@ NixOS cluster configuration using flakes. Homelab infrastructure with Nomad/Cons
 - SOPS for secrets, files in `secrets/`
 - Keys managed per-host
+**Authentication**:
+- Pocket ID (OIDC provider) at `pocket-id.v.paler.net`
+- Traefik uses `traefik-oidc-auth` plugin for SSO
+- Services add `middlewares=oidc-auth@file` tag to protect
+- See `docs/AUTH_SETUP.md` for details
 ## Migration Status
 **Phase 3 & 4**: COMPLETE! GlusterFS removed, all services on NFS

@@ -92,7 +94,7 @@ See `docs/MIGRATION_TODO.md` for detailed checklist.
 **Deploy a host**: `deploy -s '.#hostname'`
 **Deploy all**: `deploy`
-**Check replication**: `ssh zippy journalctl -u replicate-services-to-c1.service -f`
+**Check replication**: Check NFS primary host, then `ssh <primary> journalctl -u replicate-services-to-*.service -f`
 **NFS failover**: See `docs/NFS_FAILOVER.md`
 **Nomad jobs**: `services/*.hcl` - service data stored at `/data/services/<service-name>`

@@ -106,8 +108,8 @@ See `docs/MIGRATION_TODO.md` for detailed checklist.
 ## Important Files
 - `common/global/backup.nix` - Kopia backup configuration
-- `hosts/zippy/default.nix` - NFS server config, replication targets
+- `common/nfs-services-server.nix` - NFS server role (check hosts for which imports this)
-- `hosts/c1/default.nix` - NFS standby config, authorized replication keys
+- `common/nfs-services-standby.nix` - NFS standby role (check hosts for which imports this)
 - `flake.nix` - Host definitions, nixpkgs inputs
 ---
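For illustration, a quorum member assembled from the composition patterns above would have a `hosts/<name>/default.nix` along these lines. This is a minimal sketch, not a file from this change: the host and its `hardware-configuration.nix` import are assumed.

```nix
{ ... }:
{
  imports = [
    ./hardware-configuration.nix      # hypothetical per-host hardware module
    ../../common/cluster-member.nix   # Consul agent + storage mounts
    ../../common/nomad-worker.nix     # runs Nomad jobs
    ../../common/nomad-server.nix     # participates in the Nomad/Consul quorum
  ];
}
```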

@@ -22,7 +22,6 @@ Each layer extends the previous one, inheriting all configurations. Hosts select
 ### Special Node Types
-- **cloud-node**: Minimal + Consul only (cloud VPS deployments)
 - **compute-node**: Cluster + Nomad worker (container orchestration)
 ## Directory Structure

@@ -40,7 +39,6 @@ Each layer extends the previous one, inheriting all configurations. Hosts select
 │ ├── server-node.nix # Server layer: bare metal services (future)
 │ ├── workstation-node.nix # Workstation layer: dev tools
 │ ├── desktop-node.nix # Desktop layer: GUI environment
-│ ├── cloud-node.nix # Cloud VPS profile
 │ ├── compute-node.nix # Nomad worker profile
 │ └── [feature modules] # Individual feature configs
 ├── hosts/

@@ -101,7 +99,7 @@ This ensures system and user configurations stay synchronized.
 | Host | Profile | Role | Hardware |
 |------|---------|------|----------|
 | **c1, c2, c3** | compute-node | Nomad workers | Bare metal servers |
-| **alo-cloud-1** | cloud-node | Reverse proxy | Cloud VPS |
+| **alo-cloud-1** | minimal | Reverse proxy (Traefik) | Cloud VPS |
 | **chilly** | server | Home Assistant in a VM | Bare metal server |
 | **zippy** | workstation | Development machine, server | Bare metal server |
 | **sparky** | desktop | Desktop environment | Bare metal desktop |

@@ -19,6 +19,8 @@
 enable = true;
 cache = {
 hostName = config.networking.hostName;
+# NOTE: These paths are hardcoded to /persist (not using config.custom.impermanence.persistPath)
+# This is acceptable since this service is only enabled on btrfs-based hosts
 dataPath = "/persist/ncps/data";
 tempPath = "/persist/ncps/tmp";
 databaseURL = "sqlite:/persist/ncps/db/db.sqlite";

@@ -1,7 +1,7 @@
 { pkgs, ... }:
 let
 # this line prevents hanging on network split
-automount_opts = "x-systemd.automount,noauto,x-systemd.idle-timeout=60,x-systemd.mount-timeout=5s";
+automount_opts = "x-systemd.automount,noauto,x-systemd.idle-timeout=60,x-systemd.mount-timeout=5s,nobrl";
 in
 {
 environment.systemPackages = [ pkgs.cifs-utils ];

common/cloud-node.nix (deleted)

@@ -1,8 +0,0 @@
-{ pkgs, ... }:
-{
-# Cloud node: Minimal system with Consul for cloud deployments
-imports = [
-./minimal-node.nix
-./consul.nix
-];
-}

common/cluster-member.nix

@@ -1,4 +1,4 @@
-{ pkgs, ... }:
+{ pkgs, lib, config, ... }:
 {
 # Cluster node configuration
 # Extends minimal-node with cluster-specific services (Consul, GlusterFS, CIFS, NFS)

@@ -11,7 +11,14 @@
 ./nfs-services-client.nix # New: NFS client for /data/services
 ];
-# Wait for eno1 to be routable before considering network online
-# (hosts with different primary interfaces should override this)
-systemd.network.wait-online.extraArgs = [ "--interface=eno1:routable" ];
+options.networking.cluster.primaryInterface = lib.mkOption {
+type = lib.types.str;
+default = "eno1";
+description = "Primary network interface for cluster communication (Consul, NFS, etc.)";
+};
+config = {
+# Wait for primary interface to be routable before considering network online
+systemd.network.wait-online.extraArgs = [ "--interface=${config.networking.cluster.primaryInterface}:routable" ];
+};
 }
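The new `networking.cluster.primaryInterface` option replaces the hard-coded `eno1` wait-online rule, so a host whose primary NIC has a different name only needs to override it. A minimal sketch, with the host and interface name assumed:

```nix
{ ... }:
{
  imports = [ ../../common/cluster-member.nix ];

  # Hypothetical host whose cluster traffic goes over a differently named NIC
  networking.cluster.primaryInterface = "enp3s0";
}
```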

common/consul.nix

@@ -13,7 +13,7 @@ in
 services.consul = {
 enable = true;
 webUi = true;
-interface.advertise = "eno1";
+interface.advertise = config.networking.cluster.primaryInterface;
 extraConfig = {
 client_addr = "0.0.0.0";
 datacenter = "alo";

@@ -27,7 +27,7 @@
 };
 };
-environment.persistence."/persist".directories = [ "/var/lib/consul" ];
+environment.persistence.${config.custom.impermanence.persistPath}.directories = [ "/var/lib/consul" ];
 networking.firewall = {
 allowedTCPPorts = [

common/desktop-node.nix

@@ -44,4 +44,8 @@
 environment.sessionVariables = {
 NIXOS_OZONE_WL = "1"; # Hint electron apps to use Wayland
 };
+environment.systemPackages = with pkgs; [
+prusa-slicer
+];
 }

@@ -12,7 +12,7 @@
 checkpoint-sync-url = "https://beaconstate.info";
 };
 };
-environment.persistence."/persist".directories = [
+environment.persistence.${config.custom.impermanence.persistPath}.directories = [
 "/var/lib/private/lighthouse-mainnet"
 ];
 }

common/global/backup.nix

@@ -6,8 +6,7 @@ let
 btrfs = "${btrfsPkg}/bin/btrfs";
 snapshotBackup = pkgs.writeScript "kopia-snapshot-backup" (builtins.readFile ./kopia-snapshot-backup.sh);
 backupScript = pkgs.writeShellScript "backup-persist" ''
-target_path="/persist"
+target_path="${config.custom.impermanence.persistPath}"
-snapshot_path="$target_path/kopia-backup-snapshot"
 KOPIA_CHECK_FOR_UPDATES=false
 ${kopia} repository connect server \

@@ -16,8 +15,13 @@ let
 -p "$(cat ${config.sops.secrets.kopia.path})" \
 || exit 1
-[ -e "$snapshot_path" ] && ${btrfs} subvolume delete "$snapshot_path"
+# Check if target_path is on btrfs filesystem
+fs_type=$(stat -f -c %T "$target_path")
+if [ "$fs_type" = "btrfs" ]; then
+# On btrfs: use snapshot for consistency
+snapshot_path="$target_path/kopia-backup-snapshot"
+[ -e "$snapshot_path" ] && ${btrfs} subvolume delete "$snapshot_path"
 ${btrfs} subvolume snapshot -r "$target_path" "$snapshot_path"
 # --no-send-snapshot-path due to https://github.com/kopia/kopia/issues/4402

@@ -28,6 +32,12 @@ let
 -- "$snapshot_path"
 ${btrfs} subvolume delete "$snapshot_path"
+else
+# On non-btrfs (e.g., ext4): backup directly without snapshot
+${kopia} snapshot create --no-send-snapshot-report --override-source "$target_path" \
+-- "$target_path"
+fi
 ${kopia} repository disconnect
 '';
 in

@@ -41,7 +51,7 @@ in
 services."backup-persist" = {
 description = "Backup persistent data with Kopia";
 serviceConfig = {
-type = "oneshot";
+Type = "oneshot";
 User = "root";
 ExecStart = "${backupScript}";
 };

@@ -5,6 +5,7 @@
 ./console.nix
 ./cpufreq.nix
 ./flakes.nix
+./impermanence-options.nix
 ./kernel.nix
 ./locale.nix
 ./network.nix

common/global/impermanence-options.nix Normal file

@@ -0,0 +1,14 @@
{
lib,
...
}:
{
# Define impermanence options that need to be available to all modules
# The actual impermanence implementation is in common/impermanence.nix or common/impermanence-tmpfs.nix
options.custom.impermanence.persistPath = lib.mkOption {
type = lib.types.str;
default = "/persist";
description = "Path where persistent data is stored (e.g., /persist for btrfs, /nix/persist for tmpfs)";
};
}
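Any module can now key its persistence entries off this option instead of hard-coding `/persist`, which is the same pattern the other diffs in this change apply. A minimal sketch, with the persisted service path assumed:

```nix
{ config, ... }:
{
  # Resolves to /persist on btrfs hosts and /nix/persist on tmpfs hosts
  environment.persistence.${config.custom.impermanence.persistPath}.directories = [
    "/var/lib/example-service" # hypothetical service state directory
  ];
}
```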

@@ -1,4 +1,4 @@
-{ lib, ... }:
+{ lib, config, ... }:
 {
 networking = {
 useDHCP = true;

@@ -10,7 +10,7 @@
 '';
 };
-environment.persistence."/persist" = {
+environment.persistence.${config.custom.impermanence.persistPath} = {
 directories = [ "/var/db/dhcpcd" ];
 };
 }

@@ -3,8 +3,7 @@
 sops = {
 # sometimes the impermanence bind mount is stopped when sops needs these
 age.sshKeyPaths = [
-"/persist/etc/ssh/ssh_host_ed25519_key"
+"${config.custom.impermanence.persistPath}/etc/ssh/ssh_host_ed25519_key"
-"/persist/etc/ssh/ssh_host_rsa_key"
 ];
 defaultSopsFile = ./../../secrets/common.yaml;
 secrets = {

@@ -22,6 +22,6 @@ in
 config = mkIf cfg.enable {
 services.tailscaleAutoconnect.enable = true;
 services.tailscale.package = pkgs.unstable.tailscale;
-environment.persistence."/persist".directories = [ "/var/lib/tailscale" ];
+environment.persistence.${config.custom.impermanence.persistPath}.directories = [ "/var/lib/tailscale" ];
 };
 }

common/impermanence-common.nix Normal file

@@ -0,0 +1,30 @@
{
lib,
config,
...
}:
{
# Common impermanence configuration shared by both btrfs and tmpfs variants
# This module should be imported by impermanence.nix or impermanence-tmpfs.nix
# The option custom.impermanence.persistPath is defined in common/global/impermanence-options.nix
environment.persistence.${config.custom.impermanence.persistPath} = {
directories = [
"/var/lib/nixos"
"/home"
];
files = [
"/etc/machine-id"
"/etc/ssh/ssh_host_ed25519_key"
"/etc/ssh/ssh_host_ed25519_key.pub"
"/etc/ssh/ssh_host_rsa_key"
"/etc/ssh/ssh_host_rsa_key.pub"
];
};
users.mutableUsers = false;
security.sudo.extraConfig = ''
Defaults lecture = never
'';
}

common/impermanence-tmpfs.nix Normal file

@@ -0,0 +1,30 @@
{
lib,
config,
...
}:
{
# Impermanence configuration for tmpfs root filesystem
# Used for systems with tmpfs root (e.g., Raspberry Pi with SD card)
# Root is in-memory and wiped on every boot
# Persistent data is stored in /nix/persist (directory on the /nix partition)
# Import common impermanence configuration
imports = [ ./impermanence-common.nix ];
config = {
# Use /nix/persist for tmpfs-based impermanence
custom.impermanence.persistPath = "/nix/persist";
# tmpfs root filesystem
fileSystems."/" = {
device = "none";
fsType = "tmpfs";
options = [
"defaults"
"size=2G"
"mode=755"
];
};
};
}
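A tmpfs-root host (for example a Raspberry Pi) would import this variant and mount its persistent partition at `/nix`. A minimal sketch: the partition label comes from docs/RASPBERRY_PI_SD_IMAGE.md, the host and relative path are assumed.

```nix
{ ... }:
{
  imports = [ ../../common/impermanence-tmpfs.nix ];

  # ext4 partition holding the Nix store and /nix/persist
  fileSystems."/nix" = {
    device = "/dev/disk/by-label/NIXOS_SD";
    fsType = "ext4";
    neededForBoot = true;
  };
}
```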

common/impermanence.nix

@@ -1,6 +1,5 @@
 {
 pkgs,
-inputs,
 lib,
 config,
 ...

@@ -9,31 +8,22 @@ let
 cfg = config.custom.impermanence;
 in
 {
+# Import common impermanence configuration
+imports = [ ./impermanence-common.nix ];
 options.custom.impermanence = {
 enable = lib.mkOption {
 type = lib.types.bool;
 default = true;
-description = "Enable impermanent root fs";
+description = "Enable impermanent root fs with btrfs subvolume rollback";
 };
 };
 config = lib.mkIf cfg.enable {
-environment.persistence = {
-"/persist" = {
-directories = [
-"/var/lib/nixos"
-"/home"
-];
-files = [
-"/etc/machine-id"
-"/etc/ssh/ssh_host_ed25519_key"
-"/etc/ssh/ssh_host_ed25519_key.pub"
-"/etc/ssh/ssh_host_rsa_key"
-"/etc/ssh/ssh_host_rsa_key.pub"
-];
-};
-};
+# Use /persist for btrfs-based impermanence
+custom.impermanence.persistPath = "/persist";
+# Btrfs-specific filesystem options
 fileSystems."/".options = [
 "compress=zstd"
 "noatime"

@@ -53,17 +43,7 @@ in
 ];
 fileSystems."/var/log".neededForBoot = true;
-users.mutableUsers = false;
-# rollback results in sudo lectures after each reboot
-security.sudo.extraConfig = ''
-Defaults lecture = never
-'';
-# needed for allowOther in the home-manager impermanence config
-programs.fuse.userAllowOther = true;
-# reset / at each boot
+# Btrfs subvolume rollback at each boot
 # Note `lib.mkBefore` is used instead of `lib.mkAfter` here.
 boot.initrd.postDeviceCommands = pkgs.lib.mkBefore ''
 mkdir /mnt

common/nfs-services-server.nix

@@ -24,7 +24,7 @@ in
 config = lib.mkIf cfg.enable {
 # Persist root SSH directory for replication key
-environment.persistence."/persist" = {
+environment.persistence.${config.custom.impermanence.persistPath} = {
 directories = [
 "/root/.ssh"
 ];

@@ -103,11 +103,14 @@ in
 ] ++ (lib.forEach cfg.standbys (standby: {
 "replicate-services-to-${standby}" = {
 description = "Replicate /persist/services to ${standby}";
-path = [ pkgs.btrfs-progs pkgs.openssh pkgs.coreutils pkgs.findutils pkgs.gnugrep ];
+path = [ pkgs.btrfs-progs pkgs.openssh pkgs.coreutils pkgs.findutils pkgs.gnugrep pkgs.curl ];
 script = ''
 set -euo pipefail
+START_TIME=$(date +%s)
+REPLICATION_SUCCESS=0
 SSH_KEY="/persist/root/.ssh/btrfs-replication"
 if [ ! -f "$SSH_KEY" ]; then
 echo "ERROR: SSH key not found at $SSH_KEY"

@@ -130,15 +133,19 @@ in
 echo "Attempting incremental send from $(basename $PREV_LOCAL) to ${standby}"
 # Try incremental send, if it fails (e.g., parent missing on receiver), fall back to full
-if btrfs send -p "$PREV_LOCAL" "$SNAPSHOT_PATH" | \
+# Use -c to help with broken Received UUID chains
+if btrfs send -p "$PREV_LOCAL" -c "$PREV_LOCAL" "$SNAPSHOT_PATH" | \
 ssh -i "$SSH_KEY" -o StrictHostKeyChecking=accept-new root@${standby} \
 "btrfs receive /persist/services-standby"; then
 echo "Incremental send completed successfully"
+REPLICATION_SUCCESS=1
 else
 echo "Incremental send failed (likely missing parent on receiver), falling back to full send"
+# Plain full send without clone source (receiver may have no snapshots)
 btrfs send "$SNAPSHOT_PATH" | \
 ssh -i "$SSH_KEY" -o StrictHostKeyChecking=accept-new root@${standby} \
 "btrfs receive /persist/services-standby"
+REPLICATION_SUCCESS=1
 fi
 else
 # First snapshot, do full send

@@ -146,10 +153,28 @@ in
 btrfs send "$SNAPSHOT_PATH" | \
 ssh -i "$SSH_KEY" -o StrictHostKeyChecking=accept-new root@${standby} \
 "btrfs receive /persist/services-standby"
+REPLICATION_SUCCESS=1
 fi
-# Cleanup old snapshots on sender (keep last 24 hours = 288 snapshots at 5min intervals)
+# Cleanup old snapshots on sender (keep last 10 snapshots, sorted by name/timestamp)
-find /persist -maxdepth 1 -name 'services@*' -mmin +1440 -exec btrfs subvolume delete {} \;
+ls -1d /persist/services@* 2>/dev/null | sort | head -n -10 | xargs -r btrfs subvolume delete
+# Calculate metrics
+END_TIME=$(date +%s)
+DURATION=$((END_TIME - START_TIME))
+SNAPSHOT_COUNT=$(ls -1d /persist/services@* 2>/dev/null | wc -l)
+# Push metrics to Prometheus pushgateway
+cat <<METRICS | curl -s --data-binary @- http://pushgateway.service.consul:9091/metrics/job/nfs_replication/instance/${standby} || true
+# TYPE nfs_replication_last_success_timestamp gauge
+nfs_replication_last_success_timestamp $END_TIME
+# TYPE nfs_replication_duration_seconds gauge
+nfs_replication_duration_seconds $DURATION
+# TYPE nfs_replication_snapshot_count gauge
+nfs_replication_snapshot_count $SNAPSHOT_COUNT
+# TYPE nfs_replication_success gauge
+nfs_replication_success $REPLICATION_SUCCESS
+METRICS
 '';
 serviceConfig = {

common/nfs-services-standby.nix

@@ -39,17 +39,28 @@ in
 noCheck = true;
 };
-# Cleanup old snapshots on standby (keep last 4 hours for HA failover)
+# Cleanup old snapshots on standby (keep last 10 snapshots)
 systemd.services.cleanup-services-standby-snapshots = {
 description = "Cleanup old btrfs snapshots in services-standby";
-path = [ pkgs.btrfs-progs pkgs.findutils ];
+path = [ pkgs.btrfs-progs pkgs.findutils pkgs.coreutils pkgs.curl ];
 script = ''
 set -euo pipefail
-# Keep last 4 hours of snapshots (48 snapshots at 5min intervals)
-find /persist/services-standby -maxdepth 1 -name 'services@*' -mmin +240 -exec btrfs subvolume delete {} \; || true
-'';
+# Cleanup old snapshots on standby (keep last 10 snapshots, sorted by name/timestamp)
+ls -1d /persist/services-standby/services@* 2>/dev/null | sort | head -n -10 | xargs -r btrfs subvolume delete || true
+# Calculate metrics
+CLEANUP_TIME=$(date +%s)
+SNAPSHOT_COUNT=$(ls -1d /persist/services-standby/services@* 2>/dev/null | wc -l)
+# Push metrics to Prometheus pushgateway
+cat <<METRICS | curl -s --data-binary @- http://pushgateway.service.consul:9091/metrics/job/nfs_standby_cleanup/instance/$(hostname) || true
+# TYPE nfs_standby_snapshot_count gauge
+nfs_standby_snapshot_count $SNAPSHOT_COUNT
+# TYPE nfs_standby_cleanup_last_run_timestamp gauge
+nfs_standby_cleanup_last_run_timestamp $CLEANUP_TIME
+METRICS
+'';
 serviceConfig = {
 Type = "oneshot";
 User = "root";

@@ -60,7 +71,7 @@ in
 description = "Timer for cleaning up old snapshots on standby";
 wantedBy = [ "timers.target" ];
 timerConfig = {
-OnCalendar = "daily";
+OnCalendar = "hourly";
 Persistent = true;
 };
 };

common/nomad-worker.nix

@@ -150,7 +150,7 @@ in
 plugin.raw_exec.config.enabled = true;
 };
-environment.persistence."/persist".directories = [
+environment.persistence.${config.custom.impermanence.persistPath}.directories = [
 "/var/lib/docker"
 "/var/lib/nomad"
 ];

@@ -17,6 +17,7 @@
 "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIH+QbeQG/gTPJ2sIMPgZ3ZPEirVo5qX/carbZMKt50YN petru@happy"
 "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOOQ2EcJ+T+7BItZl89oDYhq7ZW4B9KuQVCy2DuQaPKR ppetru@sparky"
 "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFRYVOfrqk2nFSyiu7TzU23ql8D6TfXICFpMIEvPbNsc JuiceSSH"
+"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINBIqK6+aPIbmviJPWP8PI/k8GmaC7RO8v2ENnsK8sJx ppetru@beefy"
 ];
 };
 }

35
common/wifi.nix Normal file

@@ -0,0 +1,35 @@
{ config, lib, ... }:
{
sops.secrets.wifi-password-pi = {
sopsFile = ./../secrets/wifi.yaml;
};
networking.wireless = {
enable = true;
secretsFile = config.sops.secrets.wifi-password-pi.path;
networks = {
"pi" = {
pskRaw = "ext:pi";
};
};
# Only enable on wireless interface, not ethernet
interfaces = [ "wlan0" ];
};
# Prefer wifi over ethernet, but keep ethernet as fallback
networking.dhcpcd.extraConfig = ''
# Prefer wlan0 over ethernet interfaces
interface wlan0
metric 100
interface eth0
metric 200
'';
# Persist wireless configuration across reboots (for impermanence)
environment.persistence.${config.custom.impermanence.persistPath} = {
files = [
"/etc/wpa_supplicant.conf"
];
};
}

55
docs/AUTH_SETUP.md Normal file

@@ -0,0 +1,55 @@
# Authentication Setup
SSO for homelab services using OIDC.
## Architecture
**Pocket ID** (`pocket-id.v.paler.net`) - Lightweight OIDC provider, data in `/data/services/pocket-id`
**Traefik** - Uses `traefik-oidc-auth` plugin (v0.16.0) to protect services
- Plugin downloaded from GitHub at startup, cached in `/data/services/traefik/plugins-storage`
- Middleware config in `/data/services/traefik/rules/middlewares.yml`
- Protected services add tag: `traefik.http.routers.<name>.middlewares=oidc-auth@file`
## Flow
1. User hits protected service → Traefik intercepts
2. Redirects to Pocket ID for login
3. Pocket ID returns OIDC token
4. Traefik validates and forwards with `X-Oidc-Username` header
## Protected Services
Use `oidc-auth@file` middleware (grep codebase for full list):
- Wikis (TiddlyWiki instances)
- Media stack (Radarr, Sonarr, Plex, etc.)
- Infrastructure (Traefik dashboard, Loki, Jupyter, Unifi)
## Key Files
- `services/pocket-id.hcl` - OIDC provider
- `services/traefik.hcl` - Plugin declaration
- `/data/services/traefik/rules/middlewares.yml` - Middleware definitions (oidc-auth, simple-auth fallback)
## Cold Start Notes
- Traefik needs internet to download plugin on first start
- Pocket ID needs `/data/services` NFS mounted
- Pocket ID down = all protected services inaccessible
## Troubleshooting
**Infinite redirects**: Check `TRUST_PROXY=true` on Pocket ID
**Plugin not loading**: Clear cache in `/data/services/traefik/plugins-storage/`, restart Traefik
**401 after login**: Verify client ID/secret in middlewares.yml matches Pocket ID client config
## Migration History
- Previous: Authentik with forwardAuth (removed Nov 2024)
- Current: Pocket ID + traefik-oidc-auth (simpler, lighter)
---
*Manage users/clients via Pocket ID UI. Basic auth fallback available via `simple-auth` middleware.*

288
docs/DIFF_CONFIGS.md Normal file

@@ -0,0 +1,288 @@
# Configuration Diff Tool
Tool to compare all NixOS host configurations between current working tree and HEAD commit.
## Purpose
Before committing changes (especially refactors), verify that you haven't accidentally broken existing host configurations. This tool:
- Builds all host configurations in current state (with uncommitted changes)
- Builds all host configurations at HEAD (last commit)
- Uses `nvd` to show readable diffs for each host
- Highlights which hosts changed and which didn't
## Usage
### Prerequisites
The script requires `nvd` to be in PATH. Use either:
**Option 1: direnv (recommended)**
```bash
# Allow direnv in the repository (one-time setup)
direnv allow
# direnv will automatically load the dev shell when you cd into the directory
cd /home/ppetru/projects/alo-cluster
# nvd is now in PATH
```
**Option 2: nix develop**
```bash
# Enter dev shell manually
nix develop
# Now run the script
./scripts/diff-configs.sh
```
### Quick Start
```bash
# Compare all hosts (summary)
./scripts/diff-configs.sh
# Compare with detailed path listing
./scripts/diff-configs.sh -v c1
# Compare with content diffs of changed files (deep mode)
./scripts/diff-configs.sh --deep c1
# Compare only x86_64 hosts (avoid slow ARM cross-compilation)
./scripts/diff-configs.sh c1 c2 c3 zippy chilly sparky
# Verbose mode with multiple hosts
./scripts/diff-configs.sh --verbose c1 c2 c3
# Via flake app
nix run .#diff-configs
# Show help
./scripts/diff-configs.sh --help
```
### Typical Workflow
```bash
# 1. Make changes to configurations
vim common/impermanence.nix
# 2. Stage changes (required for flake to see them)
git add common/impermanence.nix
# 3. Check what would change if you committed now
# For quick feedback, compare only x86_64 hosts first:
./scripts/diff-configs.sh c1 c2 c3 zippy chilly sparky
# 4. Review output, make adjustments if needed
# 5. If changes look good and affect ARM hosts, check those too:
./scripts/diff-configs.sh stinky alo-cloud-1
# 6. Commit when satisfied
git commit -m "Refactor impermanence config"
```
## Output Explanation
### No Changes
```
━━━ c1 ━━━
Building current... done
Building HEAD... done
✓ No changes
```
This host's configuration is identical between current and HEAD.
### Changes Detected
```
━━━ stinky ━━━
Building current... done
Building HEAD... done
⚠ Configuration changed
<<< /nix/store/abc-nixos-system-stinky-25.05 (HEAD)
>>> /nix/store/xyz-nixos-system-stinky-25.05 (current)
Version changes:
[C] octoprint: 1.9.3 -> 1.10.0
[A+] libcamera: ∅ -> 0.1.0
Closure size: 1500 -> 1520 (5 paths added, 2 paths removed, +3, +15.2 MB)
```
Legend:
- `[C]` - Changed package version
- `[A+]` - Added package
- `[R-]` - Removed package
- `[U.]` - Updated (same version, rebuilt)
### Verbose Mode (--verbose)
With `-v` or `--verbose`, also shows the actual store paths that changed:
```
━━━ c1 ━━━
Building current... done
Building HEAD... done
⚠ Configuration changed
[nvd summary as above]
Changed store paths:
Removed (17 paths):
- config.fish
- system-units
- home-manager-generation
- etc-fuse.conf
... and 13 more
Added (17 paths):
- config.fish
- system-units
- home-manager-generation
- etc-fuse.conf
... and 13 more
```
This is useful when nvd shows "No version changes" but paths still changed (e.g., refactors that rebuild config files).
### Deep Mode (--deep)
With `-d` or `--deep`, shows actual content diffs of changed files within store paths (implies verbose):
```
━━━ c1 ━━━
Building current... done
Building HEAD... done
⚠ Configuration changed
[nvd summary and path listing as above]
Content diffs of changed files:
▸ etc-fuse.conf
@@ -1,2 +1,2 @@
-user_allow_other
+#user_allow_other
mount_max = 1000
▸ nixos-system-c1-25.05
activate:
@@ -108,7 +108,7 @@
echo "setting up /etc..."
-/nix/store/...-perl/bin/perl /nix/store/...-setup-etc.pl /nix/store/abc-etc/etc
+/nix/store/...-perl/bin/perl /nix/store/...-setup-etc.pl /nix/store/xyz-etc/etc
▸ unit-dbus.service
dbus.service:
@@ -1,5 +1,5 @@
[Service]
+Environment="LD_LIBRARY_PATH=/nix/store/.../systemd/lib"
Environment="LOCALE_ARCHIVE=..."
```
**What it shows**:
- Matches changed paths by basename (e.g., both have "config.fish")
- Diffs important files: activate scripts, etc/*, *.conf, *.fish, *.service, *.nix
- Shows unified diff format (lines added/removed)
- Limits to first 50 lines per file
**When to use**:
- When you need to know **what exactly changed** in config files
- Debugging unexpected configuration changes
- Reviewing refactors that don't change package versions
- Understanding why a host rebuilt despite "No version changes"
### Build Failures
```
━━━ broken-host ━━━
Building current... FAILED
Error: attribute 'foo' missing
```
If a host fails to build, the error is shown and the script continues with other hosts.
## How It Works
1. **Discovers hosts**: Queries `deploy.nodes` from flake to get all configured hosts
2. **Creates worktree**: Uses `git worktree` to check out HEAD in a temporary directory
3. **Builds configurations**: Builds `config.system.build.toplevel` for each host in both locations
4. **Compares with nvd**: Runs `nvd diff` to show package-level changes
5. **Cleans up**: Removes temporary worktree automatically
## Important Notes
### Git Staging Required
Flakes only evaluate files that are tracked by git. To make changes visible:
```bash
# Stage new files
git add new-file.nix
# Stage changes to existing files
git add modified-file.nix
# Or stage everything
git add .
```
Unstaged changes to tracked files **are** visible (flake uses working tree content).
### Performance
- First run may be slow (building all configurations)
- Subsequent runs benefit from Nix evaluation cache
- Typical runtime: 1-5 minutes depending on changes
- **ARM cross-compilation is slow**: Use host filtering to avoid building ARM hosts when not needed
- Example: `./scripts/diff-configs.sh c1 c2 c3` (x86_64 only, fast)
- vs `./scripts/diff-configs.sh` (includes stinky/alo-cloud-1, slow)
### When to Use
**Good use cases**:
- Refactoring shared modules (like impermanence)
- Updating common configurations
- Before committing significant changes
- Verifying deploy target consistency
**Not needed for**:
- Adding a single new host
- Trivial one-host changes
- Documentation updates
## Troubleshooting
### "Not in a git repository"
```bash
cd /home/ppetru/projects/alo-cluster
./scripts/diff-configs.sh
```
### "No changes detected"
All changes are already committed. Stage some changes first:
```bash
git add .
```
### Build failures for all hosts
Check flake syntax:
```bash
nix flake check
```
### nvd not found
Install nvd:
```bash
nix profile install nixpkgs#nvd
```
(Already included in workstation-node.nix packages)
## Related Tools
- `nvd` - Package diff tool (used internally)
- `nix diff-closures` - Low-level closure diff
- `nix store diff-closures` - Alternative diff command
- `deploy-rs` - Actual deployment tool
## See Also
- `common/global/show-changelog.nix` - Shows changes during system activation
- `docs/RASPBERRY_PI_SD_IMAGE.md` - SD image building process

docs/RASPBERRY_PI_SD_IMAGE.md Normal file

@@ -0,0 +1,98 @@
# Raspberry Pi SD Image Building and Deployment
Guide for building and deploying NixOS SD card images for Raspberry Pi hosts (e.g., stinky).
## Overview
Raspberry Pi hosts use a different deployment strategy than regular NixOS hosts:
- **First deployment**: Build and flash an SD card image
- **Subsequent updates**: Use `deploy-rs` like other hosts
## Architecture
### Storage Layout
**Partition structure** (automatically created by NixOS):
- `/boot/firmware` - FAT32 partition (label: `FIRMWARE`)
- Contains Raspberry Pi firmware, U-Boot bootloader, device trees
- `/` - tmpfs (in-memory, ephemeral root)
- 2GB RAM disk, wiped on every boot
- `/nix` - ext4 partition (label: `NIXOS_SD`)
- Nix store and persistent data
- Contains `/nix/persist` directory for impermanence
### Impermanence with tmpfs
Unlike btrfs-based hosts that use `/persist`, Pi hosts use `/nix/persist`:
- Root filesystem is tmpfs (no disk writes, auto-wiped)
- Single ext4 partition mounted at `/nix`
- Persistent data stored in `/nix/persist/` (directory, not separate mount)
- Better for SD card longevity (fewer writes)
**Persisted paths**:
- `/nix/persist/var/lib/nixos` - System state
- `/nix/persist/home/ppetru` - User home directory
- `/nix/persist/etc` - SSH host keys, machine-id
- Service-specific: `/nix/persist/var/lib/octoprint`, etc.
## Building the SD Image
### Prerequisites
- ARM64 emulation enabled on build machine:
```nix
boot.binfmt.emulatedSystems = [ "aarch64-linux" ];
```
(Already configured in `workstation-node.nix`)
### Build Command
```bash
# Build SD image for stinky
nix build .#packages.aarch64-linux.stinky-sdImage
# Result location
ls -lh result/sd-image/
# nixos-sd-image-stinky-25.05-*.img.zst (compressed with zstd)
```
**Build location**: Defined in `flake.nix`:
```nix
packages.aarch64-linux.stinky-sdImage =
self.nixosConfigurations.stinky.config.system.build.sdImage;
```
## Flashing the SD Card
### Find SD Card Device
```bash
# Before inserting SD card
lsblk
# Insert SD card, then check again
lsblk
# Look for new device, typically:
# - /dev/sdX (USB SD card readers)
# - /dev/mmcblk0 (built-in SD card slots)
```
**Warning**: Double-check the device! Wrong device = data loss.
### Flash Image
```bash
# Decompress and flash in one command
zstd -d -c result/sd-image/*.img.zst | sudo dd of=/dev/sdX bs=4M status=progress conv=fsync
# Or decompress first, then flash
unzstd result/sd-image/*.img.zst
sudo dd if=result/sd-image/*.img of=/dev/sdX bs=4M status=progress conv=fsync
```
### Eject SD Card
```bash
sudo eject /dev/sdX
```

@@ -1,3 +1,7 @@
 * remote docker images used, can't come up if internet is down
 * local docker images pulled from gitea, can't come up if gitea isn't up (yet)
+* traefik-oidc-auth plugin downloaded from GitHub at startup (cached in /data/services/traefik/plugins-storage)
 * renovate system of some kind
+* vector (or other log ingestion) everywhere, consider moving it off docker if possible
+* monitor backup-persist success/fail

149
flake.lock generated

@@ -62,11 +62,11 @@
"systems": "systems" "systems": "systems"
}, },
"locked": { "locked": {
"lastModified": 1761247699, "lastModified": 1763766218,
"narHash": "sha256-yMQCRsD6F6eyt0ckCbAHH3W59mav7rbn9hwfUWd+rHU=", "narHash": "sha256-CM694zS6IeO/tFvUW7zhlb8t67+6L9QfvCDgQy0nVyQ=",
"owner": "nix-community", "owner": "nix-community",
"repo": "browser-previews", "repo": "browser-previews",
"rev": "5fc2e2d88f87b46b72767fd6fc2d4af7d983f2c7", "rev": "04f8550aa62ccda42a6eb839a4ccf6cdcf3d953d",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -84,11 +84,11 @@
"utils": "utils" "utils": "utils"
}, },
"locked": { "locked": {
"lastModified": 1756719547, "lastModified": 1762286984,
"narHash": "sha256-N9gBKUmjwRKPxAafXEk1EGadfk2qDZPBQp4vXWPHINQ=", "narHash": "sha256-9I2H9x5We6Pl+DBYHjR1s3UT8wgwcpAH03kn9CqtdQc=",
"owner": "serokell", "owner": "serokell",
"repo": "deploy-rs", "repo": "deploy-rs",
"rev": "125ae9e3ecf62fb2c0fd4f2d894eb971f1ecaed2", "rev": "9c870f63e28ec1e83305f7f6cb73c941e699f74f",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -105,11 +105,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1741473158, "lastModified": 1762521437,
"narHash": "sha256-kWNaq6wQUbUMlPgw8Y+9/9wP0F8SHkjy24/mN3UAppg=", "narHash": "sha256-RXN+lcx4DEn3ZS+LqEJSUu/HH+dwGvy0syN7hTo/Chg=",
"owner": "numtide", "owner": "numtide",
"repo": "devshell", "repo": "devshell",
"rev": "7c9e793ebe66bcba8292989a68c0419b737a22a0", "rev": "07bacc9531f5f4df6657c0a02a806443685f384a",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -125,11 +125,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1760701190, "lastModified": 1764110879,
"narHash": "sha256-y7UhnWlER8r776JsySqsbTUh2Txf7K30smfHlqdaIQw=", "narHash": "sha256-xanUzIb0tf3kJ+PoOFmXEXV1jM3PjkDT/TQ5DYeNYRc=",
"owner": "nix-community", "owner": "nix-community",
"repo": "disko", "repo": "disko",
"rev": "3a9450b26e69dcb6f8de6e2b07b3fc1c288d85f5", "rev": "aecba248f9a7d68c5d1ed15de2d1c8a4c994a3c5",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -141,7 +141,6 @@
"ethereum-nix": { "ethereum-nix": {
"inputs": { "inputs": {
"devshell": "devshell", "devshell": "devshell",
"flake-compat": "flake-compat_2",
"flake-parts": "flake-parts", "flake-parts": "flake-parts",
"flake-utils": "flake-utils_2", "flake-utils": "flake-utils_2",
"foundry-nix": "foundry-nix", "foundry-nix": "foundry-nix",
@@ -153,11 +152,11 @@
"treefmt-nix": "treefmt-nix" "treefmt-nix": "treefmt-nix"
}, },
"locked": { "locked": {
"lastModified": 1761217923, "lastModified": 1764174664,
"narHash": "sha256-usNQQSwmaHdHiRttmH4no/CVTUyEP+sIoAkkRMgdu0g=", "narHash": "sha256-CYAjcXbI6RzQ3cWKiW/u3ZiJCeVX9PQd2J0+V8zX7c8=",
"owner": "nix-community", "owner": "nix-community",
"repo": "ethereum.nix", "repo": "ethereum.nix",
"rev": "8c3827adc7e1ea75b43ad3d7c4f9ab9acc3b6273", "rev": "e3a1e2d86a6bc1ef25bdb395d9c770b471d53e7f",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -183,21 +182,6 @@
} }
}, },
"flake-compat_2": { "flake-compat_2": {
"locked": {
"lastModified": 1746162366,
"narHash": "sha256-5SSSZ/oQkwfcAz/o/6TlejlVGqeK08wyREBQ5qFFPhM=",
"owner": "nix-community",
"repo": "flake-compat",
"rev": "0f158086a2ecdbb138cd0429410e44994f1b7e4b",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "flake-compat",
"type": "github"
}
},
"flake-compat_3": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1747046372, "lastModified": 1747046372,
@@ -218,11 +202,11 @@
"nixpkgs-lib": "nixpkgs-lib" "nixpkgs-lib": "nixpkgs-lib"
}, },
"locked": { "locked": {
"lastModified": 1760813311, "lastModified": 1762980239,
"narHash": "sha256-lbHQ7FXGzt6/IygWvJ1lCq+Txcut3xYYd6VIpF1ojkg=", "narHash": "sha256-8oNVE8TrD19ulHinjaqONf9QWCKK+w4url56cdStMpM=",
"owner": "hercules-ci", "owner": "hercules-ci",
"repo": "flake-parts", "repo": "flake-parts",
"rev": "4e627ac2e1b8f1de7f5090064242de9a259dbbc8", "rev": "52a2caecc898d0b46b2b905f058ccc5081f842da",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -239,11 +223,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1760948891, "lastModified": 1763759067,
"narHash": "sha256-TmWcdiUUaWk8J4lpjzu4gCGxWY6/Ok7mOK4fIFfBuU4=", "narHash": "sha256-LlLt2Jo/gMNYAwOgdRQBrsRoOz7BPRkzvNaI/fzXi2Q=",
"owner": "hercules-ci", "owner": "hercules-ci",
"repo": "flake-parts", "repo": "flake-parts",
"rev": "864599284fc7c0ba6357ed89ed5e2cd5040f0c04", "rev": "2cccadc7357c0ba201788ae99c4dfa90728ef5e0",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -324,11 +308,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1759569036, "lastModified": 1762247499,
"narHash": "sha256-FuxbXLDArxD1NeRR8zNnsb8Xww5/+qdMwzN1m8Kow/M=", "narHash": "sha256-dPBqjoBcP3yczY7EUQP6BXf58wauRl+lZVZ/fabgq3E=",
"owner": "shazow", "owner": "shazow",
"repo": "foundry.nix", "repo": "foundry.nix",
"rev": "47ba6d3b02bf3faaa857d3572df82ff186d5279a", "rev": "ae6473c7190edea0e505f433293688014b556b29",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -368,11 +352,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1758463745, "lastModified": 1763992789,
"narHash": "sha256-uhzsV0Q0I9j2y/rfweWeGif5AWe0MGrgZ/3TjpDYdGA=", "narHash": "sha256-WHkdBlw6oyxXIra/vQPYLtqY+3G8dUVZM8bEXk0t8x4=",
"owner": "nix-community", "owner": "nix-community",
"repo": "home-manager", "repo": "home-manager",
"rev": "3b955f5f0a942f9f60cdc9cacb7844335d0f21c3", "rev": "44831a7eaba4360fb81f2acc5ea6de5fde90aaa3",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -732,11 +716,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1760846226, "lastModified": 1763870992,
"narHash": "sha256-xmU8kAsRprJiTGBTaGrwmjBP3AMA9ltlrxHKFuy5JWc=", "narHash": "sha256-NPyc76Wxmv/vAsXJ8F+/8fXECHYcv2YGSqdiSHp/F/A=",
"owner": "nix-community", "owner": "nix-community",
"repo": "nix-index-database", "repo": "nix-index-database",
"rev": "5024e1901239a76b7bf94a4cd27f3507e639d49e", "rev": "d7423982c7a26586aa237d130b14c8b302c7a367",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -745,13 +729,29 @@
"type": "github" "type": "github"
} }
}, },
"nixos-hardware": {
"locked": {
"lastModified": 1764328224,
"narHash": "sha256-hFyF1XQd+XrRx7WZCrGJp544dykexD8Q5SrJJZpEQYg=",
"owner": "NixOS",
"repo": "nixos-hardware",
"rev": "d62603a997438e19182af69d3ce7be07565ecad4",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "master",
"repo": "nixos-hardware",
"type": "github"
}
},
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1761016216, "lastModified": 1763948260,
"narHash": "sha256-G/iC4t/9j/52i/nm+0/4ybBmAF4hzR8CNHC75qEhjHo=", "narHash": "sha256-dY9qLD0H0zOUgU3vWacPY6Qc421BeQAfm8kBuBtPVE0=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "481cf557888e05d3128a76f14c76397b7d7cc869", "rev": "1c8ba8d3f7634acac4a2094eef7c32ad9106532c",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -763,11 +763,11 @@
}, },
"nixpkgs-lib": { "nixpkgs-lib": {
"locked": { "locked": {
"lastModified": 1754788789, "lastModified": 1761765539,
"narHash": "sha256-x2rJ+Ovzq0sCMpgfgGaaqgBSwY+LST+WbZ6TytnT9Rk=", "narHash": "sha256-b0yj6kfvO8ApcSE+QmA6mUfu8IYG6/uU28OFn4PaC8M=",
"owner": "nix-community", "owner": "nix-community",
"repo": "nixpkgs.lib", "repo": "nixpkgs.lib",
"rev": "a73b9c743612e4244d865a2fdee11865283c04e6", "rev": "719359f4562934ae99f5443f20aa06c2ffff91fc",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -793,11 +793,11 @@
}, },
"nixpkgs-unstable": { "nixpkgs-unstable": {
"locked": { "locked": {
"lastModified": 1760872779, "lastModified": 1763191728,
"narHash": "sha256-c5C907Raf9eY8f1NUXYeju9aUDlm227s/V0OptEbypA=", "narHash": "sha256-esRhOS0APE6k40Hs/jjReXg+rx+J5LkWw7cuWFKlwYA=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "63bdb5d90fa2fa11c42f9716ad1e23565613b07c", "rev": "1d4c88323ac36805d09657d13a5273aea1b34f0c",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -809,11 +809,11 @@
}, },
"nixpkgs-unstable_2": { "nixpkgs-unstable_2": {
"locked": { "locked": {
"lastModified": 1761114652, "lastModified": 1764242076,
"narHash": "sha256-f/QCJM/YhrV/lavyCVz8iU3rlZun6d+dAiC3H+CDle4=", "narHash": "sha256-sKoIWfnijJ0+9e4wRvIgm/HgE27bzwQxcEmo2J/gNpI=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "01f116e4df6a15f4ccdffb1bcd41096869fb385c", "rev": "2fad6eac6077f03fe109c4d4eb171cf96791faa4",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -849,11 +849,11 @@
"systems": "systems_5" "systems": "systems_5"
}, },
"locked": { "locked": {
"lastModified": 1761222236, "lastModified": 1764238240,
"narHash": "sha256-Um296vYIWjSjm4btukpjyVPLIz5ovQgoAtEYXFb/Jr4=", "narHash": "sha256-7Znm3koZ4sF+O41Y7rJqf651BPEbjIUYF3r9H23GRGw=",
"owner": "nix-community", "owner": "nix-community",
"repo": "nixvim", "repo": "nixvim",
"rev": "39443b9f5737b6f8ee0b654eb47d3a64daac1bd0", "rev": "f1e07ba53abd0fb4872a365cba45562144ad6130",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -872,11 +872,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1760652422, "lastModified": 1761730856,
"narHash": "sha256-C88Pgz38QIl9JxQceexqL2G7sw9vodHWx1Uaq+NRJrw=", "narHash": "sha256-t1i5p/vSWwueZSC0Z2BImxx3BjoUDNKyC2mk24krcMY=",
"owner": "NuschtOS", "owner": "NuschtOS",
"repo": "search", "repo": "search",
"rev": "3ebeebe8b6a49dfb11f771f761e0310f7c48d726", "rev": "e29de6db0cb3182e9aee75a3b1fd1919d995d85b",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -897,11 +897,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1760558991, "lastModified": 1762999930,
"narHash": "sha256-E8MMVwy7QNURBtCLiCjFXfv7uZUEg6QVSZLu4q9YGpk=", "narHash": "sha256-uKyxLwiN6sD6EmRSno66y1a8oqISr1XiWxbWHoMJT7I=",
"owner": "henrysipp", "owner": "henrysipp",
"repo": "omarchy-nix", "repo": "omarchy-nix",
"rev": "fba993c589920fbe68d9f7918e52903c476adad2", "rev": "308e0f85a0deb820c01cfbe1b4faee1daab4da12",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -912,7 +912,7 @@
}, },
"pre-commit-hooks": { "pre-commit-hooks": {
"inputs": { "inputs": {
"flake-compat": "flake-compat_3", "flake-compat": "flake-compat_2",
"gitignore": "gitignore", "gitignore": "gitignore",
"nixpkgs": [ "nixpkgs": [
"omarchy-nix", "omarchy-nix",
@@ -943,6 +943,7 @@
"home-manager": "home-manager", "home-manager": "home-manager",
"impermanence": "impermanence", "impermanence": "impermanence",
"nix-index-database": "nix-index-database", "nix-index-database": "nix-index-database",
"nixos-hardware": "nixos-hardware",
"nixpkgs": "nixpkgs", "nixpkgs": "nixpkgs",
"nixpkgs-unstable": "nixpkgs-unstable_2", "nixpkgs-unstable": "nixpkgs-unstable_2",
"nixvim": "nixvim", "nixvim": "nixvim",
@@ -957,11 +958,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1760998189, "lastModified": 1764021963,
"narHash": "sha256-ee2e1/AeGL5X8oy/HXsZQvZnae6XfEVdstGopKucYLY=", "narHash": "sha256-1m84V2ROwNEbqeS9t37/mkry23GBhfMt8qb6aHHmjuc=",
"owner": "Mic92", "owner": "Mic92",
"repo": "sops-nix", "repo": "sops-nix",
"rev": "5a7d18b5c55642df5c432aadb757140edfeb70b3", "rev": "c482a1c1bbe030be6688ed7dc84f7213f304f1ec",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -1068,11 +1069,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1760889407, "lastModified": 1762938485,
"narHash": "sha256-ppIp04fmz+BaTpJs1nIOmPADg02asfQFrFbhb3SmxsE=", "narHash": "sha256-AlEObg0syDl+Spi4LsZIBrjw+snSVU4T8MOeuZJUJjM=",
"owner": "numtide", "owner": "numtide",
"repo": "treefmt-nix", "repo": "treefmt-nix",
"rev": "3f258dead9fed51f53862366d3a6bc1b622ee7cb", "rev": "5b4ee75aeefd1e2d5a1cc43cf6ba65eba75e83e4",
"type": "github" "type": "github"
}, },
"original": { "original": {

View File

@@ -38,6 +38,7 @@
inputs.nixpkgs.follows = "nixpkgs"; inputs.nixpkgs.follows = "nixpkgs";
inputs.home-manager.follows = "home-manager"; inputs.home-manager.follows = "home-manager";
}; };
nixos-hardware.url = "github:NixOS/nixos-hardware/master";
}; };
outputs = outputs =
@@ -53,6 +54,7 @@
sops-nix, sops-nix,
browser-previews, browser-previews,
omarchy-nix, omarchy-nix,
nixos-hardware,
... ...
}@inputs: }@inputs:
let let
@@ -157,12 +159,14 @@
c2 = mkHost "x86_64-linux" "minimal" [ ./hosts/c2 ]; c2 = mkHost "x86_64-linux" "minimal" [ ./hosts/c2 ];
c3 = mkHost "x86_64-linux" "minimal" [ ./hosts/c3 ]; c3 = mkHost "x86_64-linux" "minimal" [ ./hosts/c3 ];
alo-cloud-1 = mkHost "aarch64-linux" "cloud" [ ./hosts/alo-cloud-1 ]; alo-cloud-1 = mkHost "aarch64-linux" "cloud" [ ./hosts/alo-cloud-1 ];
zippy = mkHost "x86_64-linux" "minimal" [ zippy = mkHost "x86_64-linux" "minimal" [ ./hosts/zippy ];
ethereum-nix.nixosModules.default
./hosts/zippy
];
chilly = mkHost "x86_64-linux" "workstation" [ ./hosts/chilly ]; chilly = mkHost "x86_64-linux" "workstation" [ ./hosts/chilly ];
sparky = mkHost "x86_64-linux" "desktop" [ ./hosts/sparky ]; sparky = mkHost "x86_64-linux" "minimal" [ ./hosts/sparky ];
beefy = mkHost "x86_64-linux" "desktop" [ ./hosts/beefy ];
stinky = mkHost "aarch64-linux" "minimal" [
nixos-hardware.nixosModules.raspberry-pi-4
./hosts/stinky
];
}; };
deploy = { deploy = {
@@ -224,8 +228,53 @@
}; };
}; };
}; };
beefy = {
hostname = "beefy";
profiles = {
system = {
user = "root";
path = (deployPkgsFor "x86_64-linux").deploy-rs.lib.activate.nixos self.nixosConfigurations.beefy;
}; };
}; };
};
stinky = {
hostname = "stinky";
profiles = {
system = {
user = "root";
path = (deployPkgsFor "aarch64-linux").deploy-rs.lib.activate.nixos self.nixosConfigurations.stinky;
};
};
};
};
};
# SD card image for stinky (Raspberry Pi 4)
packages.aarch64-linux.stinky-sdImage = self.nixosConfigurations.stinky.config.system.build.sdImage;
# Apps - utility scripts
apps.x86_64-linux.diff-configs = {
type = "app";
program = "${(pkgsFor "x86_64-linux").writeShellScriptBin "diff-configs" (builtins.readFile ./scripts/diff-configs.sh)}/bin/diff-configs";
};
apps.aarch64-linux.diff-configs = {
type = "app";
program = "${(pkgsFor "aarch64-linux").writeShellScriptBin "diff-configs" (builtins.readFile ./scripts/diff-configs.sh)}/bin/diff-configs";
};
# Development shells
devShells.x86_64-linux.default = (pkgsFor "x86_64-linux").mkShell {
packages = with (pkgsFor "x86_64-linux"); [
nvd
];
};
devShells.aarch64-linux.default = (pkgsFor "aarch64-linux").mkShell {
packages = with (pkgsFor "aarch64-linux"); [
nvd
];
};
checks = builtins.mapAttrs (system: deployLib: deployLib.deployChecks self.deploy) deploy-rs.lib; checks = builtins.mapAttrs (system: deployLib: deployLib.deployChecks self.deploy) deploy-rs.lib;

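The flake changes above expose the new hosts and tooling as ordinary outputs: an SD card image for stinky, deploy-rs nodes for beefy and stinky, and the diff-configs helper as a flake app. A hedged sketch of driving those outputs from a checkout (attribute and node names come from the diff above; the deploy-rs invocation is the standard upstream one and the repo may wrap it differently):

  # Build the compressed Raspberry Pi 4 SD card image declared above
  nix build .#packages.aarch64-linux.stinky-sdImage

  # Push the new hosts with deploy-rs; node names match the deploy.nodes entries above
  nix run github:serokell/deploy-rs -- .#beefy
  nix run github:serokell/deploy-rs -- .#stinky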
View File

@@ -12,7 +12,7 @@
MOSH_SERVER_NETWORK_TMOUT = 604800; MOSH_SERVER_NETWORK_TMOUT = 604800;
NOMAD_ADDR = "http://nomad.service.consul:4646"; NOMAD_ADDR = "http://nomad.service.consul:4646";
LESS = "-F -i -M -+S -R -w -X -z-4"; LESS = "-F -i -M -+S -R -w -X -z-4";
SYSTEMD_LESS = "FiM+SRwXz-4"; SYSTEMD_LESS = "FiM+SRwX";
NIX_LD = "${pkgs.glibc}/lib/ld-linux-x86-64.so.2"; NIX_LD = "${pkgs.glibc}/lib/ld-linux-x86-64.so.2";
NIX_LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath [ NIX_LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath [
pkgs.stdenv.cc.cc pkgs.stdenv.cc.cc
@@ -26,5 +26,13 @@
shellAliases = { shellAliases = {
reload-home-manager-config = "home-manager switch --flake ${builtins.toString ./.}"; reload-home-manager-config = "home-manager switch --flake ${builtins.toString ./.}";
}; };
file.".ssh/rc".text = ''
#!/bin/sh
if test "$SSH_AUTH_SOCK"; then
ln -sf "$SSH_AUTH_SOCK" "$HOME/.ssh/ssh_auth_sock"
fi
'';
file.".ssh/rc".executable = true;
}; };
} }

View File

@@ -2,6 +2,9 @@
{ {
imports = [ ./workstation.nix ]; imports = [ ./workstation.nix ];
# Override ghostty to use unstable version (1.2.0+) for ssh-terminfo support
programs.ghostty.package = pkgs.unstable.ghostty;
wayland.windowManager.hyprland = { wayland.windowManager.hyprland = {
enable = true; enable = true;
settings = { settings = {
@@ -15,4 +18,10 @@
"$browser" = "google-chrome-stable --new-window --ozone-platform=wayland"; "$browser" = "google-chrome-stable --new-window --ozone-platform=wayland";
}; };
}; };
# Extend ghostty configuration from omarchy-nix
programs.ghostty.settings = {
# Automatically handle TERM compatibility for SSH (requires ghostty 1.2.0+)
shell-integration-features = "ssh-terminfo";
};
} }

View File

@@ -327,7 +327,6 @@
enable = true; enable = true;
shellAbbrs = { shellAbbrs = {
fix-ssh = "eval $(tmux show-env | grep ^SSH_AUTH_SOCK | sed 's/=/ /;s/^/set /')";
diff-persist = "sudo rsync -amvxx --dry-run --no-links --exclude '/tmp/*' --exclude '/root/*' / /persist/ | rg -v '^skipping|/$'"; diff-persist = "sudo rsync -amvxx --dry-run --no-links --exclude '/tmp/*' --exclude '/root/*' / /persist/ | rg -v '^skipping|/$'";
}; };
@@ -398,6 +397,12 @@
setw -g automatic-rename on setw -g automatic-rename on
set -g set-titles on set -g set-titles on
# first, unset update-environment[SSH_AUTH_SOCK] (idx 3), to prevent
# the client from overriding the global value
set-option -g -u update-environment[3]
# And set the global value to our static symlink'd path:
set-environment -g SSH_AUTH_SOCK $HOME/.ssh/ssh_auth_sock
''; '';
}; };
}; };

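The two changes above work as a pair: the ~/.ssh/rc hook earlier in this diff symlinks whatever agent socket sshd hands out to a fixed path, and tmux is pinned to that fixed path so long-lived sessions keep a working agent across reconnects. A quick sanity check of the chain after reattaching (standard OpenSSH and tmux commands; the path is the one configured above):

  ls -l ~/.ssh/ssh_auth_sock               # should point at the newest sshd agent socket
  tmux show-environment -g SSH_AUTH_SOCK   # should print the static ~/.ssh/ssh_auth_sock path
  ssh-add -l                               # lists the forwarded keys if the chain is intact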
View File

@@ -1,8 +1,8 @@
{ pkgs, inputs, ... }: { pkgs, lib, inputs, ... }:
{ {
imports = [ imports = [
../../common/global ../../common/global
../../common/cloud-node.nix # Minimal system with Consul ../../common/minimal-node.nix
./hardware.nix ./hardware.nix
./reverse-proxy.nix ./reverse-proxy.nix
]; ];
@@ -12,4 +12,27 @@
networking.hostName = "alo-cloud-1"; networking.hostName = "alo-cloud-1";
services.tailscaleAutoconnect.authkey = "tskey-auth-kbdARC7CNTRL-pNQddmWV9q5C2sRV3WGep5ehjJ1qvcfD"; services.tailscaleAutoconnect.authkey = "tskey-auth-kbdARC7CNTRL-pNQddmWV9q5C2sRV3WGep5ehjJ1qvcfD";
services.tailscale = {
enable = true;
useRoutingFeatures = lib.mkForce "server"; # enables IPv4/IPv6 forwarding + loose rp_filter
extraUpFlags = [ "--advertise-exit-node" ];
};
networking.nat = {
enable = true;
externalInterface = "enp1s0";
internalInterfaces = [ "tailscale0" ];
};
networking.firewall = {
enable = lib.mkForce true;
allowedTCPPorts = [ 80 443 ]; # Public web traffic only
allowedUDPPorts = [ 41641 ]; # Tailscale
trustedInterfaces = [ "tailscale0" ]; # Full access via VPN
};
services.openssh = {
settings.PasswordAuthentication = false; # Keys only
};
} }

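With routing features forced to "server" and the exit node advertised, other machines on the tailnet can route their traffic through alo-cloud-1. A hedged example from a client, using the standard tailscale CLI and the node name configured above (the exit node still has to be approved in the admin console unless auto-approval is set up):

  sudo tailscale set --exit-node=alo-cloud-1
  # or, with older clients:
  sudo tailscale up --exit-node=alo-cloud-1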
View File

@@ -1,7 +1,7 @@
{ pkgs, ... }: { pkgs, config, ... }:
{ {
environment.systemPackages = [ pkgs.traefik ]; environment.systemPackages = [ pkgs.traefik ];
environment.persistence."/persist".files = [ "/acme/acme.json" ]; environment.persistence.${config.custom.impermanence.persistPath}.files = [ "/acme/acme.json" ];
services.traefik = { services.traefik = {
enable = true; enable = true;

24
hosts/beefy/default.nix Normal file
View File

@@ -0,0 +1,24 @@
{ pkgs, inputs, ... }:
{
imports = [
../../common/encrypted-btrfs-layout.nix
../../common/global
../../common/desktop-node.nix # Hyprland + GUI environment
../../common/cluster-member.nix # Consul + storage clients
../../common/cluster-tools.nix # Nomad CLI (no service)
./hardware.nix
];
diskLayout = {
mainDiskDevice = "/dev/disk/by-id/nvme-CT1000P3PSSD8_25164F81F31D";
#keyDiskDevice = "/dev/disk/by-id/usb-Intenso_Micro_Line_22080777650797-0:0";
keyDiskDevice = "/dev/sda";
};
networking.hostName = "beefy";
networking.cluster.primaryInterface = "enp1s0";
services.tailscaleAutoconnect.authkey = "tskey-auth-k79UsDTw2v11CNTRL-oYqji35BE9c7CqM89Dzs9cBF14PmqYsi";
# Enable all SysRq functions for debugging hangs
boot.kernel.sysctl."kernel.sysrq" = 1;
}

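Setting kernel.sysrq = 1 unlocks every SysRq function, which is mainly useful for poking a box that has stopped responding over SSH. A reminder of the standard interface (nothing here is specific to this repo):

  # Dump blocked (hung) tasks to the kernel log, then read it:
  echo w | sudo tee /proc/sysrq-trigger
  sudo dmesg | tail -50
  # Emergency sync, remount read-only, reboot - last resort only:
  for k in s u b; do echo $k | sudo tee /proc/sysrq-trigger; sleep 2; done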
19
hosts/beefy/hardware.nix Normal file
View File

@@ -0,0 +1,19 @@
{
config,
lib,
pkgs,
modulesPath,
...
}:
{
imports = [ (modulesPath + "/installer/scan/not-detected.nix") ];
boot.initrd.availableKernelModules = [ "nvme" "xhci_pci" "usbhid" "usb_storage" "sd_mod" ];
boot.initrd.kernelModules = [ ];
boot.kernelModules = [ "kvm-amd" ];
boot.extraModulePackages = [ ];
nixpkgs.hostPlatform = "x86_64-linux";
hardware.cpu.amd.updateMicrocode = true; # AMD CPU (matches kvm-amd above)
}

BIN
hosts/beefy/key.bin Normal file

Binary file not shown.

View File

@@ -23,8 +23,8 @@
networking.hostName = "c1"; networking.hostName = "c1";
services.tailscaleAutoconnect.authkey = "tskey-auth-k2nQ771YHM11CNTRL-YVpoumL2mgR6nLPG51vNhRpEKMDN7gLAi"; services.tailscaleAutoconnect.authkey = "tskey-auth-k2nQ771YHM11CNTRL-YVpoumL2mgR6nLPG51vNhRpEKMDN7gLAi";
# NFS standby configuration: accept replication from zippy
nfsServicesStandby.replicationKeys = [ nfsServicesStandby.replicationKeys = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHyTKsMCbwCIlMcC/aopgz5Yfx/Q9QdlWC9jzMLgYFAV root@zippy-replication" "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHyTKsMCbwCIlMcC/aopgz5Yfx/Q9QdlWC9jzMLgYFAV root@zippy-replication"
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIO5s73FSUiysHijWRGYCJY8lCtZkX1DGKAqp2671REDq root@sparky-replication"
]; ];
} }

View File

@@ -21,13 +21,11 @@
}; };
networking.hostName = "chilly"; networking.hostName = "chilly";
networking.cluster.primaryInterface = "br0";
services.tailscaleAutoconnect.authkey = "tskey-auth-kRXS9oPyPm11CNTRL-BE6YnbP9J6ZZuV9dHkX17ZMnm1JGdu93"; services.tailscaleAutoconnect.authkey = "tskey-auth-kRXS9oPyPm11CNTRL-BE6YnbP9J6ZZuV9dHkX17ZMnm1JGdu93";
services.consul.interface.advertise = lib.mkForce "br0";
networking.useNetworkd = true; networking.useNetworkd = true;
systemd.network.enable = true; systemd.network.enable = true;
# Wait for br0 to be routable before considering network online
systemd.network.wait-online.extraArgs = [ "--interface=br0:routable" ];
# not useful and potentially a security loophole # not useful and potentially a security loophole
services.resolved.llmnr = "false"; services.resolved.llmnr = "false";

View File

@@ -3,18 +3,24 @@
imports = [ imports = [
../../common/encrypted-btrfs-layout.nix ../../common/encrypted-btrfs-layout.nix
../../common/global ../../common/global
../../common/desktop-node.nix # Hyprland + GUI environment ../../common/cluster-member.nix
../../common/cluster-member.nix # Consul + storage clients ../../common/nomad-worker.nix
../../common/cluster-tools.nix # Nomad CLI (no service) ../../common/nfs-services-server.nix
# To move NFS server role to another host:
# 1. Follow procedure in docs/NFS_FAILOVER.md
# 2. Replace above line with: ../../common/nfs-services-standby.nix
# 3. Add nfsServicesStandby.replicationKeys with the new server's public key
./hardware.nix ./hardware.nix
]; ];
diskLayout = { diskLayout = {
mainDiskDevice = "/dev/disk/by-id/nvme-Samsung_SSD_970_EVO_Plus_250GB_S4EUNF0MA33640P"; mainDiskDevice = "/dev/disk/by-id/nvme-KIOXIA-EXCERIA_with_Heatsink_SSD_84GF7016FA4S";
#keyDiskDevice = "/dev/disk/by-id/usb-Intenso_Micro_Line_22080777660468-0:0"; #keyDiskDevice = "/dev/disk/by-id/usb-Intenso_Micro_Line_22080777660468-0:0";
keyDiskDevice = "/dev/sda"; keyDiskDevice = "/dev/sda";
}; };
networking.hostName = "sparky"; networking.hostName = "sparky";
services.tailscaleAutoconnect.authkey = "tskey-auth-kBCKN7QNv411CNTRL-n5Td7Jw7h3TAjubEeLmy1THy33JvD9JnM"; services.tailscaleAutoconnect.authkey = "tskey-auth-k6VC79UrzN11CNTRL-rvPmd4viyrQ261ifCrfTrQve7c2FesxrG";
nfsServicesServer.standbys = [ "c1" ];
} }

61
hosts/stinky/default.nix Normal file
View File

@@ -0,0 +1,61 @@
{
lib,
pkgs,
config,
...
}:
{
imports = [
../../common/global
../../common/impermanence-common.nix # Impermanence with custom root config (see hardware.nix)
../../common/resource-limits.nix
../../common/sshd.nix
../../common/user-ppetru.nix
../../common/wifi.nix
# Note: No systemd-boot.nix - Raspberry Pi uses generic-extlinux-compatible (from sd-image module)
./hardware.nix
];
hardware = {
raspberry-pi."4".apply-overlays-dtmerge.enable = true;
deviceTree = {
enable = true;
filter = "*rpi-4-*.dtb";
};
};
networking.hostName = "stinky";
# Configure impermanence for tmpfs root (filesystem config in hardware.nix)
custom.impermanence.persistPath = "/nix/persist";
# Tailscale configuration
services.tailscaleAutoconnect.authkey = "tskey-auth-kZC8HX3wSw11CNTRL-7QvqxAphyzM7QeMUTKXv2Ng2RK4XCmg9A";
# OctoPrint for 3D printer
services.octoprint = {
enable = true;
};
# Persist OctoPrint data
environment.persistence.${config.custom.impermanence.persistPath}.directories = [
"/var/lib/octoprint"
];
# Pi HQ Camera support
boot.kernelModules = [ "bcm2835-v4l2" ];
environment.systemPackages = with pkgs; [
libcamera
libraspberrypi
raspberrypi-eeprom
];
# Firewall: Allow access to OctoPrint
networking.firewall.allowedTCPPorts = [
5000 # OctoPrint
];
# Override global default (stinky is a new system with 25.05)
system.stateVersion = lib.mkForce "25.05";
}

73
hosts/stinky/hardware.nix Normal file
View File

@@ -0,0 +1,73 @@
{
config,
lib,
pkgs,
modulesPath,
...
}:
{
imports = [
(modulesPath + "/installer/sd-card/sd-image-aarch64.nix")
];
# Raspberry Pi 4 platform
nixpkgs.hostPlatform = lib.mkDefault "aarch64-linux";
# Disable ZFS (not needed, and broken with latest kernel)
boot.supportedFilesystems.zfs = lib.mkForce false;
# Boot configuration - provided by sd-image-aarch64.nix
# (grub disabled, generic-extlinux-compatible enabled, U-Boot setup)
# /boot/firmware is automatically configured by sd-image module
# Device: /dev/disk/by-label/FIRMWARE (vfat)
# tmpfs root with impermanence
# Override sd-image module's ext4 root definition with mkForce
fileSystems."/" = lib.mkForce {
device = "none";
fsType = "tmpfs";
options = [
"defaults"
"size=2G"
"mode=755"
];
};
# The SD partition contains /nix/store and /nix/persist at its root
# Mount it at a hidden location, then bind mount its /nix to /nix
fileSystems."/mnt/nixos-sd" = {
device = "/dev/disk/by-label/NIXOS_SD";
fsType = "ext4";
options = [ "noatime" ];
neededForBoot = true;
};
# Bind mount /nix from the SD partition
fileSystems."/nix" = {
device = "/mnt/nixos-sd/nix";
fsType = "none";
options = [ "bind" ];
neededForBoot = true;
depends = [ "/mnt/nixos-sd" ];
};
# No swap on SD card (wear concern)
swapDevices = [ ];
# SD image build configuration
sdImage = {
compressImage = true;
# Populate root with directories
populateRootCommands = ''
mkdir -p ./files/boot
${config.boot.loader.generic-extlinux-compatible.populateCmd} -c ${config.system.build.toplevel} -d ./files/boot
# Create /nix/persist directory structure for impermanence
mkdir -p ./files/nix/persist/var/lib/nixos
mkdir -p ./files/nix/persist/home/ppetru
mkdir -p ./files/nix/persist/etc
'';
};
}

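Because compressImage = true, building the stinky-sdImage output shown earlier in the flake yields a zstd-compressed image. A hedged flashing sketch; the result layout is the standard sd-image module one and /dev/sdX is a placeholder that must be verified with lsblk before writing:

  zstd -dc result/sd-image/*.img.zst | sudo dd of=/dev/sdX bs=4M status=progress conv=fsync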
View File

@@ -5,13 +5,6 @@
../../common/global ../../common/global
../../common/cluster-member.nix # Consul + storage clients ../../common/cluster-member.nix # Consul + storage clients
../../common/nomad-worker.nix # Nomad client (runs jobs) ../../common/nomad-worker.nix # Nomad client (runs jobs)
# NOTE: zippy is NOT a server - no nomad-server.nix import
# ../../common/ethereum.nix
../../common/nfs-services-server.nix # NFS server for /data/services
# To move NFS server role to another host:
# 1. Follow procedure in docs/NFS_FAILOVER.md
# 2. Replace above line with: ../../common/nfs-services-standby.nix
# 3. Add nfsServicesStandby.replicationKeys with the new server's public key
./hardware.nix ./hardware.nix
]; ];
@@ -23,7 +16,4 @@
networking.hostName = "zippy"; networking.hostName = "zippy";
services.tailscaleAutoconnect.authkey = "tskey-auth-ktKyQ59f2p11CNTRL-ut8E71dLWPXsVtb92hevNX9RTjmk4owBf"; services.tailscaleAutoconnect.authkey = "tskey-auth-ktKyQ59f2p11CNTRL-ut8E71dLWPXsVtb92hevNX9RTjmk4owBf";
# NFS server configuration: replicate to c1 as standby
nfsServicesServer.standbys = [ "c1" ];
} }

374
scripts/diff-configs.sh Executable file
View File

@@ -0,0 +1,374 @@
#!/usr/bin/env bash
# Compare NixOS configurations between current state and HEAD
# Shows what would change if you committed the current changes
#
# Requirements: nvd must be in PATH
# Run inside `nix develop` or with direnv enabled
set -euo pipefail
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[0;33m'
NC='\033[0m' # No Color
# Normalize nix store paths by replacing 32-char hashes with placeholder
normalize_nix_paths() {
sed -E 's|/nix/store/[a-z0-9]{32}-|/nix/store/HASH-|g'
}
# Filter diff output to remove hunks where only nix store hashes differ
# Returns: filtered diff (empty if only hash changes), exit code 0 if real changes found
filter_hash_only_diffs() {
local diff_output="$1"
local current_hunk=""
local output=""
local has_real_changes=false
# Process line by line
while IFS= read -r line || [ -n "$line" ]; do
if [[ "$line" =~ ^@@ ]]; then
# New hunk starts - process previous one if it exists
if [ -n "$current_hunk" ]; then
if hunk_has_real_changes "$current_hunk"; then
output+="$current_hunk"$'\n'
has_real_changes=true
fi
fi
# Start new hunk
current_hunk="$line"$'\n'
else
# Add line to current hunk
current_hunk+="$line"$'\n'
fi
done <<< "$diff_output"
# Process last hunk
if [ -n "$current_hunk" ]; then
if hunk_has_real_changes "$current_hunk"; then
output+="$current_hunk"
has_real_changes=true
fi
fi
# Remove trailing newline
output="${output%$'\n'}"
if [ "$has_real_changes" = true ]; then
echo "$output"
return 0
else
return 1
fi
}
# Check if a diff hunk has real changes (not just hash changes)
hunk_has_real_changes() {
local hunk="$1"
# Use temp file to avoid bash here-string issues
local temp_hunk=$(mktemp)
printf '%s' "$hunk" > "$temp_hunk"
local minus_lines=()
local plus_lines=()
# Extract - and + lines (skip @@ and context lines)
while IFS= read -r line || [ -n "$line" ]; do
if [[ "$line" =~ ^- && ! "$line" =~ ^--- ]]; then
minus_lines+=("${line:1}") # Remove the - prefix
elif [[ "$line" =~ ^\+ && ! "$line" =~ ^\+\+\+ ]]; then
plus_lines+=("${line:1}") # Remove the + prefix
fi
done < "$temp_hunk"
rm -f "$temp_hunk"
# If counts don't match, there are structural changes
if [ ${#minus_lines[@]} -ne ${#plus_lines[@]} ]; then
return 0 # Has real changes
fi
# If no changes at all, skip
if [ ${#minus_lines[@]} -eq 0 ]; then
return 1 # No real changes
fi
# Compare each pair of lines after normalization
for i in "${!minus_lines[@]}"; do
local minus_norm=$(echo "${minus_lines[$i]}" | normalize_nix_paths)
local plus_norm=$(echo "${plus_lines[$i]}" | normalize_nix_paths)
if [ "$minus_norm" != "$plus_norm" ]; then
return 0 # Has real changes
fi
done
# All changes are hash-only
return 1
}
# Check for nvd
if ! command -v nvd &> /dev/null; then
echo "Error: nvd not found in PATH"
echo "Run this script inside 'nix develop' or enable direnv"
exit 1
fi
# Parse flags
verbose=false
deep=false
hosts_args=()
while [[ $# -gt 0 ]]; do
case $1 in
-h|--help)
echo "Usage: $0 [-v|--verbose] [-d|--deep] [HOST...]"
echo "Compare NixOS configurations between working tree and HEAD"
echo ""
echo "Options:"
echo " -v, --verbose Show detailed list of added/removed store paths"
echo " -d, --deep Show content diffs of changed files (implies -v)"
echo ""
echo "Arguments:"
echo " HOST One or more hostnames to compare (default: all)"
echo ""
echo "Examples:"
echo " $0 # Compare all hosts (summary)"
echo " $0 -v c1 # Compare c1 with path list"
echo " $0 --deep c1 # Compare c1 with content diffs"
echo " $0 c1 c2 c3 # Compare only c1, c2, c3"
exit 0
;;
-v|--verbose)
verbose=true
shift
;;
-d|--deep)
deep=true
verbose=true # deep implies verbose
shift
;;
*)
hosts_args+=("$1")
shift
;;
esac
done
# Restore positional parameters
set -- "${hosts_args[@]}"
# Check if we're in a git repo
if ! git rev-parse --git-dir > /dev/null 2>&1; then
echo "Error: Not in a git repository"
exit 1
fi
# Check if there are any changes
if git diff --quiet && git diff --cached --quiet; then
echo "No changes detected between working tree and HEAD"
exit 0
fi
echo "Comparing configurations: current working tree vs HEAD"
echo "======================================================="
echo
# Get list of hosts to compare
if [ $# -gt 0 ]; then
# Use hosts provided as arguments
hosts="$@"
echo -e "${YELLOW}Comparing selected hosts: $hosts${NC}"
else
# Get all hosts from flake
echo "Discovering all hosts from flake..."
hosts=$(nix eval --raw .#deploy.nodes --apply 'nodes: builtins.concatStringsSep "\n" (builtins.attrNames nodes)' 2>/dev/null)
if [ -z "$hosts" ]; then
echo "Error: No hosts found in flake"
exit 1
fi
fi
echo
# Create temp worktree at HEAD
worktree=$(mktemp -d)
trap "git worktree remove --force '$worktree' &>/dev/null || true; rm -rf '$worktree'" EXIT
echo "Creating temporary worktree at HEAD..."
git worktree add --quiet --detach "$worktree" HEAD
echo "Building and comparing configurations..."
echo
any_changes=false
for host in $hosts; do
echo -e "${BLUE}━━━ $host ━━━${NC}"
# Build current (with uncommitted changes)
echo -n " Building current... "
if ! current=$(nix build --no-link --print-out-paths \
".#nixosConfigurations.$host.config.system.build.toplevel" 2>/dev/null); then
echo -e "${RED}FAILED${NC}"
# Re-run to show error
nix build --no-link ".#nixosConfigurations.$host.config.system.build.toplevel" 2>&1 | head -20 | sed 's/^/ /'
continue
fi
echo "done"
# Build HEAD
echo -n " Building HEAD... "
if ! head=$(nix build --no-link --print-out-paths \
"$worktree#nixosConfigurations.$host.config.system.build.toplevel" 2>/dev/null); then
echo -e "${RED}FAILED${NC}"
# Re-run to show error
nix build --no-link "$worktree#nixosConfigurations.$host.config.system.build.toplevel" 2>&1 | head -20 | sed 's/^/ /'
continue
fi
echo "done"
# Compare
if [ "$head" = "$current" ]; then
echo -e " ${GREEN}✓ No changes${NC}"
else
any_changes=true
echo -e " ${RED}⚠ Configuration changed${NC}"
echo
# Show nvd summary
if ! nvd diff "$head" "$current" 2>&1; then
echo -e " ${RED}(nvd diff failed - see error above)${NC}"
fi
# Show detailed closure diff if verbose
if [ "$verbose" = true ]; then
echo
echo -e " ${YELLOW}Changed store paths:${NC}"
# Get paths unique to HEAD and current
head_only=$(comm -23 <(nix-store -q --requisites "$head" 2>/dev/null | sort) \
<(nix-store -q --requisites "$current" 2>/dev/null | sort))
current_only=$(comm -13 <(nix-store -q --requisites "$head" 2>/dev/null | sort) \
<(nix-store -q --requisites "$current" 2>/dev/null | sort))
# Count changes
removed_count=$(echo "$head_only" | wc -l)
added_count=$(echo "$current_only" | wc -l)
echo -e " ${RED}Removed ($removed_count paths):${NC}"
echo "$head_only" | head -10 | sed 's|^/nix/store/[^-]*-| - |'
if [ "$removed_count" -gt 10 ]; then
echo " ... and $((removed_count - 10)) more"
fi
echo
echo -e " ${GREEN}Added ($added_count paths):${NC}"
echo "$current_only" | head -10 | sed 's|^/nix/store/[^-]*-| - |'
if [ "$added_count" -gt 10 ]; then
echo " ... and $((added_count - 10)) more"
fi
# Show content diffs if deep mode
if [ "$deep" = true ]; then
echo
echo -e " ${YELLOW}Content diffs of changed files:${NC}"
# Extract basenames for matching
declare -A head_paths
while IFS= read -r path; do
[ -z "$path" ] && continue
basename="${path#/nix/store/[a-z0-9]*-}"
head_paths["$basename"]="$path"
done <<< "$head_only"
# Find matching pairs and diff them
matched=false
while IFS= read -r path; do
[ -z "$path" ] && continue
basename="${path#/nix/store/[a-z0-9]*-}"
# Check if we have a matching path in head
if [ -n "${head_paths[$basename]:-}" ]; then
old_path="${head_paths[$basename]}"
new_path="$path"
matched=true
echo
echo -e " ${BLUE}$basename${NC}"
# If it's a directory, diff all files within it
if [ -d "$old_path" ] && [ -d "$new_path" ]; then
# Count files to avoid processing huge directories
file_count=$(find "$new_path" -maxdepth 3 -type f 2>/dev/null | wc -l)
# Skip very large directories (e.g., system-path with 900+ files)
if [ "$file_count" -gt 100 ]; then
echo " (skipping directory with $file_count files - too large)"
else
# Diff all files in the directory
for file in $(find "$new_path" -maxdepth 3 -type f 2>/dev/null); do
[ -z "$file" ] && continue
relpath="${file#$new_path/}"
old_file="$old_path/$relpath"
if [ -f "$old_file" ] && [ -f "$file" ]; then
# Check if file is text
if file "$file" | grep -q "text"; then
# Get diff output
diff_output=$(diff -u "$old_file" "$file" 2>/dev/null | head -50 | tail -n +3 || true)
# Filter hash-only changes
if [ -n "$diff_output" ]; then
filtered_diff=$(filter_hash_only_diffs "$diff_output" || true)
if [ -n "$filtered_diff" ]; then
echo -e " ${YELLOW}$relpath:${NC}"
echo "$filtered_diff" | sed 's/^/ /'
fi
fi
fi
fi
done
fi
# If it's a file, diff it directly
elif [ -f "$old_path" ] && [ -f "$new_path" ]; then
if file "$new_path" | grep -q "text"; then
# Get diff output
diff_output=$(diff -u "$old_path" "$new_path" 2>/dev/null | head -50 | tail -n +3 || true)
# Filter hash-only changes
if [ -n "$diff_output" ]; then
filtered_diff=$(filter_hash_only_diffs "$diff_output" || true)
if [ -n "$filtered_diff" ]; then
echo "$filtered_diff" | sed 's/^/ /'
else
echo " (only hash changes)"
fi
fi
else
echo " (binary file)"
fi
fi
fi
done <<< "$current_only"
if [ "$matched" = false ]; then
echo " (no matching paths found to compare)"
fi
fi
fi
fi
echo
done
if [ "$any_changes" = false ]; then
echo -e "${GREEN}✓ All configurations unchanged${NC}"
else
echo -e "${RED}⚠ Some configurations changed - review carefully before committing${NC}"
fi

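The script insists on nvd being in PATH, so the devShell added in the flake (which includes nvd) is the natural place to run it; it is also exposed as a flake app. A usage sketch based on the script's own --help:

  # Inside the dev shell (provides nvd):
  nix develop -c ./scripts/diff-configs.sh           # summary for all hosts
  nix develop -c ./scripts/diff-configs.sh -v c1     # per-path detail for one host
  # or via the flake app, if nvd is already on PATH:
  nix run .#diff-configs -- --deep c1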
25
secrets/beefy.yaml Normal file
View File

@@ -0,0 +1,25 @@
kopia: ENC[AES256_GCM,data:/6jqArNgeBoGnEdJ1eshrsG8RJs=,iv:2nNdrKczus70QDdvO/MC2wJubGnAf3M8PtzSe1aoBF4=,tag:aOoktsqhQLXr0YkjYZq4OQ==,type:str]
sops:
age:
- recipient: age1df9ukkmg9yn9cjeheq9m6wspa420su8qarmq570rdvf2de3rl38saqauwn
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBBODczb2FsMis0cVIvN2FK
UG1QVWt6U1MvaU1Rd0dXWDhmK2RpZ2dSUXlRClZ5ZGRZRk1vUFp0eVVwVzA5R0Ni
RUdjVFh5T3o5ZllFaHVFS1pCWjNzVkEKLS0tIGoxNWhhZUhVSms5cEJEa3lZQWlz
aXNBMWhUNFBHTDJUZEtDeU85Z1pPU1kKWNm6Wk+Mbc9QIXMXiwleIvP4hlGLvmpI
u+udOAinxTxmB9LOXG+y3iPuS9n0B6Y+4WbTjKm9jEqaqNoW8JypJA==
-----END AGE ENCRYPTED FILE-----
- recipient: age1cs8uqj243lspyp042ueu5aes4t3azgyuaxl9au70ggrl2meulq4sgqpc7y
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBjTG1MZjM1bVNqeFNqeTgy
UXZBWVVacVVsaHJKMkJ1ZWdCbG4zS2tBWDFjCnpSbUw0ZFZMVENNNmYyTWZFdndL
RmxUajdsU1l1cmlZa2NQQjJublVsMmMKLS0tIHNpZmRpY2hIbVVZSUdGNHM2WnN6
R21jYU96SGVHOUxmZjlldS96K2VqbWcKC28wLdT/zx6yHluCLqB/cFRmc0Alq6AH
DqmAaxRhOg/SI5ljCX1gE5BB9rNIJ1Gq8+li7wCpsdfLMr5Yy/HAsw==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2025-11-04T20:25:17Z"
mac: ENC[AES256_GCM,data:llS+R5Pj51ZUkU8FkJx2KqqE4D42Uno3Btn31FadIl4kFamnrL6uJjbiNEJpFFO+SchXD3l7VCatbBhMSoxsPYd+rdDRT2klq+iIcZU/k413GC87xdmHIwWE+L2pujv36iBjtM+HJTSvXI0xOxjUmzH4FPdEa1r3Z5yGNnCI+Q4=,iv:ld6pSEzvKTSZtBb+QjHyqqj2VT05YegxBrUR2yxhjKY=,tag:7/vYBh8lDOcVXJL3esTIZQ==,type:str]
unencrypted_suffix: _unencrypted
version: 3.11.0

View File

@@ -4,74 +4,92 @@ sops:
- recipient: age1df9ukkmg9yn9cjeheq9m6wspa420su8qarmq570rdvf2de3rl38saqauwn - recipient: age1df9ukkmg9yn9cjeheq9m6wspa420su8qarmq570rdvf2de3rl38saqauwn
enc: | enc: |
-----BEGIN AGE ENCRYPTED FILE----- -----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB1TExMb1NCc0xJR05nRE1o YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB4WExPaEtTdEljYkF1ZUQw
aVJzT0oyVGxNNld1ZFFRdUQ0azBMUkRxTVFZCk5RcktWWHlWMHFmTjcrZkR6QmR1 UHhRNDJZb2wydWVUaXFmR213SjJsNDFKU0FjCnJ3Tk1yZDZkU3orcHZ2UDY3elRi
WjlNZjRFYk1wOEpENXlQeDl4Q2tZSE0KLS0tIDNETkxmWjlXQk5NZmpHTGpnOVor WW9FMXU0cDNjV3QrOWo3MVB0UzMwakUKLS0tIEhQVldBVWhmR0k0WW9jTE0xc2ZW
dEgwb09UWGRzUUhwNlNCendsVTZPZjgKP0cpeidAWKX9CtLGv4fHdOGqaCQLOEI1 RWp4ZjlVN0FWaURlRHNONDhXdmJpS1EKZVXYyFRFD9KdyWuMoQytkQk4VxpBRyAV
qj4mkKpTYzChtkfiE4lx92uC0O53A8vQ/BlDL9uLEgRJLHmh/5R+pg== lF4FA99wjGMhHFNQExnqYYLYtFkA18/SB6pkneOjdhIvEr0IFLJEqg==
-----END AGE ENCRYPTED FILE----- -----END AGE ENCRYPTED FILE-----
- recipient: age1gtyw202hd07hddac9886as2cs8pm07e4exlnrgfm72lync75ng9qc5fjac - recipient: age1gtyw202hd07hddac9886as2cs8pm07e4exlnrgfm72lync75ng9qc5fjac
enc: | enc: |
-----BEGIN AGE ENCRYPTED FILE----- -----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBPT1ptNXhVY2xCZVArT3h6 YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBjZ29wdk1aOHZJYWFjaG9v
ZUJ5M2xURWsvd1dUNmNLS2xCdy8vRzRpbWlBClhRWkZ4aXNicklUeHJORmU5K0RS RGxsek95QmtrZS9xRWdKMFdLSHZ3NmlZRGxzCnBvRXZkYnkxdkhJWkY0Ukg1M0dE
ZDA3WmZCaTZGQmFqVWpRR3NNZHFHc28KLS0tIFdUSFN6dXN1cnJEd0x6SkNPUTJC dWc3QWtCdkV5Ymd4MkxhZWl0ZDNCZXcKLS0tIGMrVWtNNWtscm9STUN1aHVZc2Ny
ZnJtRmlWSUNud1c2bHF6VFZnK2JPQVUKOmVTEQeUTgTDIYkMA9dKYWf2JrB2lBhA Vm1oaFFTbTBpRWxuR3gxbUZ0YkZieVkKdaSSXrDzAUGkj3w8/YcFZaJTiUUEbJdw
WpJ/1qXAENxN94UtAhCP6pK9bKPCCm0JLUZ+YAuDuDoC5TAsg7Durg== GjuLz7bxX8+HQvhSbu6/KCwG6R4j1eO5Zg1w0wYtyeUOV1HfZEGQog==
-----END AGE ENCRYPTED FILE----- -----END AGE ENCRYPTED FILE-----
- recipient: age16yqffw4yl5jqvsr7tyd883vn98zw0attuv9g5snc329juff6dy3qw2w5wp - recipient: age16yqffw4yl5jqvsr7tyd883vn98zw0attuv9g5snc329juff6dy3qw2w5wp
enc: | enc: |
-----BEGIN AGE ENCRYPTED FILE----- -----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB3dUZmNjd0bGlGMXJGRHNW YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBIMGpibmNRUDRFaFVOTDB4
M1lYd2tIZGZKMEpXVVExUlQ5c2l3TWJFdzNzClVvaGQzMjBjYktIa0E4TGZBTWRJ RVdTc1RrTmRPb0dlZGlpcGxuRlJ1L2w5MVVBCi9HdXNGZmdSaVZsQWRoa2RpVDNV
SWJiRjVHdUxVNkpVLzJCVU8yOUI0NGMKLS0tIGN5QzAycEtWSnVoYWROSmtLaFNv OXBtS0pwYnhjS2hCUk10UUtwam4zMWcKLS0tIFV0dVpQNGpSOEVoZnE5OGpCZkxa
MXdrNytSU1B2SVl4TmFjclFNMjdzSzgK88Tny535JqjoIyomHXcFOd/EdUTOozeL MFMxSG95dmJncGJzR29mQkVzNjFIQUEKrJ0MDTBmiwiAaLt7CJ1pjlxuFvZJuRkR
x23zilMdVJVSDD0tPTzqpvxgcgopE44B9V3J28GeUBTW8osBUOQ9og== EuLYOYLdVaxgZ442io5OE7wme0P4LLcxSAreDG84GVs67JHvsFE89g==
-----END AGE ENCRYPTED FILE----- -----END AGE ENCRYPTED FILE-----
- recipient: age10zxwwufrf5uu9cv9p9znse2ftfm74q9ce893us6cnvxjc7e3ypcqy709dy - recipient: age14aml5s3sxksa8qthnt6apl3pu6egxyn0cz7pdzzvp2yl6wncad0q56udyj
enc: | enc: |
-----BEGIN AGE ENCRYPTED FILE----- -----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBDMGNuTTFkNEFQLzgyRFhp YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBucHdSNGNyRkVITmNDVkpx
alROV0FkeG83aGQvcUM1VXJacUJyNFFMTmlnCmp2N3NTMWZhTUxIZ1hoWE1pazJ3 QVFKK0VucFNSMnNqSGRFRmRoRWpsZ0srUUhrCkwwY2pDSkJ0aGlqc3U3ZXNJUVl0
emN4SDNqU3NJSWFMNGlkSXBMaGRDWmsKLS0tIEI2OGJUSGR5SHRwWlhMNGI4dFhE bXZMSVg3bDhaK3d1MTBnL1BQVUhkMUkKLS0tIDdxSk1DMVpsbnI1QlFnNEFJYXRD
WVlKM05XdUVKL0ZZVVcyS2NwdFg2dU0KQ8rk4s2u0eSoOcTd5ibepQx8pPsUNC8m RTNxYUxlUGxsM1NvekZ4R1hQVE9KMk0KocfE75DTfQMj/RsznOdeF82aO8WwO4HD
aX1iIMwJ9S7bOaorlK9Hwdnrfpbb1Gsj8q9KNnw6BWT08oCypxrQMw== 1xakOM2FHoHi60Q5uOWzfGtz0i+R4ue9hafa5Esn01TOjc3qWSlW3A==
-----END AGE ENCRYPTED FILE-----
- recipient: age1me78u46409q9ez6fj0qanrfffc5e9kuq7n7uuvlljfwwc2mdaezqmyzxhx
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBYTEhiSDZvZTg3ZWxJRXlu
a0ozOXRVL2lia054SkNLc2tEYmlZUCt1NW1JCkorK0hub1pLQTE0QThEUDRDWXJV
YWtGamNxMTFIYjVDT2RqTXh0Z2hVTjAKLS0tIGxoRTAwc3FKVVNSQndtbTZmc3BR
QnMrK2lMT25tR1ErV2xvS01JWWswVUEKtrGaLETMfY2D8qmgml/fgGxkvQLoiMTP
l3a7Y6kwutuzRnmW1tnWv7yoPbTn+BDwfOwBcnesl2x0aJ5iLUKruA==
-----END AGE ENCRYPTED FILE-----
- recipient: age1cs8uqj243lspyp042ueu5aes4t3azgyuaxl9au70ggrl2meulq4sgqpc7y
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBqMFJ1bzQxWjlZTmF6eEl0
d3VVd0VsbGZGdDBKRG9uNEhaMThmWmpuQ1hFClA1aDhwRU1Pb2pibGh6T0pqdmlq
S3cxM0wyWWlCL3U5TjV4Vkg4blRsUVkKLS0tIENnYk5GbmZWbFo4cElON1Z0ZVlv
ZDdsci9rcG5Wc2V0NlQ3MWx1cFF4dUkKumFT4xtjGDBGK+/SV27Dh/vyGMJAEZNo
9gTmVLfR9vXVAXUdOMcqgo7Nl4OJCS4HrDxvVCoER/bVoQVRiPzuXw==
-----END AGE ENCRYPTED FILE----- -----END AGE ENCRYPTED FILE-----
- recipient: age1w5w4wfvtul3sge9mt205zvrkjaeh3qs9gsxhmq7df2g4dztnvv6qylup8z - recipient: age1w5w4wfvtul3sge9mt205zvrkjaeh3qs9gsxhmq7df2g4dztnvv6qylup8z
enc: | enc: |
-----BEGIN AGE ENCRYPTED FILE----- -----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB5aXVmQS8wWDY0ZVQxSXhV YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBCM2E5a2lsZGJzRnk5N3Rr
aDc1em5jUzVxajd1SnNwRE0xalJqS0F2R213CnpEcnVqNXRzTE45NmdyQmJOZFJu bWRwRlI2c0c4NzBNdEVqMGZFWTZtNDlvSzJFCmFPM05XbndsazRGWEw3Zy83dldm
cGJSeVdNbVM4cUhBTklVMzRBd3pOdGMKLS0tIDE1SUxWVFdDbEpPK2xxNFpqYzF5 eXhEZUZQZWk5bVNwaEk5SDRka0NOWjAKLS0tIHNvZ016Rjh5bmYwRUxyRWRydFpI
ZDFmSU1aSkUyM2VZY3VLZ01lTE9rdjgK2W3HVCG1YmJXvfX96W3wRJsYpgmF3f/o Z0NHYjFzem55bVNORGlVbVNxR2psc2cK6JpNZwznwgl61d/W9g+48/894TQRe4gJ
RnrcooHZQH4sQeY50CPKOI2F0YuaNL8T1Nq5fzoE5v+FCYcPTOPpUQ== nl4oDwRPbZwJZgdAKQVfTTujB0QbWpJc24mDGD4I4CydqTKwy6FN3A==
-----END AGE ENCRYPTED FILE----- -----END AGE ENCRYPTED FILE-----
- recipient: age1wwufz86tm3auxn6pn27c47s8rvu7en58rk00nghtaxsdpw0gya6qj6qxdt - recipient: age1wwufz86tm3auxn6pn27c47s8rvu7en58rk00nghtaxsdpw0gya6qj6qxdt
enc: | enc: |
-----BEGIN AGE ENCRYPTED FILE----- -----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB4VFJGMkhYYmxXSmx0RlQ0 YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBlK1A1eVdRQThQUHdqbHdk
UjVDWXdUcEd5bWRMWUZCd1RJa1g0NCtaRHlVCm5oaTVDZ0FnaWdULzF0b09pb1Ir b1MyMlBJUFluTm13ZWwwc1RNbThFZUMrNXhzCnRPTVhPSzUzM0VtaUVJbFl5Wllj
TmtIRENmd0pSc0MvbWZMK3R3bFllK0kKLS0tIHRNWFVwbTQ4bGo2SFBKVVQ4eVNq NUlndzc3Yzhjd1JSb3czajI3UmRDZ1kKLS0tIE03M1hab1MxU0I2VExBWlh2TnJC
b3NqakZtaWdKZ2lxSEhGMXJ4QUEwWWMKKqEIteqzi3wgr/5Bz2rfzn/WT3kyPNXI eGRXRTlsWmlpenJrVkMxakJZVTV0cE0KMQCKscSLnCu3NsurFFiDaUGjJbyIAwd0
AUWv1f7TZCe04gSvaW6U/ELkEwjt//1iOSmiz05dAX0B8V97mTXpbA== HTutCiuPYVI4zznQ3RZDBeO5L6a/twXxMRTePUCwOkRNWRWpzR9nxg==
-----END AGE ENCRYPTED FILE----- -----END AGE ENCRYPTED FILE-----
- recipient: age1jy7pe4530s8w904wtvrmpxvteztqy5ewdt92a7y3lq87sg9jce5qxxuydt - recipient: age1jy7pe4530s8w904wtvrmpxvteztqy5ewdt92a7y3lq87sg9jce5qxxuydt
enc: | enc: |
-----BEGIN AGE ENCRYPTED FILE----- -----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBtaFJXN3k1Znp2VGlTd2VZ YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB1VEJmMWlnemFGNExWYUI4
Sk9SYTVPU01SamN1MnhJcElLUWZuK01Xd3pFCjRFNDMrRXR6UFU3Rk95TTNVdXZG QWRwRktwODNvSmlEcGJORHNlQXNVeXVpbFNrCms0QUFNdDlrNjMxazU1UTcwc2JF
T09SWTdnTDVsUDBkeWRXWWw5ZGVabGMKLS0tIDB4WGxUVldFWGpqQnYzZUg1bFFF RC9JUnJsWmgyc01zZU9JQmxuM3V6STQKLS0tIGxQZGFsZ0pNTjQ3QW1sS0E2Y2RM
UnBIZU5iNE0wR2R6MUNnS1R3UytxNkkKleAh4sGMIUkqIuSwPuII+8+M8/+W+IIx aVVrNW1BNXQ5UDk1UEtVVWJPNHpwUFUKcArFPFknBj8ss1lD38YtMaB06L/ASeu5
hRBzSodKq2b1jmovQZ0liuvto7qGIHlV+/tIyPpckyuNRv+TxeZw7Q== u4ff0rTDx237snaSFg5RIJ+6uxX16p5ODg3xOYGOMkDeuTLdl2bg3A==
-----END AGE ENCRYPTED FILE----- -----END AGE ENCRYPTED FILE-----
- recipient: age1zjgqu3zks5kvlw6hvy6ytyygq7n25lu0uj2435zlf30smpxuy4hshpmfer - recipient: age1zjgqu3zks5kvlw6hvy6ytyygq7n25lu0uj2435zlf30smpxuy4hshpmfer
enc: | enc: |
-----BEGIN AGE ENCRYPTED FILE----- -----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBVNG1SN2FJaTRSYlBwSDdt YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSArWTNkaFlrQkJHRnd4cTBw
R0NCdjBZZlNWTkdLa00rNVIxU2V3Nmt1MGpNCnA5T2RnTGNLbFNndGw1cjEyRFVy N3dnTXk3SlJkQkZDdWpLcEpNQ2Z2RHZoVjBJCjBaK1MzbzdaaXluR1dFaFFNaGEx
aFFCNU9aVzZCRlAxMWdkUjVBQkZqd1EKLS0tIHBCbllXc0pPQ244RXg3eTcxcFRu VTNrVU0yeG9KQkhqUkYxU3VBM0E0R1UKLS0tIDJHek9vVldSZGN0M0c0UHcySGhk
V2VtWWY1M1Rha3ZGV2gvNDR3dTJ6TTAKBhdUhEM99DZmPjA6qMImXSw/eAMq/oyQ Z2RoZno4bmhidytlL2ZmNWUzNTcwcVEKXvgaO8Uo0R+Kc8lizLtVxmTi0W5XHjYw
QkMVyYxauZ5QiUtyAzhmL3BCdPerpClIlskbiKgCqs4w9R+VtAOgzA== 7evdCHQHmFl0vg/bGOJBmcTUhioJv06D0LR3XMl9I6ufXDNaT/NHxw==
-----END AGE ENCRYPTED FILE----- -----END AGE ENCRYPTED FILE-----
lastmodified: "2025-04-04T09:34:06Z" lastmodified: "2025-04-04T09:34:06Z"
mac: ENC[AES256_GCM,data:YIcRrsPparPfPaI2+MLlKsxu7M19H8nndOsrDLuh/5BXzIZNiuTIWyvxODyhI745rDwlibO+7Q0QctanhTl4+IzGaYtuY4i+rb+3dzBMpcdT2VAbtCHHxcltWeanRGFq2K3WM2tbnQCERst5kejfn0Razjq3UU5vNwfBsdJMwGc=,iv:izDxy0ufVnH8ImkZIngcYhGuj0PGpLqBD/ZDvQyE+5I=,tag:oYBUEQS52pr09h5OvOadNg==,type:str] mac: ENC[AES256_GCM,data:YIcRrsPparPfPaI2+MLlKsxu7M19H8nndOsrDLuh/5BXzIZNiuTIWyvxODyhI745rDwlibO+7Q0QctanhTl4+IzGaYtuY4i+rb+3dzBMpcdT2VAbtCHHxcltWeanRGFq2K3WM2tbnQCERst5kejfn0Razjq3UU5vNwfBsdJMwGc=,iv:izDxy0ufVnH8ImkZIngcYhGuj0PGpLqBD/ZDvQyE+5I=,tag:oYBUEQS52pr09h5OvOadNg==,type:str]

View File

@@ -4,20 +4,20 @@ sops:
- recipient: age1df9ukkmg9yn9cjeheq9m6wspa420su8qarmq570rdvf2de3rl38saqauwn - recipient: age1df9ukkmg9yn9cjeheq9m6wspa420su8qarmq570rdvf2de3rl38saqauwn
enc: | enc: |
-----BEGIN AGE ENCRYPTED FILE----- -----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBkdHVtQ3hHSjVuRkxRMklO YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBtSjhXazlWd3YwNVFKVkw4
NEJjeERFUDBMRkVTTGVGTTJIR1lwM25kZWpRCitlUFIvUTVPUloreEFIWjIvT1dL dDMydVFCN1lLeUJOWkxuSGJ1a0srNm9PaWswCm8yZ3hiOWFHUlAzNVRrck53OElD
S2hBY2NUWHdxOVhRejU0eUpZa1FMY00KLS0tIEZ3bDRIdlQzaVQ5U0kyRjYyeUor b056YmV4S2NtNnEzRkpnRVNEblV5blkKLS0tIG1ramoya3RHV1FJZGlFU2ZSeUtS
c1M2V2J1Q2R3alI3b3NoYk5SK1AzKzgKbOhSxwTpLr7wwbN+nY4aK+6WmpofBxNX KzJlbEsvYWlXaHhEQU5oOS9HaDdYSDAKvlhKgi4Pf8xVB5MnO33GWYg313mRdUGu
CEaEBz98KTTrSQ9Qvm1+/yep95l7i0HPQGdGwCRNKdvUoXzk1KalpQ== kFCs5b1N96x9JOS7zgnM0AKDY8IPBSe33tmDqtYygwPdkOys1PmZkw==
-----END AGE ENCRYPTED FILE----- -----END AGE ENCRYPTED FILE-----
- recipient: age10zxwwufrf5uu9cv9p9znse2ftfm74q9ce893us6cnvxjc7e3ypcqy709dy - recipient: age14aml5s3sxksa8qthnt6apl3pu6egxyn0cz7pdzzvp2yl6wncad0q56udyj
enc: | enc: |
-----BEGIN AGE ENCRYPTED FILE----- -----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBZb2oyd1ZGR2FXVFlhT2d3 YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBYTVovQld2RkRxaW90b3lR
NFVlU3VlR1BEWFRyTDBYZFFlQjZxUXVyK240CkdLRmk5M01ZTloyREtwQ3hpRkxZ NGFtbWVLZUNHdnlZVWkrL1RXUHBVeGdvSDJrClJmSmZRZmdjcy8rNnJBVmVUWDZq
RDRKYVlRVFVLYzVSenU3THhFc0ZrdjgKLS0tIG1iMWh1TTZIZDEwazVKY1g3NXg0 M2lPbDBhT0Y0NkJ5a1FNYnU3Zkl0TkEKLS0tIGxqM2h2TDB2akl4ODlYY042R1Z4
NFRKemkwcnBxR0NIbXBGcm8xejdUMjgKOAGxkrvtvf7Y9W5BteL12HuUWA/d5Bah ZVJWN3pZelFJR0Jid3JseEZKVFZtYmsKmKXQRjnghuF/s9z2Xk98sFvxic91fGa2
wVoeBK21Zxz/GodBpVCuDnJ5DwM3c+7O3jnvtTShIW00evDhJIvcvA== V7IGmpqAYQV3jJ1G4cjJxtpidQ6fLCqlnR+sq+y8+dT+LN7i+Zbnnw==
-----END AGE ENCRYPTED FILE----- -----END AGE ENCRYPTED FILE-----
lastmodified: "2025-10-19T17:33:13Z" lastmodified: "2025-10-19T17:33:13Z"
mac: ENC[AES256_GCM,data:IwEyBr/I7BJa0gWZ494dCT0ogyP2PbnUg5fLOn15vZAHIyYtTB3dI3gV5Lx7oPdqOPlI61MsShIYBnk0uBChpNu6O4oiGUfwvBfegzlDyHHERLx+S7nZpcwmf/3JoNXwq0f2OtOu8nA6Q1V4gVjFFNWUCAh5cq106vG1awsQkn0=,iv:j+JcVtKz2RfyWu55dUeJJTRK6prB9DGLvcjiAAdVySM=,tag:Pg5sKiLzYUFoN9Duu+nF0w==,type:str] mac: ENC[AES256_GCM,data:IwEyBr/I7BJa0gWZ494dCT0ogyP2PbnUg5fLOn15vZAHIyYtTB3dI3gV5Lx7oPdqOPlI61MsShIYBnk0uBChpNu6O4oiGUfwvBfegzlDyHHERLx+S7nZpcwmf/3JoNXwq0f2OtOu8nA6Q1V4gVjFFNWUCAh5cq106vG1awsQkn0=,iv:j+JcVtKz2RfyWu55dUeJJTRK6prB9DGLvcjiAAdVySM=,tag:Pg5sKiLzYUFoN9Duu+nF0w==,type:str]

25
secrets/stinky.yaml Normal file
View File

@@ -0,0 +1,25 @@
kopia: ENC[AES256_GCM,data:boi8V0Kn,iv:Kwe1hn44DJe9dpv8jVrJjwyblVouakuCdnEK9uotTkY=,tag:B5hrpRBP17kFVn4iy5TOlA==,type:str]
sops:
age:
- recipient: age1df9ukkmg9yn9cjeheq9m6wspa420su8qarmq570rdvf2de3rl38saqauwn
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA4aXRBWUg3QU12UWNBNXI4
RS91amdnOXhibXpiYjgrQ2FwR3J0VER1cEdjCnlaTXFSMzFPNmlnSTIvejJuMUFU
T2lUSDRQeFVtT1ZHb2xNZVNpWDJOTmsKLS0tIElHK3FVbUFSREMwcVN5V0tPSEtt
a2ZyNXFnZzBkeWZsU2docUxzQVMyUFkKkiEW3ovgVBLlBEHyx6hSXVp8PTeZ+2PL
kzW8AnQTi714iQqyN3NlkJ8r+1doGBr9U492KXpjdt1woY4MwMvWkA==
-----END AGE ENCRYPTED FILE-----
- recipient: age1me78u46409q9ez6fj0qanrfffc5e9kuq7n7uuvlljfwwc2mdaezqmyzxhx
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBCN0Fvdm5Mc3lqNDh6VEp5
Mk83N0dNU2pnWVFRODl5MllIaHdIUm5SclJJCjNPQVBGOGRIRlVVL1lzYU5ocjRx
Sis5VzZXN3AxYUpxNm9pZE5WUzZGaTQKLS0tIFlpL09vQmpDWjNJeXg5dUZZSm5O
S2ZOdExQdzJRcGdmUWtrRUpmVy9lY3cKJwEoO9WltW2FIFEylGuWBHwSJlnAIy8M
FFCmmApdkzJLwvQGg5kNC/4Xx34ZfNTTpePxh9qP0ASxUQASZo2urA==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2025-10-26T16:59:40Z"
mac: ENC[AES256_GCM,data:FlSv9PIcmX+oJNVaUpXIG2thzUvEb7bMGDOvIRgAFVzoUipIes0qdbU0R/pqogW0NpgbXNLhNBmemKfheGusngatJmbNwHT9Hqo7a82U9j1G302sziqrcz1pOxG79oacFEM+coWpXGgmMXYeNlQEihUvvvUt810VWBb3Hjba80g=,iv:6gSTUd2y9YxiOCzwQ/udLN46lgfwgWDgfSTOpaJpPmY=,tag:q/Ta6fejjKMg0TmZhNmy8Q==,type:str]
unencrypted_suffix: _unencrypted
version: 3.11.0

25
secrets/wifi.yaml Normal file
View File

@@ -0,0 +1,25 @@
wifi-password-pi: ENC[AES256_GCM,data:n5ZfyhBCrHx98uUxFQ==,iv:9SQHcIw252GeS0IxON3ThqOk02Wtlfu/Df6KMLAmokw=,tag:F5pAHlInkqVUQDg8MPnMgQ==,type:str]
sops:
age:
- recipient: age1df9ukkmg9yn9cjeheq9m6wspa420su8qarmq570rdvf2de3rl38saqauwn
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBUd1lyZG9GVHBZZHU0Wkl5
RFJ2NUdtUFRUbmd3aTRFV2dGaVA2S3RWOGk0CmlLV2ZYdERvb21iT0dlUk42TW5S
LzdxVlA1U1FpWkxIb1pMeUtRRm9NdFkKLS0tIGszaFM0dkhHeWZUcXc1dlo3SDBX
WjltV282VlJtTlBCRmdzOU16R0x5UUUKBTFArSUNWtq7r+HduxT0ChvYfjo8HtbG
KeYBoB9QwY5wNRMlk0AIlJVNLKW8A2tC9T8ehbtjol13H7PQK+wsQQ==
-----END AGE ENCRYPTED FILE-----
- recipient: age1me78u46409q9ez6fj0qanrfffc5e9kuq7n7uuvlljfwwc2mdaezqmyzxhx
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB4THVFa1p5c2l5V0pKckVC
YUdYbitJbUpjclAydG4yekxhbXdzeDNpbXdRCnRCZVI1cWJiQi9TdkR3Y0E5TklO
T2dHYXFKeW9KSkdXOWFnbWVRQUZOL28KLS0tIDVMVldvd0NWcU5QWkhDTTBmUTla
aUs0dTB3Y3RXTlBCOCtYSHdOMUYxdTgKQShxsJ+3EQU18uixmM3FlCe5C9Rl3oS5
gwZIrh0amSzX3f9SOjf42h1d+IDL/DMWAlSA/3XMx8TK9A1zKZDgVA==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2025-10-29T14:55:56Z"
mac: ENC[AES256_GCM,data:2zTEzx8UxOMHIytiufCHS/B1ci7kI05+SIE8ziMY8/ItoAYtt0zXXEgRWs0NLVb3P2vXMOhnXG4qO1o20UXt6Wqq9j1zXPVaQTQie4QSPDdX/8OXXi87Ggm3WQyeA1IfABacfL0D6XkNvxfMHGvMrnhYltPPgYDuNlgjnnjTm8o=,iv:FJMKLSlAenvSNUH6OmeGIR7f9Bzl3NwqaUaokoHEj50=,tag:WU4BnlLu5cKSbtiYL0mNKg==,type:str]
unencrypted_suffix: _unencrypted
version: 3.11.0

View File

@@ -27,7 +27,7 @@ job "adminer" {
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.adminer.entryPoints=websecure", "traefik.http.routers.adminer.entryPoints=websecure",
"traefik.http.routers.adminer.middlewares=authentik@file", "traefik.http.routers.adminer.middlewares=oidc-auth@file",
] ]
} }
} }

View File

@@ -1,118 +0,0 @@
job "authentik" {
datacenters = ["alo"]
group "auth" {
network {
port "http" {
# traefik forwardAuth hardcodes this port
static = 9000
}
port "https" {
to = 9443
}
port "metrics" {
to = 9300
}
}
task "server" {
driver = "docker"
config {
image = "ghcr.io/goauthentik/server:${var.authentik_version}"
ports = [
"http",
"https",
"metrics"
]
command = "server"
}
env {
AUTHENTIK_REDIS__HOST = "redis.service.consul"
AUTHENTIK_POSTGRESQL__HOST = "postgres.service.consul"
AUTHENTIK_POSTGRESQL__NAME = "${var.pg_db}"
AUTHENTIK_POSTGRESQL__USER = "${var.pg_user}"
AUTHENTIK_POSTGRESQL__PASSWORD = "${var.pg_password}"
AUTHENTIK_SECRET_KEY = "${var.secret_key}"
AUTHENTIK_EMAIL__HOST = "192.168.1.1"
AUTHENTIK_EMAIL__FROM = "authentik@paler.net"
}
resources {
cpu = 2000
memory = 1024
}
service {
name = "authentik"
port = "http"
tags = [
"traefik.enable=true",
# Main UI
"traefik.http.routers.authentik.entryPoints=websecure",
"traefik.http.routers.authentik.rule=Host(`authentik.v.paler.net`) || Host(`authentik.alo.land`)",
# Embedded outpost for forward auth
"traefik.http.routers.authentik-palernet.entryPoints=websecure",
"traefik.http.routers.authentik-palernet.rule=HostRegexp(`{subdomain:[a-z0-9-]+}.v.paler.net`) && PathPrefix(`/outpost.goauthentik.io/`)",
"traefik.http.routers.authentik-aloland.entryPoints=websecure",
"traefik.http.routers.authentik-aloland.rule=HostRegexp(`{subdomain:[a-z0-9-]+}.alo.land`) && PathPrefix(`/outpost.goauthentik.io/`)",
]
}
service {
name = "authentik-metrics"
port = "metrics"
tags = [ "metrics" ]
}
}
task "worker" {
driver = "docker"
config {
image = "ghcr.io/goauthentik/server:${var.authentik_version}"
command = "worker"
}
env {
AUTHENTIK_REDIS__HOST = "redis.service.consul"
AUTHENTIK_POSTGRESQL__HOST = "postgres.service.consul"
AUTHENTIK_POSTGRESQL__NAME = "${var.pg_db}"
AUTHENTIK_POSTGRESQL__USER = "${var.pg_user}"
AUTHENTIK_POSTGRESQL__PASSWORD = "${var.pg_password}"
AUTHENTIK_SECRET_KEY = "${var.secret_key}"
AUTHENTIK_EMAIL__HOST = "192.168.1.1"
AUTHENTIK_EMAIL__FROM = "authentik@paler.net"
}
resources {
memory = 600
}
}
}
}
variable "pg_user" {
type = string
default = "authentik"
}
variable "pg_password" {
type = string
default = "aQueiquuo6aiyah5eoch"
}
variable "pg_db" {
type = string
default = "authentik"
}
variable "secret_key" {
type = string
default = "uUzCYhGV93Z8wKLAScuGFqBskxyzSfG4cz6bnXq6McM67Ho7p9"
}
variable "authentik_version" {
type = string
default = "2025.6"
}

View File

@@ -37,7 +37,7 @@ job "beancount" {
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.finances.entryPoints=websecure", "traefik.http.routers.finances.entryPoints=websecure",
"traefik.http.routers.finances.middlewares=authentik@file", "traefik.http.routers.finances.middlewares=oidc-auth@file",
] ]
} }

View File

@@ -1,120 +0,0 @@
job "code-server" {
datacenters = ["alo"]
meta {
uuid = uuidv4()
}
group "code" {
network {
port "http" {
to = 8080
}
}
volume "appdata" {
type = "host"
read_only = false
source = "appdata"
}
volume "nix-store" {
type = "host"
read_only = true
source = "nix-store"
}
volume "sw" {
type = "host"
read_only = true
source = "sw"
}
task "server" {
driver = "docker"
config {
image = "codercom/code-server:latest"
ports = ["http"]
volumes = [
"/data/services/code:/home/coder",
]
}
env {
ANTHROPIC_API_KEY = "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"
}
user = "1000"
resources {
cpu = 1000
memory = 1024
}
service {
name = "code"
port = "http"
tags = [
"traefik.enable=true",
"traefik.http.routers.code.entryPoints=websecure",
"traefik.http.routers.code.middlewares=authentik@file",
]
}
volume_mount {
volume = "nix-store"
destination = "/nix/store"
}
volume_mount {
volume = "sw"
destination = "/sw"
}
}
task "setup" {
driver = "exec"
lifecycle {
hook = "prestart"
sidecar = false
}
config {
command = "sh"
args = ["-c", <<EOF
# Set up npm global directory
export NPM_CONFIG_PREFIX=/appdata/code/.npm-global
mkdir -p $NPM_CONFIG_PREFIX
# Check if we should update (weekly check)
WEEK_AGO=$(date -d '7 days ago' +%s 2>/dev/null || date -v-7d +%s 2>/dev/null || echo 0)
LAST_UPDATE=$(stat -c %Y /appdata/code/.claude-last-update 2>/dev/null || echo 0)
if [ ! -f /appdata/code/.claude-installed ] || [ $LAST_UPDATE -lt $WEEK_AGO ]; then
echo "Installing/updating Claude Code..."
/sw/bin/npm install -g @anthropic-ai/claude-code --prefix=$NPM_CONFIG_PREFIX
touch /appdata/code/.claude-installed
touch /appdata/code/.claude-last-update
else
echo "Claude Code is up to date (checked within last week)"
fi
EOF
]
}
user = "1000"
volume_mount {
volume = "appdata"
destination = "/appdata"
}
volume_mount {
volume = "nix-store"
destination = "/nix/store"
}
volume_mount {
volume = "sw"
destination = "/sw"
}
}
}
}

View File

@@ -49,7 +49,7 @@ job "evcc" {
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.evcc.entryPoints=websecure", "traefik.http.routers.evcc.entryPoints=websecure",
"traefik.http.routers.evcc.middlewares=authentik@file", "traefik.http.routers.evcc.middlewares=oidc-auth@file",
] ]
} }
} }

View File

@@ -51,6 +51,7 @@ job "gitea" {
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.gitea.entryPoints=websecure", "traefik.http.routers.gitea.entryPoints=websecure",
"traefik.http.services.gitea.loadBalancer.serversTransport=gitea-transport@file",
] ]
} }

View File

@@ -25,19 +25,22 @@ job "grafana" {
GF_SERVER_ROOT_URL = "https://grafana.v.paler.net" GF_SERVER_ROOT_URL = "https://grafana.v.paler.net"
GF_AUTH_BASIC_ENABLED = "false" GF_AUTH_BASIC_ENABLED = "false"
GF_AUTH_GENERIC_OAUTH_ENABLED = "true" GF_AUTH_GENERIC_OAUTH_ENABLED = "true"
GF_AUTH_GENERIC_OAUTH_NAME = "authentik" GF_AUTH_GENERIC_OAUTH_NAME = "Pocket ID"
GF_AUTH_GENERIC_OAUTH_CLIENT_ID = "E78NG1AZeW6FaAox0mUhaTSrHeqFgNkWG12My2zx" GF_AUTH_GENERIC_OAUTH_CLIENT_ID = "99e44cf2-ecc6-4e82-8882-129c017f8a4a"
GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET = "N7u2RfFZ5KVLdEkhlpUTzymGxeK5rLo9SYZLSGGBXJDr46p5g5uv1qZ4Jm2d1rP4aJX4PSzauZlxHhkG2byiBFMbdo6K742KXcEimZsOBFiNKeWOHxofYerBnPuoECQW" GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET = "NjJ9Uro4MK7siqLGSmkiQmjFuESulqQN"
GF_AUTH_GENERIC_OAUTH_SCOPES = "openid profile email offline_access" GF_AUTH_GENERIC_OAUTH_SCOPES = "openid profile email groups"
GF_AUTH_GENERIC_OAUTH_AUTH_URL = "https://authentik.v.paler.net/application/o/authorize/" GF_AUTH_GENERIC_OAUTH_AUTH_URL = "https://pocket-id.v.paler.net/authorize"
GF_AUTH_GENERIC_OAUTH_TOKEN_URL = "https://authentik.v.paler.net/application/o/token/" GF_AUTH_GENERIC_OAUTH_TOKEN_URL = "https://pocket-id.v.paler.net/api/oidc/token"
GF_AUTH_GENERIC_OAUTH_API_URL = "https://authentik.v.paler.net/application/o/userinfo/" GF_AUTH_GENERIC_OAUTH_API_URL = "https://pocket-id.v.paler.net/api/oidc/userinfo"
GF_AUTH_SIGNOUT_REDIRECT_URL = "https://authentik.v.paler.net/application/o/grafana/end-session/" GF_AUTH_SIGNOUT_REDIRECT_URL = "https://pocket-id.v.paler.net/logout"
# Optionally enable auto-login (bypasses Grafana login screen) # Optionally enable auto-login (bypasses Grafana login screen)
GF_AUTH_OAUTH_AUTO_LOGIN = "true" GF_AUTH_OAUTH_AUTO_LOGIN = "true"
# Optionally map user groups to Grafana roles # Optionally map user groups to Grafana roles
GF_AUTH_GENERIC_OAUTH_ROLE_ATTRIBUTE_PATH = "contains(groups[*], 'Grafana Admins') && 'Admin' || contains(groups[*], 'Grafana Editors') && 'Editor' || 'Viewer'" GF_AUTH_GENERIC_OAUTH_ROLE_ATTRIBUTE_PATH = "contains(groups[*], 'admins') && 'Admin' || contains(groups[*], 'residents') && 'Editor' || 'Viewer'"
GF_AUTH_GENERIC_OAUTH_USE_REFRESH_TOKEN = "true" GF_AUTH_GENERIC_OAUTH_USE_REFRESH_TOKEN = "true"
GF_AUTH_GENERIC_OAUTH_EMAIL_ATTRIBUTE_PATH = "email"
GF_AUTH_GENERIC_OAUTH_LOGIN_ATTRIBUTE_PATH = "preferred_username"
GF_AUTH_GENERIC_OAUTH_NAME_ATTRIBUTE_PATH = "name"
#GF_LOG_LEVEL = "debug" #GF_LOG_LEVEL = "debug"
} }

50
services/homepage.hcl Normal file
View File

@@ -0,0 +1,50 @@
job "homepage" {
datacenters = ["alo"]
group "app" {
network {
port "http" { to = 3000 }
}
task "homepage" {
driver = "docker"
config {
image = "ghcr.io/gethomepage/homepage:latest"
ports = [ "http" ]
volumes = [
"/data/services/homepage:/app/config",
]
}
env {
PUID = 1000
PGID = 1000
HOMEPAGE_ALLOWED_HOSTS = "homepage.v.paler.net"
}
resources {
cpu = 200
memory = 256
}
service {
name = "homepage"
port = "http"
tags = [
"traefik.enable=true",
"traefik.http.routers.homepage.entryPoints=websecure",
"traefik.http.routers.homepage.middlewares=oidc-auth@file",
]
check {
type = "http"
path = "/"
interval = "10s"
timeout = "5s"
}
}
}
}
}

View File

@@ -38,7 +38,7 @@ job "jupyter" {
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.jupyter.entryPoints=websecure", "traefik.http.routers.jupyter.entryPoints=websecure",
"traefik.http.routers.jupyter.middlewares=authentik@file", "traefik.http.routers.jupyter.middlewares=oidc-auth@file",
] ]
} }
} }

View File

@@ -126,7 +126,7 @@ EOH
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.loki.entryPoints=websecure", "traefik.http.routers.loki.entryPoints=websecure",
"traefik.http.routers.loki.middlewares=authentik@file", "traefik.http.routers.loki.middlewares=oidc-auth@file",
"metrics", "metrics",
] ]
} }

View File

@@ -65,7 +65,7 @@ job "maps" {
to = 80 to = 80
} }
port "php" { port "php" {
static = 9001 static = 9000
} }
} }

View File

@@ -7,9 +7,12 @@ job "media" {
group "servers" { group "servers" {
network { network {
port "radarr" { to = 7878 } port "radarr" { static = 7878 }
port "sonarr" { to = 8989 } port "sonarr" { static = 8989 }
port "bazarr" { to = 6767 } port "bazarr" { to = 6767 }
port "prowlarr" { static = 9696 }
port "jellyseerr" { static = 5055 }
port "flaresolverr" { static = 8191 }
port "pms" { static = 32400 } port "pms" { static = 32400 }
port "qbt_ui" { static = 8080 } port "qbt_ui" { static = 8080 }
port "qbt_torrent" { static = 51413 } port "qbt_torrent" { static = 51413 }
@@ -34,7 +37,7 @@ job "media" {
} }
resources { resources {
cpu = 200 cpu = 1000
} }
service { service {
@@ -44,7 +47,7 @@ job "media" {
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.radarr.entryPoints=websecure", "traefik.http.routers.radarr.entryPoints=websecure",
"traefik.http.routers.radarr.middlewares=authentik@file", "traefik.http.routers.radarr.middlewares=oidc-auth@file",
] ]
} }
} }
@@ -68,7 +71,8 @@ job "media" {
} }
resources { resources {
cpu = 200 cpu = 1000
memory = 500
} }
service { service {
@@ -78,20 +82,54 @@ job "media" {
tags = [ tags = [
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.sonarr.entryPoints=websecure", "traefik.http.routers.sonarr.entryPoints=websecure",
"traefik.http.routers.sonarr.middlewares=authentik@file", "traefik.http.routers.sonarr.middlewares=oidc-auth@file",
] ]
} }
} }
task "bazarr" { # task "bazarr" {
# driver = "docker"
#
# config {
# image = "ghcr.io/hotio/bazarr:latest"
# ports = [ "bazarr" ]
# volumes = [
# "/data/services/media/bazarr:/config",
# "/data/media/media:/data/media",
# ]
# }
#
# env {
# PUID = 1000
# PGID = 1000
# TZ = "Europe/Lisbon"
# }
#
# resources {
# cpu = 200
# memory = 500
# }
#
# service {
# name = "bazarr"
# port = "bazarr"
#
# tags = [
# "traefik.enable=true",
# "traefik.http.routers.bazarr.entryPoints=websecure",
# "traefik.http.routers.bazarr.middlewares=oidc-auth@file",
# ]
# }
# }
task "prowlarr" {
driver = "docker" driver = "docker"
config { config {
image = "ghcr.io/hotio/bazarr:latest" image = "ghcr.io/hotio/prowlarr:latest"
ports = [ "bazarr" ] ports = [ "prowlarr" ]
volumes = [ volumes = [
"/data/services/media/bazarr:/config", "/data/services/media/prowlarr:/config",
"/data/media/media:/data/media",
] ]
} }
@@ -106,17 +144,93 @@ job "media" {
 }
 service {
-name = "bazarr"
-port = "bazarr"
+name = "prowlarr"
+port = "prowlarr"
 tags = [
 "traefik.enable=true",
-"traefik.http.routers.bazarr.entryPoints=websecure",
-"traefik.http.routers.bazarr.middlewares=authentik@file",
+"traefik.http.routers.prowlarr.entryPoints=websecure",
+"traefik.http.routers.prowlarr.middlewares=oidc-auth@file",
 ]
 }
 }
+task "jellyseerr" {
+driver = "docker"
+config {
+image = "fallenbagel/jellyseerr:latest"
+ports = [ "jellyseerr" ]
+volumes = [
+"/data/services/media/jellyseerr:/app/config",
+]
+}
+env {
+TZ = "Europe/Lisbon"
+}
+resources {
+cpu = 200
+}
+service {
+name = "jellyseerr"
+port = "jellyseerr"
+tags = [
+"traefik.enable=true",
+"traefik.http.routers.jellyseerr.entryPoints=websecure",
+"traefik.http.routers.jellyseerr.middlewares=oidc-auth@file",
+]
+}
+}
+task "flaresolverr" {
+driver = "docker"
+config {
+image = "ghcr.io/flaresolverr/flaresolverr:latest"
+ports = [ "flaresolverr" ]
+}
+env {
+LOG_LEVEL = "info"
+TZ = "Europe/Lisbon"
+}
+resources {
+cpu = 500
+memory = 1024
+}
+service {
+name = "flaresolverr"
+port = "flaresolverr"
+}
+}
+task "recyclarr" {
+driver = "docker"
+config {
+image = "ghcr.io/recyclarr/recyclarr:latest"
+volumes = [
+"/data/services/media/recyclarr:/config",
+]
+}
+env {
+TZ = "Europe/Lisbon"
+CRON_SCHEDULE = "0 0 * * *" # Daily at midnight
+}
+resources {
+cpu = 100
+memory = 256
+}
+}
 task "plex" {
 driver = "docker"
@@ -138,7 +252,7 @@ job "media" {
 resources {
 cpu = 2000
-memory = 1000
+memory = 2000
 }
 service {
@@ -148,7 +262,7 @@ job "media" {
 tags = [
 "traefik.enable=true",
 "traefik.http.routers.plex.entryPoints=websecure",
-"traefik.http.routers.plex.middlewares=authentik@file",
+"traefik.http.routers.plex.middlewares=oidc-auth@file",
 ]
 }
 }
@@ -177,7 +291,7 @@ job "media" {
 resources {
 cpu = 2000
-memory = 1000
+memory = 1500
 }
 service {
@@ -187,7 +301,7 @@ job "media" {
 tags = [
 "traefik.enable=true",
 "traefik.http.routers.torrent.entryPoints=websecure",
-"traefik.http.routers.torrent.middlewares=authentik@file",
+"traefik.http.routers.torrent.middlewares=oidc-auth@file",
 ]
 }
 }


@@ -39,10 +39,10 @@ job "netbox" {
 REMOTE_AUTH_ENABLED = "true"
 REMOTE_AUTH_BACKEND = "social_core.backends.open_id_connect.OpenIdConnectAuth"
-SOCIAL_AUTH_OIDC_ENDPOINT = "https://authentik.v.paler.net/application/o/netbox/"
-SOCIAL_AUTH_OIDC_KEY = "XiPhZmWy2mp8hQyHLXCwk7njRNPSLTp2vSHhvWYI"
-SOCIAL_AUTH_OIDC_SECRET = "Kkop2dStx0gN52V1LfPnoxcaemuur6zMsvRnqpWSDe2qSngJVcqWfvFXaNeTbdURRB6TPwjlaNJ5BXR2ChcSmokWGTGargu84Ox1D6M2zXTsfLFj9B149Mhblos4mJL1"
-LOGOUT_REDIRECT_URL = "https://authentik.v.paler.net/application/o/netbox/end-session/"
+SOCIAL_AUTH_OIDC_ENDPOINT = "https://pocket-id.v.paler.net/"
+SOCIAL_AUTH_OIDC_KEY = "6ce1f1bb-d5e8-4ba5-b136-2643dc8bcbcf"
+SOCIAL_AUTH_OIDC_SECRET = "Af7sJvCn9BuijoJXrB5aWv6fTmEqLCAf"
+LOGOUT_REDIRECT_URL = "https://pocket-id.v.paler.net/logout"
 }
 resources {

services/ollama.hcl (new file, 39 lines)

@@ -0,0 +1,39 @@
job "ollama" {
datacenters = ["alo"]
type = "service"
group "ollama" {
network {
port "http" {
static = 11434
}
}
task "server" {
driver = "docker"
config {
image = "ollama/ollama:latest"
ports = ["http"]
volumes = ["/data/services/ollama:/root/.ollama"]
}
service {
name = "ollama"
port = "http"
check {
type = "http"
path = "/"
interval = "30s"
timeout = "5s"
}
}
resources {
cpu = 8000
memory = 2048
}
}
}
}

services/pocket-id.hcl (new file, 51 lines)

@@ -0,0 +1,51 @@
job "pocket-id" {
datacenters = ["alo"]
group "app" {
network {
port "http" {
to = 1411
}
}
task "server" {
driver = "docker"
config {
image = "ghcr.io/pocket-id/pocket-id:v1"
ports = ["http"]
volumes = [
"/data/services/pocket-id:/app/data",
]
}
env {
APP_URL = "https://pocket-id.v.paler.net"
TRUST_PROXY = "true"
MAXMIND_LICENSE_KEY = "${var.maxmind_license_key}"
PUID = "1000"
PGID = "1000"
}
resources {
cpu = 500
memory = 512
}
service {
name = "pocket-id"
port = "http"
tags = [
"traefik.enable=true",
"traefik.http.routers.pocket-id.entryPoints=websecure",
]
}
}
}
}
variable "maxmind_license_key" {
type = string
default = "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"
}


@@ -91,15 +91,15 @@ job "postgres" {
 PGADMIN_CONFIG_OAUTH2_AUTO_CREATE_USER = "True"
 PGADMIN_CONFIG_OAUTH2_CONFIG = <<EOH
 [{
-'OAUTH2_NAME' : 'authentik',
+'OAUTH2_NAME' : 'pocket-id',
 'OAUTH2_DISPLAY_NAME' : 'SSO',
-'OAUTH2_CLIENT_ID' : 'o4p3B03ayTQ2kpwmM7GswbcfO78JHCTdoZqKJEut',
-'OAUTH2_CLIENT_SECRET' : '7UYHONOCVdjpRMK9Ojwds0qPPpxCiztbIRhK7FJ2IFBpUgN6tnmpEjlkPYimiGKfaHLhy4XE7kQm7Et1Jm0hgyia0iB1VIlp623ckppbwkM6IfpTE1LfEmTMtPrxSngx',
-'OAUTH2_TOKEN_URL' : 'https://authentik.v.paler.net/application/o/token/',
-'OAUTH2_AUTHORIZATION_URL' : 'https://authentik.v.paler.net/application/o/authorize/',
-'OAUTH2_API_BASE_URL' : 'https://authentik.v.paler.net/',
-'OAUTH2_USERINFO_ENDPOINT' : 'https://authentik.v.paler.net/application/o/userinfo/',
-'OAUTH2_SERVER_METADATA_URL' : 'https://authentik.v.paler.net/application/o/pgadmin/.well-known/openid-configuration',
+'OAUTH2_CLIENT_ID' : '180133da-1bd7-4cde-9c18-2f277e962dab',
+'OAUTH2_CLIENT_SECRET' : 'ELYNAfiWSGYJQUXUDOdpm7tTtyLbrs4E',
+'OAUTH2_TOKEN_URL' : 'https://pocket-id.v.paler.net/api/oidc/token',
+'OAUTH2_AUTHORIZATION_URL' : 'https://pocket-id.v.paler.net/authorize',
+'OAUTH2_API_BASE_URL' : 'https://pocket-id.v.paler.net/',
+'OAUTH2_USERINFO_ENDPOINT' : 'https://pocket-id.v.paler.net/api/oidc/userinfo',
+'OAUTH2_SERVER_METADATA_URL' : 'https://pocket-id.v.paler.net/.well-known/openid-configuration',
 'OAUTH2_SCOPE' : 'openid email profile',
 'OAUTH2_ICON' : 'fa-database',
 'OAUTH2_BUTTON_COLOR' : '#00ff00'


@@ -54,7 +54,7 @@ job "prometheus" {
 tags = [
 "traefik.enable=true",
 "traefik.http.routers.prometheus.entryPoints=websecure",
-"traefik.http.routers.prometheus.middlewares=authentik@file",
+"traefik.http.routers.prometheus.middlewares=oidc-auth@file",
 ]
 check {


@@ -0,0 +1,82 @@
job "tiddlywiki-mcp" {
datacenters = ["alo"]
group "captainslog" {
network {
port "http" {
static = 3500
}
}
volume "services" {
type = "host"
source = "services"
read_only = false
}
volume "nix-store" {
type = "host"
source = "nix-store"
read_only = true
}
volume "sw" {
type = "host"
source = "sw"
read_only = true
}
task "mcp-server" {
driver = "exec"
config {
command = "/sw/bin/node"
args = ["/data/services/tiddlywiki-mcp/dist/index.js"]
}
env {
MCP_TRANSPORT = "http"
MCP_PORT = "${NOMAD_PORT_http}"
CONSUL_SERVICE = "captainslog.service.consul"
AUTH_HEADER = "X-Oidc-Username"
AUTH_USER = "claude-code"
}
volume_mount {
volume = "services"
destination = "/data/services"
read_only = false
}
volume_mount {
volume = "nix-store"
destination = "/nix/store"
read_only = true
}
volume_mount {
volume = "sw"
destination = "/sw"
read_only = true
}
service {
name = "tiddlywiki-mcp-captainslog"
port = "http"
check {
type = "http"
path = "/health"
interval = "10s"
timeout = "2s"
}
}
resources {
memory = 256
}
user = "ppetru"
}
}
}
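
The mcp-server task above mounts the host volumes "services", "nix-store", and "sw", which are not defined anywhere in this diff; they have to be declared as host_volume stanzas in the Nomad client configuration (here presumably generated by the NixOS module). A minimal sketch of what those declarations might look like, assuming the host paths match the bind mounts and volume_mount destinations used elsewhere in these jobs:

client {
  # Host paths are assumptions inferred from the mount destinations above.
  host_volume "services" {
    path      = "/data/services"
    read_only = false
  }
  host_volume "nix-store" {
    path      = "/nix/store"
    read_only = true
  }
  host_volume "sw" {
    path      = "/sw"
    read_only = true
  }
}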


@@ -34,7 +34,7 @@ job "traefik" {
 tags = [
 "traefik.enable=true",
 "traefik.http.routers.api.entryPoints=websecure",
-"traefik.http.routers.api.middlewares=authentik@file",
+"traefik.http.routers.api.middlewares=oidc-auth@file",
 "traefik.http.routers.api.rule=Host(`traefik.v.paler.net`)",
 "traefik.http.routers.api.service=api@internal",
 ]
@@ -63,6 +63,7 @@ job "traefik" {
 volumes = [
 "local/traefik.yml:/etc/traefik/traefik.yml",
 "/data/services/traefik:/config",
+"/data/services/traefik/plugins-storage:/plugins-storage",
 ]
 }
@@ -75,6 +76,12 @@ global:
 #log:
 # level: debug
+experimental:
+  plugins:
+    traefik-oidc-auth:
+      moduleName: "github.com/sevensolutions/traefik-oidc-auth"
+      version: "v0.16.0"
 api:
 dashboard: true
@@ -145,7 +152,7 @@ EOH
 }
 resources {
-cpu = 100
+cpu = 200
 memory = 512
 }
 }
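
The routers above only reference oidc-auth@file; the middleware definition itself is not part of this diff and would live in Traefik's file-provider dynamic configuration under /data/services/traefik (mounted at /config). A rough sketch of such a definition using the traefik-oidc-auth plugin follows. The issuer URL matches the other jobs in this change, but the file name, client credentials, and the exact plugin option names (including the header mapping that would produce the X-Oidc-Username header consumed by the wiki and MCP jobs) are assumptions and should be checked against the plugin's v0.16.0 documentation:

# /data/services/traefik/dynamic.yml (hypothetical file name)
http:
  middlewares:
    oidc-auth:
      plugin:
        traefik-oidc-auth:
          Provider:
            Url: "https://pocket-id.v.paler.net"
            ClientId: "<Pocket ID client id>"          # placeholder
            ClientSecret: "<Pocket ID client secret>"  # placeholder
          Scopes: ["openid", "profile", "email"]
          Headers:
            # Assumed mapping from the OIDC claim to the header the wikis expect.
            - Name: "X-Oidc-Username"
              Value: "{{ .claims.preferred_username }}"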


@@ -69,7 +69,7 @@ job "unifi" {
 tags = [
 "traefik.enable=true",
 "traefik.http.routers.unifi.entryPoints=websecure",
-"traefik.http.routers.unifi.middlewares=authentik@file",
+"traefik.http.routers.unifi.middlewares=oidc-auth@file",
 "traefik.http.services.unifi.loadbalancer.server.scheme=https",
 ]
 }


@@ -39,7 +39,7 @@ job "urbit" {
 tags = [
 "traefik.enable=true",
 "traefik.http.routers.urbit.entryPoints=websecure",
-"traefik.http.routers.urbit.middlewares=authentik@file",
+"traefik.http.routers.urbit.middlewares=oidc-auth@file",
 ]
 }


@@ -73,7 +73,7 @@ EOH
 tags = [
 "traefik.enable=true",
 "traefik.http.routers.webodm.entryPoints=websecure",
-"traefik.http.routers.webodm.middlewares=authentik@file",
+"traefik.http.routers.webodm.middlewares=oidc-auth@file",
 ]
 }
 }
@@ -97,7 +97,7 @@ EOH
 tags = [
 "traefik.enable=true",
 "traefik.http.routers.clusterodm.entryPoints=websecure",
-"traefik.http.routers.clusterodm.middlewares=authentik@file",
+"traefik.http.routers.clusterodm.middlewares=oidc-auth@file",
 ]
 }


@@ -22,7 +22,7 @@ job "whoami" {
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.whoami.rule=Host(`test.alo.land`)", "traefik.http.routers.whoami.rule=Host(`test.alo.land`)",
"traefik.http.routers.whoami.entryPoints=websecure", "traefik.http.routers.whoami.entryPoints=websecure",
"traefik.http.routers.whoami.middlewares=authentik@file", "traefik.http.routers.whoami.middlewares=oidc-auth@file",
] ]
} }
} }


@@ -36,9 +36,9 @@ job "wiki" {
"--listen", "--listen",
"host=0.0.0.0", "host=0.0.0.0",
"port=${NOMAD_PORT_captainslog}", "port=${NOMAD_PORT_captainslog}",
"authenticated-user-header=X-authentik-username", "authenticated-user-header=X-Oidc-Username",
"readers=ppetru", "readers=ppetru,claude-code",
"writers=ppetru", "writers=ppetru,claude-code",
"admin=ppetru", "admin=ppetru",
] ]
} }
@@ -64,7 +64,7 @@ job "wiki" {
 tags = [
 "traefik.enable=true",
 "traefik.http.routers.captainslog.entryPoints=websecure",
-"traefik.http.routers.captainslog.middlewares=authentik@file",
+"traefik.http.routers.captainslog.middlewares=oidc-auth@file",
 ]
 }
@@ -85,7 +85,7 @@ job "wiki" {
"--listen", "--listen",
"host=0.0.0.0", "host=0.0.0.0",
"port=${NOMAD_PORT_alo}", "port=${NOMAD_PORT_alo}",
"authenticated-user-header=X-authentik-username", "authenticated-user-header=X-Oidc-Username",
"readers=ppetru,ines", "readers=ppetru,ines",
"writers=ppetru,ines", "writers=ppetru,ines",
"admin=ppetru", "admin=ppetru",
@@ -112,7 +112,7 @@ job "wiki" {
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.alowiki.rule=Host(`wiki.alo.land`)", "traefik.http.routers.alowiki.rule=Host(`wiki.alo.land`)",
"traefik.http.routers.alowiki.entryPoints=websecure", "traefik.http.routers.alowiki.entryPoints=websecure",
"traefik.http.routers.alowiki.middlewares=authentik@file", "traefik.http.routers.alowiki.middlewares=oidc-auth@file",
] ]
} }
@@ -133,7 +133,7 @@ job "wiki" {
"--listen", "--listen",
"host=0.0.0.0", "host=0.0.0.0",
"port=${NOMAD_PORT_pispace}", "port=${NOMAD_PORT_pispace}",
"authenticated-user-header=X-authentik-username", "authenticated-user-header=X-Oidc-Username",
"readers=ppetru,ines", "readers=ppetru,ines",
"writers=ppetru,ines", "writers=ppetru,ines",
"admin=ppetru", "admin=ppetru",
@@ -160,7 +160,7 @@ job "wiki" {
"traefik.enable=true", "traefik.enable=true",
"traefik.http.routers.pispace.rule=Host(`pi.paler.net`)", "traefik.http.routers.pispace.rule=Host(`pi.paler.net`)",
"traefik.http.routers.pispace.entryPoints=websecure", "traefik.http.routers.pispace.entryPoints=websecure",
"traefik.http.routers.pispace.middlewares=authentik@file", "traefik.http.routers.pispace.middlewares=oidc-auth@file",
] ]
} }
@@ -181,7 +181,7 @@ job "wiki" {
"--listen", "--listen",
"host=0.0.0.0", "host=0.0.0.0",
"port=${NOMAD_PORT_grok}", "port=${NOMAD_PORT_grok}",
"authenticated-user-header=X-authentik-username", "authenticated-user-header=X-Oidc-Username",
"readers=ppetru", "readers=ppetru",
"writers=ppetru", "writers=ppetru",
"admin=ppetru", "admin=ppetru",
@@ -207,7 +207,7 @@ job "wiki" {
 tags = [
 "traefik.enable=true",
 "traefik.http.routers.groktw.entryPoints=websecure",
-"traefik.http.routers.groktw.middlewares=authentik@file",
+"traefik.http.routers.groktw.middlewares=oidc-auth@file",
 ]
 }


@@ -60,7 +60,7 @@ job "wordpress" {
 }
 resources {
-cpu = 4000
+cpu = 2000
 memory = 1024
 }