19 Commits

Author SHA1 Message Date
ahtlon
52824e39ee with nix flake check the hydraJobs output is evaluated in the same way as Hydra's hydra-eval-jobs
2025-01-18 23:41:53 +01:00
ahtlon
8793120436 Only run on push 2025-01-18 23:40:11 +01:00
ahtlon
950ada1e10 [actions] Add flake check
2025-01-18 22:24:21 +01:00
ahtlon
1e269966ff Merge branch 'fix-flake-check'
nix flake check and nix flake show now work again
2025-01-18 22:02:19 +01:00
ahtlon
3861daaf76 [modules] move microvm module import from makeMicroVM to baseModules 2025-01-18 22:01:06 +01:00
ahtlon
3a332e77d1 [scripts] move packages to legacyPackages 2025-01-18 21:45:48 +01:00
ahtlon
79c311b45d Merge branch 'issue51'
Fixes #51
2025-01-18 20:41:06 +01:00
ahtlon
850070f987 [scripts] check for flake.nix
2025-01-18 20:39:16 +01:00
ahtlon
d242562544 [packages] make scripts available in shell without nix run
2025-01-18 20:04:22 +01:00
d8d910f5fd [uptimekuma] mv from fanny to hetzner server
after thinking about it, it makes no sense to have status/alerting
running on fanny: as soon as fanny fails, we won't get any alerts anymore.
that's why i think having it run on the Hetzner server, which is
quite stable, makes sense
2025-01-17 14:19:38 +01:00
a4f6b77e30 [fanny] deploy uptimekuma
2025-01-17 14:00:41 +01:00
6aa6f2e171 [uptimekuma] set redirects 2025-01-17 13:59:54 +01:00
d9bb933891 [uptimekuma] init 2025-01-17 13:59:35 +01:00
168d45ed8a [vpn] set mtu 1340
2025-01-17 00:29:11 +01:00
2f477d3566 [fanny] undo proxy settings
2025-01-17 00:19:23 +01:00
b40cb40b01 [fanny] try fix incomplete file transfer
2025-01-16 19:30:49 +01:00
b15b2ae789 [fanny] disable proxy_buffer
2025-01-16 16:36:38 +01:00
c7b02b9366 [vpn] disable proxy_buffer
the url http://10.100.0.101:80/css/variables.css only returns half the file;
hopefully this fixes it
2025-01-16 16:26:23 +01:00
c78eb9cbc1 [fanny][vpn] open port 80, enable nginx
2025-01-16 14:24:19 +01:00
11 changed files with 146 additions and 24 deletions

View File

@@ -1,9 +1,8 @@
name: "Evaluate Hydra Jobs" name: "Check flake syntax"
on: on:
pull_request:
push: push:
jobs: jobs:
eval-hydra-jobs: flake-check:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@@ -11,5 +10,5 @@ jobs:
run: | run: |
apt update -y apt update -y
apt install sudo -y apt install sudo -y
- uses: cachix/install-nix-action@v27 - uses: cachix/install-nix-action@v30
- run: nix eval --no-update-lock-file --accept-flake-config .\#hydraJobs - run: nix flake check --no-update-lock-file --accept-flake-config .
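
For orientation, a toy flake (illustrative names, not this repository's) showing the kind of hydraJobs output the new nix flake check step exercises; per the commit message above, each attribute is evaluated much as Hydra's hydra-eval-jobs would evaluate it server-side:

# Toy flake; assumes only the usual hydraJobs layout.
{
  outputs = { self, nixpkgs }: {
    # nix flake check evaluates each attribute under hydraJobs
    # and verifies it is a derivation.
    hydraJobs.docs = nixpkgs.legacyPackages.x86_64-linux.hello;
  };
}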

View File

@@ -3,6 +3,7 @@
 , nixpkgs
 , sops-nix
 , inputs
+, microvm
 , nixos-hardware
 , home-manager
 , ...
@@ -34,15 +35,14 @@ let
         };
       };
     })
     sops-nix.nixosModules.sops
+    microvm.nixosModules.microvm
   ];
   }
 ];
 defaultModules = baseModules;
 makeMicroVM = hostName: ipv4Addr: macAddr: modules: [
-  inputs.microvm.nixosModules.microvm
   {
     microvm = {
       hypervisor = "cloud-hypervisor";
@@ -114,6 +114,15 @@ in
     ];
   };
+  lucia = nixosSystem {
+    system = "aarch64-linux";
+    specialArgs.inputs = inputs;
+    modules = defaultModules ++ [
+      ./lucia/configuration.nix
+      ./lucia/hardware_configuration.nix
+    ];
+  };
   fanny = nixosSystem {
     system = "x86_64-linux";
     specialArgs.inputs = inputsMod;
@@ -152,12 +161,12 @@ in
     ];
   };
-  lucia = nixosSystem {
-    system = "aarch64-linux";
+  uptimekuma = nixosSystem {
+    system = "x86_64-linux";
     specialArgs.inputs = inputs;
-    modules = defaultModules ++ [
-      ./lucia/configuration.nix
-      ./lucia/hardware_configuration.nix
+    specialArgs.self = self;
+    modules = makeMicroVM "uptimekuma" "10.0.0.12" "D0:E5:CA:F0:D7:E8" [
+      ./uptimekuma/configuration.nix
     ];
   };
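
Net effect of these hunks: the microvm module import moves from the per-VM makeMicroVM helper into the shared baseModules, so VM guests and plain hosts evaluate with the same module set. A reduced sketch of the resulting pattern (attribute names from the diff; how the modules argument is spliced in is not visible above, so the ++ modules below is an assumption):

# Reduced sketch, under the assumptions stated above.
baseModules = [
  sops-nix.nixosModules.sops
  microvm.nixosModules.microvm  # moved here from makeMicroVM
];
defaultModules = baseModules;
makeMicroVM = hostName: ipv4Addr: macAddr: modules:
  [ { microvm.hypervisor = "cloud-hypervisor"; } ] ++ modules;

Uniform imports like this are what let a generic evaluator such as nix flake check walk every nixosSystem without tripping over missing microvm options.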

View File

@@ -8,6 +8,15 @@
     { addr = "0.0.0.0"; port = 9000; }
   ];
   root = "${self.packages.x86_64-linux.docs}/share/doc";
+  extraConfig = ''
+    proxy_buffering off;
+    proxy_cache off;
+    proxy_http_version 1.1;
+    proxy_set_header Host $host;
+    proxy_set_header X-Real-IP $remote_addr;
+    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+    proxy_set_header X-Forwarded-Proto $scheme;
+  '';
   };
 };

View File

@@ -36,7 +36,21 @@ in
 services.nginx.virtualHosts."docs.malobeo.org" = {
   forceSSL = true;
   enableACME= true;
-  locations."/".proxyPass = "http://10.0.0.10";
+  locations."/" = {
+    proxyPass = "http://10.0.0.10";
+    extraConfig = ''
+    '';
+  };
+};
+services.nginx.virtualHosts."status.malobeo.org" = {
+  forceSSL = true;
+  enableACME= true;
+  locations."/" = {
+    proxyPass = "http://10.0.0.12";
+    extraConfig = ''
+    '';
+  };
 };
 services.nginx.virtualHosts."tasklist.malobeo.org" = {

View File

@@ -55,9 +55,21 @@ in
 services.malobeo.microvm.enableHostBridge = true;
 services.malobeo.microvm.deployHosts = [ "infradocs" ];
-services.nginx.virtualHosts."docs.malobeo.org" = {
-  locations."/".proxyPass = "http://10.0.0.11:9000";
+networking = {
+  firewall = {
+    allowedTCPPorts = [ 80 ];
+  };
+};
+services.nginx = {
+  enable = true;
+  virtualHosts."docs.malobeo.org" = {
+    locations."/" = {
+      proxyPass = "http://10.0.0.11:9000";
+      extraConfig = ''
+      '';
+    };
+  };
 };
 services.tor = {

View File

@@ -69,6 +69,7 @@ in
 networking.wg-quick = {
   interfaces = {
     malovpn = {
+      mtu = 1340; #seems to be necessary to proxypass nginx traffic through vpn
       address = myPeer.address;
       autostart = cfg.autostart;
       listenPort = mkIf (myPeer.role == "server") myPeer.listenPort;
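
One plausible reading of the value (my arithmetic, not the commit's): WireGuard over IPv6 adds 80 bytes per packet, which is why wg-quick usually defaults to an MTU of 1420, and 1340 leaves a further 80 bytes of slack for the proxied hop:

# MTU budget sketch; only the option path and the value 1340 come
# from the diff, the breakdown is an assumption.
#   1500 ethernet MTU
#  -  80 tunnel overhead (40 outer IPv6 + 8 UDP + 32 WireGuard)
#  = 1420 wg-quick's usual default
#  -  80 extra slack for proxied nginx traffic
#  = 1340
networking.wg-quick.interfaces.malovpn.mtu = 1340;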

View File

@@ -0,0 +1,37 @@
+{ config, lib, pkgs, inputs, ... }:
+with lib;
+{
+  networking = {
+    hostName = mkDefault "uptimekuma";
+    useDHCP = false;
+    nameservers = [ "1.1.1.1" ];
+  };
+  imports = [
+    ../modules/malobeo_user.nix
+    ../modules/sshd.nix
+  ];
+  networking.firewall.allowedTCPPorts = [ 80 ];
+  services.nginx = {
+    enable = true;
+    virtualHosts."status.malobeo.org" = {
+      locations."/" = {
+        proxyPass = "http://127.0.0.1:3001";
+        extraConfig = ''
+        '';
+      };
+    };
+  };
+  services.uptime-kuma = {
+    enable = true;
+  };
+  system.stateVersion = "22.11"; # Did you read the comment?
+}
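
The proxyPass target 127.0.0.1:3001 relies on uptime-kuma's default port in the NixOS module. A hedged sketch of making that coupling explicit (pinning the port is a suggestion, not part of the commit; settings takes env-var style keys):

# Sketch: restate the default port the vhost above depends on,
# so a changed module default cannot silently break the proxy.
services.uptime-kuma = {
  enable = true;
  settings.PORT = "3001";  # assumption: matches the module default
};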

View File

@@ -12,6 +12,7 @@ with lib;
   nameservers = [ "1.1.1.1" ];
   firewall = {
     allowedUDPPorts = [ 51821 ];
+    allowedTCPPorts = [ 80 ];
   };
 };
@@ -27,8 +28,16 @@
   privateKeyFile = config.sops.secrets.wg_private.path;
 };
-services.nginx.virtualHosts."docs.malobeo.org" = {
-  locations."/".proxyPass = "http://10.100.0.101";
+services.nginx = {
+  enable = true;
+  virtualHosts."docs.malobeo.org" = {
+    locations."/" = {
+      proxyPass = "http://10.100.0.101";
+      extraConfig = ''
+      '';
+    };
+  };
 };
 system.stateVersion = "22.11"; # Did you read the comment?

View File

@@ -20,6 +20,7 @@ in (utils.lib.eachSystem (builtins.filter filter_system utils.lib.defaultSystems
 let
   sops = sops-nix.packages."${pkgs.system}";
   microvmpkg = microvm.packages."${pkgs.system}";
+  installed = builtins.attrNames self.legacyPackages."${pkgs.system}".scripts;
 in
 pkgs.mkShell {
   sopsPGPKeyDirs = [
@@ -37,11 +38,14 @@ in (utils.lib.eachSystem (builtins.filter filter_system utils.lib.defaultSystems
     pkgs.mdbook
     microvmpkg.microvm
   ];
+  packages = builtins.map (pkgName: self.legacyPackages."${pkgs.system}".scripts.${pkgName}) installed;
+  shellHook = ''echo "Available scripts: ${builtins.concatStringsSep " " installed}"'';
+};
+legacyPackages = {
+  scripts.remote-install = pkgs.writeShellScriptBin "remote-install" (builtins.readFile ./scripts/remote-install-encrypt.sh);
+  scripts.boot-unlock = pkgs.writeShellScriptBin "boot-unlock" (builtins.readFile ./scripts/unlock-boot.sh);
 };
 packages = {
-  remote-install = pkgs.writeShellScriptBin "remote-install" (builtins.readFile ./scripts/remote-install-encrypt.sh);
-  boot-unlock = pkgs.writeShellScriptBin "boot-unlock" (builtins.readFile ./scripts/unlock-boot.sh);
   docs = pkgs.stdenv.mkDerivation {
     name = "malobeo-docs";
     phases = [ "buildPhase" ];
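
Why this unbreaks the check: nix flake check evaluates every attribute under packages, but deliberately does not recurse into legacyPackages. A toy flake showing the distinction (illustrative names, not this repository's):

{
  outputs = { self, nixpkgs }:
    let pkgs = nixpkgs.legacyPackages.x86_64-linux; in {
      # checked: must evaluate to a derivation
      packages.x86_64-linux.docs = pkgs.hello;
      # only the output's existence is checked, so script
      # derivations can live here without deep evaluation
      legacyPackages.x86_64-linux.scripts.demo =
        pkgs.writeShellScriptBin "demo" "echo demo";
    };
}

Inside the devShell the scripts are mapped in by name, as the hunk above shows; outside it, the full attribute path (e.g. nix run .#legacyPackages.x86_64-linux.scripts.boot-unlock) should still reach them.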

View File

@@ -1,5 +1,4 @@
 set -o errexit
-set -o nounset
 set -o pipefail

 if [ $# -lt 2 ]; then
@@ -9,6 +8,21 @@ if [ $# -lt 2 ]; then
   exit 1
 fi

+if [ ! -e flake.nix ]
+then
+  echo "flake.nix not found. Searching down."
+  while [ ! -e flake.nix ]
+  do
+    if [ $PWD = "/" ]
+    then
+      echo "Found root. Aborting."
+      exit 1
+    else
+      cd ..
+    fi
+  done
+fi
+
 hostname=$1
 ipaddress=$2

View File

@@ -4,19 +4,33 @@ set -o pipefail
 sshoptions="-o StrictHostKeyChecking=no -o ServerAliveInterval=1 -o ServerAliveCountMax=1 -p 222 -T"
 HOSTNAME=$1
-echo
-diskkey=$(sops -d machines/$HOSTNAME/disk.key)
+if [ ! -e flake.nix ]
+then
+  echo "flake.nix not found. Searching down."
+  while [ ! -e flake.nix ]
+  do
+    if [ $PWD = "/" ]
+    then
+      echo "Found root. Aborting."
+      exit 1
+    else
+      cd ..
+    fi
+  done
+fi
+echo
 if [ $# = 1 ]
 then
+  diskkey=$(sops -d machines/$HOSTNAME/disk.key)
   echo "$diskkey" | ssh $sshoptions root@$HOSTNAME-initrd "systemd-tty-ask-password-agent" #storage
   echo "$diskkey" | ssh $sshoptions root@$HOSTNAME-initrd "systemd-tty-ask-password-agent" #root
 elif [ $# = 2 ]
 then
+  diskkey=$(sops -d machines/$HOSTNAME/disk.key)
   IP=$2
   echo "$diskkey" | ssh $sshoptions root@$IP "systemd-tty-ask-password-agent" #storage
   echo "$diskkey" | ssh $sshoptions root@$IP "systemd-tty-ask-password-agent" #root