From 750978a19232583e17620a1bd80435e957e7213a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sandro=20J=C3=A4ckel?= Date: Sat, 18 Jun 2022 13:22:42 +0200 Subject: [PATCH 01/26] Add gitea push hook --- doc/manual/src/webhooks.md | 20 +++++++++++++++++--- src/lib/Hydra/Controller/API.pm | 16 ++++++++++++++++ src/lib/Hydra/Controller/Root.pm | 3 ++- 3 files changed, 35 insertions(+), 4 deletions(-) diff --git a/doc/manual/src/webhooks.md b/doc/manual/src/webhooks.md index 2b26cd61..674e1064 100644 --- a/doc/manual/src/webhooks.md +++ b/doc/manual/src/webhooks.md @@ -1,9 +1,12 @@ # Webhooks -Hydra can be notified by github's webhook to trigger a new evaluation when a +Hydra can be notified by github or gitea with webhooks to trigger a new evaluation when a jobset has a github repo in its input. -To set up a github webhook go to `https://github.com///settings` and in the `Webhooks` tab -click on `Add webhook`. + +## GitHub + +To set up a webhook for a GitHub repository go to `https://github.com///settings` +and in the `Webhooks` tab click on `Add webhook`. - In `Payload URL` fill in `https:///api/push-github`. - In `Content type` switch to `application/json`. @@ -11,3 +14,14 @@ click on `Add webhook`. - For `Which events would you like to trigger this webhook?` keep the default option for events on `Just the push event.`. Then add the hook with `Add webhook`. + +## Gitea + +To set up a webhook for a Gitea repository go to the settings of the repository in your Gitea instance +and in the `Webhooks` tab click on `Add Webhook` and choose `Gitea` in the drop down. + +- In `Target URL` fill in `https:///api/push-gitea`. +- Keep HTTP method `POST`, POST Content Type `application/json` and Trigger On `Push Events`. +- Change the branch filter to match the git branch hydra builds. + +Then add the hook with `Add webhook`. 
diff --git a/src/lib/Hydra/Controller/API.pm b/src/lib/Hydra/Controller/API.pm index 6f10ef57..12073595 100644 --- a/src/lib/Hydra/Controller/API.pm +++ b/src/lib/Hydra/Controller/API.pm @@ -285,6 +285,22 @@ sub push_github : Chained('api') PathPart('push-github') Args(0) { $c->response->body(""); } +sub push_gitea : Chained('api') PathPart('push-gitea') Args(0) { + my ($self, $c) = @_; + + $c->{stash}->{json}->{jobsetsTriggered} = []; + + my $in = $c->request->{data}; + my $url = $in->{repository}->{clone_url} or die; + print STDERR "got push from Gitea repository $url\n"; + + triggerJobset($self, $c, $_, 0) foreach $c->model('DB::Jobsets')->search( + { 'project.enabled' => 1, 'me.enabled' => 1 }, + { join => 'project' + , where => \ [ 'me.flake like ? or exists (select 1 from JobsetInputAlts where project = me.project and jobset = me.name and value like ?)', [ 'flake', "%$url%"], [ 'value', "%$url%" ] ] + }); + $c->response->body(""); +} 1; diff --git a/src/lib/Hydra/Controller/Root.pm b/src/lib/Hydra/Controller/Root.pm index c6843d29..1b33db2a 100644 --- a/src/lib/Hydra/Controller/Root.pm +++ b/src/lib/Hydra/Controller/Root.pm @@ -32,6 +32,7 @@ sub noLoginNeeded { return $whitelisted || $c->request->path eq "api/push-github" || + $c->request->path eq "api/push-gitea" || $c->request->path eq "google-login" || $c->request->path eq "github-redirect" || $c->request->path eq "github-login" || @@ -77,7 +78,7 @@ sub begin :Private { $_->supportedInputTypes($c->stash->{inputTypes}) foreach @{$c->hydra_plugins}; # XSRF protection: require POST requests to have the same origin. 
- if ($c->req->method eq "POST" && $c->req->path ne "api/push-github") { + if ($c->req->method eq "POST" && $c->req->path ne "api/push-github" && $c->req->path ne "api/push-gitea") { my $referer = $c->req->header('Referer'); $referer //= $c->req->header('Origin'); my $base = $c->req->base; From a81c6a3a80d1055aa80934ab229e2dc49594edd2 Mon Sep 17 00:00:00 2001 From: Sandro Date: Fri, 1 Jul 2022 22:21:32 +0200 Subject: [PATCH 02/26] Match URIs that don't end in .git Co-authored-by: Charlotte --- src/lib/Hydra/Controller/API.pm | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lib/Hydra/Controller/API.pm b/src/lib/Hydra/Controller/API.pm index 12073595..5eeb0c04 100644 --- a/src/lib/Hydra/Controller/API.pm +++ b/src/lib/Hydra/Controller/API.pm @@ -292,6 +292,7 @@ sub push_gitea : Chained('api') PathPart('push-gitea') Args(0) { my $in = $c->request->{data}; my $url = $in->{repository}->{clone_url} or die; + $url =~ s/.git$//; print STDERR "got push from Gitea repository $url\n"; triggerJobset($self, $c, $_, 0) foreach $c->model('DB::Jobsets')->search( From 1b8154e67fb20f84b7f84049de03204b0df0a366 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sandro=20J=C3=A4ckel?= Date: Fri, 3 May 2024 14:31:15 +0200 Subject: [PATCH 03/26] Fix doi resolution after #1375 This fixes: > Caught exception in Hydra::Controller::Root->realisations "Undefined subroutine &Hydra::Controller::Root::queryRawRealisation called at /nix/store/v842xb35ph8ka1yi1xanjhk4xh1pn5nm-hydra-2024-04-22/libexec/hydra/lib/Hydra/Controller/Root.pm line 371." 
--- src/lib/Hydra/Controller/Root.pm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/Hydra/Controller/Root.pm b/src/lib/Hydra/Controller/Root.pm index 47389251..406dc52e 100644 --- a/src/lib/Hydra/Controller/Root.pm +++ b/src/lib/Hydra/Controller/Root.pm @@ -367,7 +367,7 @@ sub realisations :Path('realisations') :Args(StrMatch[REALISATIONS_REGEX]) { else { my ($rawDrvOutput) = $realisation =~ REALISATIONS_REGEX; - my $rawRealisation = queryRawRealisation($rawDrvOutput); + my $rawRealisation = $MACHINE_LOCAL_STORE->queryRawRealisation($rawDrvOutput); if (!$rawRealisation) { $c->response->status(404); From 4bd687e3e677c3c89603d8738a94c74c51d4f9ba Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 3 May 2024 10:34:56 -0400 Subject: [PATCH 04/26] Update to Nix 2.22 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nix': 'github:NixOS/nix/60824fa97c588a0faf68ea61260a47e388b0a4e5' (2024-04-11) → 'github:NixOS/nix/1c8150ac312b5f9ba1b3f6768ff43b09867e5883' (2024-04-23) • Added input 'nix/flake-parts': 'github:hercules-ci/flake-parts/9126214d0a59633752a136528f5f3b9aa8565b7d' (2024-04-01) • Added input 'nix/flake-parts/nixpkgs-lib': follows 'nix/nixpkgs' • Added input 'nix/pre-commit-hooks': 'github:cachix/pre-commit-hooks.nix/40e6053ecb65fcbf12863338a6dcefb3f55f1bf8' (2024-04-12) • Added input 'nix/pre-commit-hooks/flake-compat': follows 'nix' • Added input 'nix/pre-commit-hooks/flake-utils': 'github:numtide/flake-utils/5aed5285a952e0b949eb3ba02c12fa4fcfef535f' (2022-11-02) • Added input 'nix/pre-commit-hooks/gitignore': follows 'nix' • Added input 'nix/pre-commit-hooks/nixpkgs': follows 'nix/nixpkgs' • Added input 'nix/pre-commit-hooks/nixpkgs-stable': follows 'nix/nixpkgs' --- flake.lock | 80 ++++++++++++++++++++++++-- flake.nix | 2 +- src/hydra-eval-jobs/hydra-eval-jobs.cc | 18 +++--- 3 files changed, 85 insertions(+), 15 deletions(-) diff --git a/flake.lock 
b/flake.lock index 966431f9..e828aa7b 100644 --- a/flake.lock +++ b/flake.lock @@ -16,6 +16,42 @@ "type": "github" } }, + "flake-parts": { + "inputs": { + "nixpkgs-lib": [ + "nix", + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1712014858, + "narHash": "sha256-sB4SWl2lX95bExY2gMFG5HIzvva5AVMJd4Igm+GpZNw=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "9126214d0a59633752a136528f5f3b9aa8565b7d", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "flake-parts", + "type": "github" + } + }, + "flake-utils": { + "locked": { + "lastModified": 1667395993, + "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, "libgit2": { "flake": false, "locked": { @@ -35,23 +71,25 @@ "nix": { "inputs": { "flake-compat": "flake-compat", + "flake-parts": "flake-parts", "libgit2": "libgit2", "nixpkgs": [ "nixpkgs" ], - "nixpkgs-regression": "nixpkgs-regression" + "nixpkgs-regression": "nixpkgs-regression", + "pre-commit-hooks": "pre-commit-hooks" }, "locked": { - "lastModified": 1712849398, - "narHash": "sha256-10z/SoidVl9/lh56cMLj7ntJZHtVrumFvmn1YEqXmaM=", + "lastModified": 1713874370, + "narHash": "sha256-gW1mO/CvsQQ5gvgiwzxsGhPFI/tx30NING+qgF5Do0s=", "owner": "NixOS", "repo": "nix", - "rev": "60824fa97c588a0faf68ea61260a47e388b0a4e5", + "rev": "1c8150ac312b5f9ba1b3f6768ff43b09867e5883", "type": "github" }, "original": { "owner": "NixOS", - "ref": "2.21-maintenance", + "ref": "2.22-maintenance", "repo": "nix", "type": "github" } @@ -88,6 +126,38 @@ "type": "github" } }, + "pre-commit-hooks": { + "inputs": { + "flake-compat": [ + "nix" + ], + "flake-utils": "flake-utils", + "gitignore": [ + "nix" + ], + "nixpkgs": [ + "nix", + "nixpkgs" + ], + "nixpkgs-stable": [ + "nix", + "nixpkgs" + ] + }, + "locked": { + 
"lastModified": 1712897695, + "narHash": "sha256-nMirxrGteNAl9sWiOhoN5tIHyjBbVi5e2tgZUgZlK3Y=", + "owner": "cachix", + "repo": "pre-commit-hooks.nix", + "rev": "40e6053ecb65fcbf12863338a6dcefb3f55f1bf8", + "type": "github" + }, + "original": { + "owner": "cachix", + "repo": "pre-commit-hooks.nix", + "type": "github" + } + }, "root": { "inputs": { "nix": "nix", diff --git a/flake.nix b/flake.nix index c6646200..82824733 100644 --- a/flake.nix +++ b/flake.nix @@ -2,7 +2,7 @@ description = "A Nix-based continuous build system"; inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11-small"; - inputs.nix.url = "github:NixOS/nix/2.21-maintenance"; + inputs.nix.url = "github:NixOS/nix/2.22-maintenance"; inputs.nix.inputs.nixpkgs.follows = "nixpkgs"; outputs = { self, nixpkgs, nix }: diff --git a/src/hydra-eval-jobs/hydra-eval-jobs.cc b/src/hydra-eval-jobs/hydra-eval-jobs.cc index d5619719..5adea42b 100644 --- a/src/hydra-eval-jobs/hydra-eval-jobs.cc +++ b/src/hydra-eval-jobs/hydra-eval-jobs.cc @@ -102,8 +102,8 @@ static std::string queryMetaStrings(EvalState & state, PackageInfo & drv, const for (unsigned int n = 0; n < v.listSize(); ++n) rec(*v.listElems()[n]); else if (v.type() == nAttrs) { - auto a = v.attrs->find(state.symbols.create(subAttribute)); - if (a != v.attrs->end()) + auto a = v.attrs()->find(state.symbols.create(subAttribute)); + if (a != v.attrs()->end()) res.push_back(std::string(state.forceString(*a->value, a->pos, "while evaluating meta attributes"))); } }; @@ -138,12 +138,12 @@ static void worker( callFlake(state, lockedFlake, *vFlake); - auto vOutputs = vFlake->attrs->get(state.symbols.create("outputs"))->value; + auto vOutputs = vFlake->attrs()->get(state.symbols.create("outputs"))->value; state.forceValue(*vOutputs, noPos); - auto aHydraJobs = vOutputs->attrs->get(state.symbols.create("hydraJobs")); + auto aHydraJobs = vOutputs->attrs()->get(state.symbols.create("hydraJobs")); if (!aHydraJobs) - aHydraJobs = 
vOutputs->attrs->get(state.symbols.create("checks")); + aHydraJobs = vOutputs->attrs()->get(state.symbols.create("checks")); if (!aHydraJobs) throw Error("flake '%s' does not provide any Hydra jobs or checks", flakeRef); @@ -204,9 +204,9 @@ static void worker( job["isChannel"] = drv->queryMetaBool("isHydraChannel", false); /* If this is an aggregate, then get its constituents. */ - auto a = v->attrs->get(state.symbols.create("_hydraAggregate")); + auto a = v->attrs()->get(state.symbols.create("_hydraAggregate")); if (a && state.forceBool(*a->value, a->pos, "while evaluating the `_hydraAggregate` attribute")) { - auto a = v->attrs->get(state.symbols.create("constituents")); + auto a = v->attrs()->get(state.symbols.create("constituents")); if (!a) state.error("derivation must have a ‘constituents’ attribute").debugThrow(); @@ -260,7 +260,7 @@ static void worker( else if (v->type() == nAttrs) { auto attrs = nlohmann::json::array(); StringSet ss; - for (auto & i : v->attrs->lexicographicOrder(state.symbols)) { + for (auto & i : v->attrs()->lexicographicOrder(state.symbols)) { std::string name(state.symbols[i->name]); if (name.find(' ') != std::string::npos) { printError("skipping job with illegal name '%s'", name); @@ -368,7 +368,7 @@ int main(int argc, char * * argv) ]() { try { - EvalState state(myArgs.searchPath, openStore()); + EvalState state(myArgs.lookupPath, openStore()); Bindings & autoArgs = *myArgs.getAutoArgs(state); worker(state, autoArgs, *to, *from); } catch (Error & e) { From 92155f9a07f5fe32e0778e474e7313997811e635 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 3 May 2024 11:41:42 -0400 Subject: [PATCH 05/26] Remove `PrometheusTiny` from overlay It's in Nixpkgs for a good while now. --- flake.nix | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/flake.nix b/flake.nix index 82824733..b3de3d73 100644 --- a/flake.nix +++ b/flake.nix @@ -40,27 +40,6 @@ # A Nixpkgs overlay that provides a 'hydra' package. 
overlays.default = final: prev: { - - # Add LDAP dependencies that aren't currently found within nixpkgs. - perlPackages = prev.perlPackages // { - - PrometheusTiny = final.perlPackages.buildPerlPackage { - pname = "Prometheus-Tiny"; - version = "0.007"; - src = final.fetchurl { - url = "mirror://cpan/authors/id/R/RO/ROBN/Prometheus-Tiny-0.007.tar.gz"; - sha256 = "0ef8b226a2025cdde4df80129dd319aa29e884e653c17dc96f4823d985c028ec"; - }; - buildInputs = with final.perlPackages; [ HTTPMessage Plack TestException ]; - meta = { - homepage = "https://github.com/robn/Prometheus-Tiny"; - description = "A tiny Prometheus client"; - license = with final.lib.licenses; [ artistic1 gpl1Plus ]; - }; - }; - - }; - hydra = final.callPackage ./package.nix { inherit (nixpkgs.lib) fileset; rawSrc = self; From 743795b2b090a5cdfe8bd90120add8db7770086a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 3 May 2024 12:07:05 -0400 Subject: [PATCH 06/26] Factor out NixOS tests, and clean up Due to newer nixpkgs, there were a number of things that could be cleaned up in the process. --- flake.nix | 299 +----------------------------------- nixos-modules/default.nix | 11 +- nixos-tests.nix | 309 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 319 insertions(+), 300 deletions(-) create mode 100644 nixos-tests.nix diff --git a/flake.nix b/flake.nix index b3de3d73..7e2f3f67 100644 --- a/flake.nix +++ b/flake.nix @@ -17,24 +17,6 @@ overlays = overlayList; }); - # NixOS configuration used for VM tests. - hydraServer = - { config, pkgs, ... }: - { - imports = [ self.nixosModules.hydraTest ]; - - virtualisation.memorySize = 1024; - virtualisation.writableStore = true; - - environment.systemPackages = [ pkgs.perlPackages.LWP pkgs.perlPackages.JSON ]; - - nix = { - # Without this nix tries to fetch packages from the default - # cache.nixos.org which is not reachable from this sandboxed NixOS test. 
- binaryCaches = [ ]; - }; - }; - in rec { @@ -67,282 +49,9 @@ echo "doc manual $out/share/doc/hydra" >> $out/nix-support/hydra-build-products ''); - tests.install = forEachSystem (system: - with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }; - simpleTest { - name = "hydra-install"; - nodes.machine = hydraServer; - testScript = - '' - machine.wait_for_job("hydra-init") - machine.wait_for_job("hydra-server") - machine.wait_for_job("hydra-evaluator") - machine.wait_for_job("hydra-queue-runner") - machine.wait_for_open_port(3000) - machine.succeed("curl --fail http://localhost:3000/") - ''; - }); - - tests.notifications = forEachSystem (system: - let pkgs = pkgsBySystem.${system}; in - with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }; - simpleTest { - name = "hydra-notifications"; - nodes.machine = { pkgs, ... }: { - imports = [ hydraServer ]; - services.hydra-dev.extraConfig = '' - - url = http://127.0.0.1:8086 - db = hydra - - ''; - services.influxdb.enable = true; - }; - testScript = '' - machine.wait_for_job("hydra-init") - - # Create an admin account and some other state. 
- machine.succeed( - """ - su - hydra -c "hydra-create-user root --email-address 'alice@example.org' --password foobar --role admin" - mkdir /run/jobset - chmod 755 /run/jobset - cp ${./t/jobs/api-test.nix} /run/jobset/default.nix - chmod 644 /run/jobset/default.nix - chown -R hydra /run/jobset - """ - ) - - # Wait until InfluxDB can receive web requests - machine.wait_for_job("influxdb") - machine.wait_for_open_port(8086) - - # Create an InfluxDB database where hydra will write to - machine.succeed( - "curl -XPOST 'http://127.0.0.1:8086/query' " - + "--data-urlencode 'q=CREATE DATABASE hydra'" - ) - - # Wait until hydra-server can receive HTTP requests - machine.wait_for_job("hydra-server") - machine.wait_for_open_port(3000) - - # Setup the project and jobset - machine.succeed( - "su - hydra -c 'perl -I ${pkgs.hydra.perlDeps}/lib/perl5/site_perl ${./t/setup-notifications-jobset.pl}' >&2" - ) - - # Wait until hydra has build the job and - # the InfluxDBNotification plugin uploaded its notification to InfluxDB - machine.wait_until_succeeds( - "curl -s -H 'Accept: application/csv' " - + "-G 'http://127.0.0.1:8086/query?db=hydra' " - + "--data-urlencode 'q=SELECT * FROM hydra_build_status' | grep success" - ) - ''; - }); - - tests.gitea = forEachSystem (system: - let pkgs = pkgsBySystem.${system}; in - with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }; - makeTest { - name = "hydra-gitea"; - nodes.machine = { pkgs, ... 
}: { - imports = [ hydraServer ]; - services.hydra-dev.extraConfig = '' - - root=d7f16a3412e01a43a414535b16007c6931d3a9c7 - - ''; - nixpkgs.config.permittedInsecurePackages = [ "gitea-1.19.4" ]; - nix = { - settings.substituters = [ ]; - }; - services.gitea = { - enable = true; - database.type = "postgres"; - disableRegistration = true; - httpPort = 3001; - }; - services.openssh.enable = true; - environment.systemPackages = with pkgs; [ gitea git jq gawk ]; - networking.firewall.allowedTCPPorts = [ 3000 ]; - }; - skipLint = true; - testScript = - let - scripts.mktoken = pkgs.writeText "token.sql" '' - INSERT INTO access_token (id, uid, name, created_unix, updated_unix, token_hash, token_salt, token_last_eight, scope) VALUES (1, 1, 'hydra', 1617107360, 1617107360, 'a930f319ca362d7b49a4040ac0af74521c3a3c3303a86f327b01994430672d33b6ec53e4ea774253208686c712495e12a486', 'XRjWE9YW0g', '31d3a9c7', 'all'); - ''; - - scripts.git-setup = pkgs.writeShellScript "setup.sh" '' - set -x - mkdir -p /tmp/repo $HOME/.ssh - cat ${snakeoilKeypair.privkey} > $HOME/.ssh/privk - chmod 0400 $HOME/.ssh/privk - git -C /tmp/repo init - cp ${smallDrv} /tmp/repo/jobset.nix - git -C /tmp/repo add . 
- git config --global user.email test@localhost - git config --global user.name test - git -C /tmp/repo commit -m 'Initial import' - git -C /tmp/repo remote add origin gitea@machine:root/repo - GIT_SSH_COMMAND='ssh -i $HOME/.ssh/privk -o StrictHostKeyChecking=no' \ - git -C /tmp/repo push origin master - git -C /tmp/repo log >&2 - ''; - - scripts.hydra-setup = pkgs.writeShellScript "hydra.sh" '' - set -x - su -l hydra -c "hydra-create-user root --email-address \ - 'alice@example.org' --password foobar --role admin" - - URL=http://localhost:3000 - USERNAME="root" - PASSWORD="foobar" - PROJECT_NAME="trivial" - JOBSET_NAME="trivial" - mycurl() { - curl --referer $URL -H "Accept: application/json" \ - -H "Content-Type: application/json" $@ - } - - cat >data.json <data.json <data.json < $out; exit 0"]; - }; - } - ''; - in - '' - import json - - machine.start() - machine.wait_for_unit("multi-user.target") - machine.wait_for_open_port(3000) - machine.wait_for_open_port(3001) - - machine.succeed( - "su -l gitea -c 'GITEA_WORK_DIR=/var/lib/gitea gitea admin user create " - + "--username root --password root --email test@localhost'" - ) - machine.succeed("su -l postgres -c 'psql gitea < ${scripts.mktoken}'") - - machine.succeed( - "curl --fail -X POST http://localhost:3001/api/v1/user/repos " - + "-H 'Accept: application/json' -H 'Content-Type: application/json' " - + f"-H 'Authorization: token ${api_token}'" - + ' -d \'{"auto_init":false, "description":"string", "license":"mit", "name":"repo", "private":false}\''' - ) - - machine.succeed( - "curl --fail -X POST http://localhost:3001/api/v1/user/keys " - + "-H 'Accept: application/json' -H 'Content-Type: application/json' " - + f"-H 'Authorization: token ${api_token}'" - + ' -d \'{"key":"${snakeoilKeypair.pubkey}","read_only":true,"title":"SSH"}\''' - ) - - machine.succeed( - "${scripts.git-setup}" - ) - - machine.succeed( - "${scripts.hydra-setup}" - ) - - machine.wait_until_succeeds( - 'curl -Lf -s 
http://localhost:3000/build/1 -H "Accept: application/json" ' - + '| jq .buildstatus | xargs test 0 -eq' - ) - - data = machine.succeed( - 'curl -Lf -s "http://localhost:3001/api/v1/repos/root/repo/statuses/$(cd /tmp/repo && git show | head -n1 | awk "{print \\$2}")" ' - + "-H 'Accept: application/json' -H 'Content-Type: application/json' " - + f"-H 'Authorization: token ${api_token}'" - ) - - response = json.loads(data) - - assert len(response) == 2, "Expected exactly three status updates for latest commit (queued, finished)!" - assert response[0]['status'] == "success", "Expected finished status to be success!" - assert response[1]['status'] == "pending", "Expected queued status to be pending!" - - machine.shutdown() - ''; - }); - - tests.validate-openapi = forEachSystem (system: - let pkgs = pkgsBySystem.${system}; in - pkgs.runCommand "validate-openapi" - { buildInputs = [ pkgs.openapi-generator-cli ]; } - '' - openapi-generator-cli validate -i ${./hydra-api.yaml} - touch $out - ''); + tests = import ./nixos-tests.nix { + inherit forEachSystem nixpkgs pkgsBySystem nixosModules; + }; container = nixosConfigurations.container.config.system.build.toplevel; }; @@ -366,6 +75,8 @@ system = "x86_64-linux"; modules = [ + self.nixosModules.hydra + self.nixosModules.overlayNixpkgsForThisHyydra self.nixosModules.hydraTest self.nixosModules.hydraProxy { diff --git a/nixos-modules/default.nix b/nixos-modules/default.nix index 6fc19d31..f44d7808 100644 --- a/nixos-modules/default.nix +++ b/nixos-modules/default.nix @@ -1,14 +1,13 @@ { overlays }: -rec { - hydra = { - imports = [ ./hydra.nix ]; +{ + hydra = import ./hydra.nix; + + overlayNixpkgsForThisHyydra = { nixpkgs = { inherit overlays; }; }; hydraTest = { pkgs, ... 
}: { - imports = [ hydra ]; - services.hydra-dev.enable = true; services.hydra-dev.hydraURL = "http://hydra.example.org"; services.hydra-dev.notificationSender = "admin@hydra.example.org"; @@ -16,7 +15,7 @@ rec { systemd.services.hydra-send-stats.enable = false; services.postgresql.enable = true; - services.postgresql.package = pkgs.postgresql_11; + services.postgresql.package = pkgs.postgresql_12; # The following is to work around the following error from hydra-server: # [error] Caught exception in engine "Cannot determine local time zone" diff --git a/nixos-tests.nix b/nixos-tests.nix new file mode 100644 index 00000000..3c9dc6c8 --- /dev/null +++ b/nixos-tests.nix @@ -0,0 +1,309 @@ +{ forEachSystem, nixpkgs, pkgsBySystem, nixosModules }: + +let + # NixOS configuration used for VM tests. + hydraServer = + { config, pkgs, ... }: + { + imports = [ + nixosModules.hydra + nixosModules.overlayNixpkgsForThisHyydra + nixosModules.hydraTest + ]; + + virtualisation.memorySize = 1024; + virtualisation.writableStore = true; + + environment.systemPackages = [ pkgs.perlPackages.LWP pkgs.perlPackages.JSON ]; + + nix = { + # Without this nix tries to fetch packages from the default + # cache.nixos.org which is not reachable from this sandboxed NixOS test. 
+ settings.substituters = [ ]; + }; + }; + +in + +{ + + install = forEachSystem (system: + with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }; + simpleTest { + name = "hydra-install"; + nodes.machine = hydraServer; + testScript = + '' + machine.wait_for_job("hydra-init") + machine.wait_for_job("hydra-server") + machine.wait_for_job("hydra-evaluator") + machine.wait_for_job("hydra-queue-runner") + machine.wait_for_open_port(3000) + machine.succeed("curl --fail http://localhost:3000/") + ''; + }); + + notifications = forEachSystem (system: + let pkgs = pkgsBySystem.${system}; in + with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }; + simpleTest { + name = "hydra-notifications"; + nodes.machine = { pkgs, ... }: { + imports = [ hydraServer ]; + services.hydra-dev.extraConfig = '' + + url = http://127.0.0.1:8086 + db = hydra + + ''; + services.influxdb.enable = true; + }; + testScript = '' + machine.wait_for_job("hydra-init") + + # Create an admin account and some other state. 
+ machine.succeed( + """ + su - hydra -c "hydra-create-user root --email-address 'alice@example.org' --password foobar --role admin" + mkdir /run/jobset + chmod 755 /run/jobset + cp ${./t/jobs/api-test.nix} /run/jobset/default.nix + chmod 644 /run/jobset/default.nix + chown -R hydra /run/jobset + """ + ) + + # Wait until InfluxDB can receive web requests + machine.wait_for_job("influxdb") + machine.wait_for_open_port(8086) + + # Create an InfluxDB database where hydra will write to + machine.succeed( + "curl -XPOST 'http://127.0.0.1:8086/query' " + + "--data-urlencode 'q=CREATE DATABASE hydra'" + ) + + # Wait until hydra-server can receive HTTP requests + machine.wait_for_job("hydra-server") + machine.wait_for_open_port(3000) + + # Setup the project and jobset + machine.succeed( + "su - hydra -c 'perl -I ${pkgs.hydra.perlDeps}/lib/perl5/site_perl ${./t/setup-notifications-jobset.pl}' >&2" + ) + + # Wait until hydra has build the job and + # the InfluxDBNotification plugin uploaded its notification to InfluxDB + machine.wait_until_succeeds( + "curl -s -H 'Accept: application/csv' " + + "-G 'http://127.0.0.1:8086/query?db=hydra' " + + "--data-urlencode 'q=SELECT * FROM hydra_build_status' | grep success" + ) + ''; + }); + + gitea = forEachSystem (system: + let pkgs = pkgsBySystem.${system}; in + with import (nixpkgs + "/nixos/lib/testing-python.nix") { inherit system; }; + makeTest { + name = "hydra-gitea"; + nodes.machine = { pkgs, ... 
}: { + imports = [ hydraServer ]; + services.hydra-dev.extraConfig = '' + + root=d7f16a3412e01a43a414535b16007c6931d3a9c7 + + ''; + nixpkgs.config.permittedInsecurePackages = [ "gitea-1.19.4" ]; + nix = { + settings.substituters = [ ]; + }; + services.gitea = { + enable = true; + database.type = "postgres"; + settings = { + service.DISABLE_REGISTRATION = true; + server.HTTP_PORT = 3001; + }; + }; + services.openssh.enable = true; + environment.systemPackages = with pkgs; [ gitea git jq gawk ]; + networking.firewall.allowedTCPPorts = [ 3000 ]; + }; + skipLint = true; + testScript = + let + scripts.mktoken = pkgs.writeText "token.sql" '' + INSERT INTO access_token (id, uid, name, created_unix, updated_unix, token_hash, token_salt, token_last_eight, scope) VALUES (1, 1, 'hydra', 1617107360, 1617107360, 'a930f319ca362d7b49a4040ac0af74521c3a3c3303a86f327b01994430672d33b6ec53e4ea774253208686c712495e12a486', 'XRjWE9YW0g', '31d3a9c7', 'all'); + ''; + + scripts.git-setup = pkgs.writeShellScript "setup.sh" '' + set -x + mkdir -p /tmp/repo $HOME/.ssh + cat ${snakeoilKeypair.privkey} > $HOME/.ssh/privk + chmod 0400 $HOME/.ssh/privk + git -C /tmp/repo init + cp ${smallDrv} /tmp/repo/jobset.nix + git -C /tmp/repo add . 
+ git config --global user.email test@localhost + git config --global user.name test + git -C /tmp/repo commit -m 'Initial import' + git -C /tmp/repo remote add origin gitea@machine:root/repo + GIT_SSH_COMMAND='ssh -i $HOME/.ssh/privk -o StrictHostKeyChecking=no' \ + git -C /tmp/repo push origin master + git -C /tmp/repo log >&2 + ''; + + scripts.hydra-setup = pkgs.writeShellScript "hydra.sh" '' + set -x + su -l hydra -c "hydra-create-user root --email-address \ + 'alice@example.org' --password foobar --role admin" + + URL=http://localhost:3000 + USERNAME="root" + PASSWORD="foobar" + PROJECT_NAME="trivial" + JOBSET_NAME="trivial" + mycurl() { + curl --referer $URL -H "Accept: application/json" \ + -H "Content-Type: application/json" $@ + } + + cat >data.json <data.json <data.json < $out; exit 0"]; + }; + } + ''; + in + '' + import json + + machine.start() + machine.wait_for_unit("multi-user.target") + machine.wait_for_open_port(3000) + machine.wait_for_open_port(3001) + + machine.succeed( + "su -l gitea -c 'GITEA_WORK_DIR=/var/lib/gitea gitea admin user create " + + "--username root --password root --email test@localhost'" + ) + machine.succeed("su -l postgres -c 'psql gitea < ${scripts.mktoken}'") + + machine.succeed( + "curl --fail -X POST http://localhost:3001/api/v1/user/repos " + + "-H 'Accept: application/json' -H 'Content-Type: application/json' " + + f"-H 'Authorization: token ${api_token}'" + + ' -d \'{"auto_init":false, "description":"string", "license":"mit", "name":"repo", "private":false}\''' + ) + + machine.succeed( + "curl --fail -X POST http://localhost:3001/api/v1/user/keys " + + "-H 'Accept: application/json' -H 'Content-Type: application/json' " + + f"-H 'Authorization: token ${api_token}'" + + ' -d \'{"key":"${snakeoilKeypair.pubkey}","read_only":true,"title":"SSH"}\''' + ) + + machine.succeed( + "${scripts.git-setup}" + ) + + machine.succeed( + "${scripts.hydra-setup}" + ) + + machine.wait_until_succeeds( + 'curl -Lf -s 
http://localhost:3000/build/1 -H "Accept: application/json" ' + + '| jq .buildstatus | xargs test 0 -eq' + ) + + data = machine.succeed( + 'curl -Lf -s "http://localhost:3001/api/v1/repos/root/repo/statuses/$(cd /tmp/repo && git show | head -n1 | awk "{print \\$2}")" ' + + "-H 'Accept: application/json' -H 'Content-Type: application/json' " + + f"-H 'Authorization: token ${api_token}'" + ) + + response = json.loads(data) + + assert len(response) == 2, "Expected exactly three status updates for latest commit (queued, finished)!" + assert response[0]['status'] == "success", "Expected finished status to be success!" + assert response[1]['status'] == "pending", "Expected queued status to be pending!" + + machine.shutdown() + ''; + }); + + validate-openapi = forEachSystem (system: + let pkgs = pkgsBySystem.${system}; in + pkgs.runCommand "validate-openapi" + { buildInputs = [ pkgs.openapi-generator-cli ]; } + '' + openapi-generator-cli validate -i ${./hydra-api.yaml} + touch $out + ''); + +} From e149da7b9bbc04bd0b1ca03fa0768e958cbcd40e Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 3 May 2024 12:41:17 -0400 Subject: [PATCH 07/26] Try again to ensure hydra module is usable Nixpkgs only contains a `hydra_unstable`, not `hydra`, package, so adjust the default accordingly, and then override it to our package in the separate module which does that. 
--- flake.nix | 2 +- nixos-modules/default.nix | 3 ++- nixos-modules/hydra.nix | 4 ++-- nixos-tests.nix | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/flake.nix b/flake.nix index 7e2f3f67..1f73e3b3 100644 --- a/flake.nix +++ b/flake.nix @@ -76,7 +76,7 @@ modules = [ self.nixosModules.hydra - self.nixosModules.overlayNixpkgsForThisHyydra + self.nixosModules.overlayNixpkgsForThisHydra self.nixosModules.hydraTest self.nixosModules.hydraProxy { diff --git a/nixos-modules/default.nix b/nixos-modules/default.nix index f44d7808..dac705c3 100644 --- a/nixos-modules/default.nix +++ b/nixos-modules/default.nix @@ -3,8 +3,9 @@ { hydra = import ./hydra.nix; - overlayNixpkgsForThisHyydra = { + overlayNixpkgsForThisHydra = { pkgs, ... }: { nixpkgs = { inherit overlays; }; + services.hydra.package = pkgs.hydra; }; hydraTest = { pkgs, ... }: { diff --git a/nixos-modules/hydra.nix b/nixos-modules/hydra.nix index 1f0792d7..7a0486c1 100644 --- a/nixos-modules/hydra.nix +++ b/nixos-modules/hydra.nix @@ -68,7 +68,7 @@ in package = mkOption { type = types.path; - default = pkgs.hydra; + default = pkgs.hydra_unstable; defaultText = literalExpression "pkgs.hydra"; description = "The Hydra package."; }; @@ -233,7 +233,7 @@ in gc-keep-outputs = true; gc-keep-derivations = true; }; - + services.hydra-dev.extraConfig = '' using_frontend_proxy = 1 diff --git a/nixos-tests.nix b/nixos-tests.nix index 3c9dc6c8..19a9ba35 100644 --- a/nixos-tests.nix +++ b/nixos-tests.nix @@ -7,7 +7,7 @@ let { imports = [ nixosModules.hydra - nixosModules.overlayNixpkgsForThisHyydra + nixosModules.overlayNixpkgsForThisHydra nixosModules.hydraTest ]; From f99cdaf5fe1d70a00ea80d3cd6cbe29e6172b7eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Wed, 8 May 2024 21:31:32 +0200 Subject: [PATCH 08/26] README: update wiki link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2a085325..58373052 100644 --- a/README.md +++ 
b/README.md @@ -140,7 +140,7 @@ You can also interface with Hydra through a JSON API. The API is defined in [hyd ## Additional Resources - [Hydra User's Guide](https://nixos.org/hydra/manual/) -- [Hydra on the NixOS Wiki](https://nixos.wiki/wiki/Hydra) +- [Hydra on the NixOS Wiki](https://wiki.nixos.org/wiki/Hydra) - [hydra-cli](https://github.com/nlewo/hydra-cli) - [Peter Simons - Hydra: Setting up your own build farm (NixOS)](https://www.youtube.com/watch?v=RXV0Y5Bn-QQ) From 5728011da1aae308694a7098e00610b78d49451b Mon Sep 17 00:00:00 2001 From: Pierre Bourdon Date: Thu, 22 Feb 2024 14:45:17 +0100 Subject: [PATCH 09/26] queue-runner: try larger pipe buffer sizes (cherry picked from commit 18466e83261d39b997a73bbd9f0f249c3a91fbeb) --- src/hydra-queue-runner/build-remote.cc | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/hydra-queue-runner/build-remote.cc b/src/hydra-queue-runner/build-remote.cc index ad510e1b..1cabd291 100644 --- a/src/hydra-queue-runner/build-remote.cc +++ b/src/hydra-queue-runner/build-remote.cc @@ -54,9 +54,20 @@ static std::unique_ptr openConnection( command.splice(command.end(), extraStoreArgs(machine->sshName)); } - return master.startCommand(std::move(command), { + auto ret = master.startCommand(std::move(command), { "-a", "-oBatchMode=yes", "-oConnectTimeout=60", "-oTCPKeepAlive=yes" }); + + // XXX: determine the actual max value we can use from /proc. + + // FIXME: Should this be upstreamed into `startCommand` in Nix? 
+ + int pipesize = 1024 * 1024; + + fcntl(ret->in.get(), F_SETPIPE_SZ, pipesize); + fcntl(ret->out.get(), F_SETPIPE_SZ, pipesize); + + return ret; } From bc19e7cd65e55e39e9c304d108010399ef0987a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Niklas=20Hamb=C3=BCchen?= Date: Sat, 20 Jul 2024 23:45:12 +0200 Subject: [PATCH 10/26] renderInputDiff: Increase git hash length 8 -> 12 See investigation on lengths required to be conflict-free in practice: https://github.com/NixOS/hydra/pull/1258#issuecomment-1321891677 --- src/root/common.tt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/root/common.tt b/src/root/common.tt index 4487cbe3..869d8856 100644 --- a/src/root/common.tt +++ b/src/root/common.tt @@ -374,7 +374,7 @@ BLOCK renderInputDiff; %] [% ELSIF bi1.uri == bi2.uri && bi1.revision != bi2.revision %] [% IF bi1.type == "git" %] - [% bi1.name %][% INCLUDE renderDiffUri contents=(bi1.revision.substr(0, 8) _ ' to ' _ bi2.revision.substr(0, 8)) %] + [% bi1.name %][% INCLUDE renderDiffUri contents=(bi1.revision.substr(0, 12) _ ' to ' _ bi2.revision.substr(0, 12)) %] [% ELSE %] From ada51d70fc3e7fec52eeeef26b937e4d3ae0529b Mon Sep 17 00:00:00 2001 From: marius david Date: Tue, 23 Jul 2024 22:37:32 +0200 Subject: [PATCH 11/26] Document the default user and port in hacking.md --- doc/manual/src/hacking.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/src/hacking.md b/doc/manual/src/hacking.md index 49c17395..de826720 100644 --- a/doc/manual/src/hacking.md +++ b/doc/manual/src/hacking.md @@ -30,6 +30,8 @@ foreman: $ foreman start ``` +The Hydra interface will be available on port 63333, with an admin user named "alice" with password "foobar" + You can run just the Hydra web server in your source tree as follows: ```console From 578a3d22920c2dc319c3c55ff0b63e899aa15588 Mon Sep 17 00:00:00 2001 From: Rick van Schijndel Date: Wed, 31 Jul 2024 01:19:40 +0200 Subject: [PATCH 12/26] t: increase timeouts for slow commands with high load We've
seen many fails on ofborg, at lot of them ultimately appear to come down to a timeout being hit, resulting in something like this: Failure executing slapadd -F //slap.d -b dc=example -l //load.ldif. Hopefully this resolves it for most cases. I've done some endurance testing and this helps a lot. some other commands also regularly time-out with high load: - hydra-init - hydra-create-user - nix-store --delete This should address most issues with tests randomly failing. Used the following script for endurance testing: ``` import os import subprocess run_counter = 0 fail_counter = 0 while True: try: run_counter += 1 print(f"Starting run {run_counter}") env = os.environ env["YATH_JOB_COUNT"] = "20" result = subprocess.run(["perl", "t/test.pl"], env=env) if (result.returncode != 0): fail_counter += 1 print(f"Finish run {run_counter}, total fail count: {fail_counter}") except KeyboardInterrupt: print(f"Finished {run_counter} runs with {fail_counter} fails") break ``` In case someone else wants to do it on their system :). Note that YATH_JOB_COUNT may need to be changed loosely based on your cores. I only have 4 cores (8 threads), so for others higher numbers might yield better results in hashing out unstable tests. 
--- t/lib/HydraTestContext.pm | 2 +- t/lib/LDAPContext.pm | 6 +++--- .../build-locally-with-substitutable-path.t | 2 +- t/scripts/hydra-create-user.t | 14 +++++++------- t/scripts/hydra-init.t | 4 ++-- 5 files changed, 14 insertions(+), 14 deletions(-) diff --git a/t/lib/HydraTestContext.pm b/t/lib/HydraTestContext.pm index e1a5b226..1d6fa909 100644 --- a/t/lib/HydraTestContext.pm +++ b/t/lib/HydraTestContext.pm @@ -92,7 +92,7 @@ sub new { $opts{'before_init'}->($self); } - expectOkay(5, ("hydra-init")); + expectOkay(30, ("hydra-init")); return $self; } diff --git a/t/lib/LDAPContext.pm b/t/lib/LDAPContext.pm index 2cd1a19d..df1334f0 100644 --- a/t/lib/LDAPContext.pm +++ b/t/lib/LDAPContext.pm @@ -70,7 +70,7 @@ sub add_user { my $email = $opts{'email'} // "$name\@example"; my $password = $opts{'password'} // rand_chars(); - my ($res, $stdout, $stderr) = captureStdoutStderr(1, ("slappasswd", "-s", $password)); + my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("slappasswd", "-s", $password)); if ($res) { die "Failed to execute slappasswd ($res): $stderr, $stdout"; } @@ -178,7 +178,7 @@ sub start { sub validateConfig { my ($self) = @_; - expectOkay(1, ("slaptest", "-u", "-F", $self->{"_slapd_dir"})); + expectOkay(5, ("slaptest", "-u", "-F", $self->{"_slapd_dir"})); } sub _spawn { @@ -218,7 +218,7 @@ sub load_ldif { my $path = "${\$self->{'_tmpdir'}}/load.ldif"; write_file($path, $content); - expectOkay(1, ("slapadd", "-F", $self->{"_slapd_dir"}, "-b", $suffix, "-l", $path)); + expectOkay(5, ("slapadd", "-F", $self->{"_slapd_dir"}, "-b", $suffix, "-l", $path)); $self->validateConfig(); } diff --git a/t/queue-runner/build-locally-with-substitutable-path.t b/t/queue-runner/build-locally-with-substitutable-path.t index e3b31761..6477635a 100644 --- a/t/queue-runner/build-locally-with-substitutable-path.t +++ b/t/queue-runner/build-locally-with-substitutable-path.t @@ -39,7 +39,7 @@ subtest "Building, caching, and then garbage collecting the underlying job" => s 
ok(unlink(Hydra::Helper::Nix::gcRootFor($path)), "Unlinking the GC root for underlying Dependency succeeds"); - (my $ret, my $stdout, my $stderr) = captureStdoutStderr(5, "nix-store", "--delete", $path); + (my $ret, my $stdout, my $stderr) = captureStdoutStderr(15, "nix-store", "--delete", $path); is($ret, 0, "Deleting the underlying dependency should succeed"); }; diff --git a/t/scripts/hydra-create-user.t b/t/scripts/hydra-create-user.t index 71a5eda3..7f943f9d 100644 --- a/t/scripts/hydra-create-user.t +++ b/t/scripts/hydra-create-user.t @@ -9,7 +9,7 @@ my $db = $ctx->db(); subtest "Handling password and password hash creation" => sub { subtest "Creating a user with a plain text password (insecure) stores the password securely" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-create-user", "plain-text-user", "--password", "foobar")); + my ($res, $stdout, $stderr) = captureStdoutStderr(15, ("hydra-create-user", "plain-text-user", "--password", "foobar")); is($res, 0, "hydra-create-user should exit zero"); like($stderr, qr/Submitting plaintext passwords as arguments is deprecated and will be removed/, "Submitting a plain text password is deprecated."); @@ -23,7 +23,7 @@ subtest "Handling password and password hash creation" => sub { }; subtest "Creating a user with a sha1 password (still insecure) stores the password as a hashed sha1" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-create-user", "old-password-hash-user", "--password-hash", "8843d7f92416211de9ebb963ff4ce28125932878")); + my ($res, $stdout, $stderr) = captureStdoutStderr(15, ("hydra-create-user", "old-password-hash-user", "--password-hash", "8843d7f92416211de9ebb963ff4ce28125932878")); is($res, 0, "hydra-create-user should exit zero"); my $user = $db->resultset('Users')->find({ username => "old-password-hash-user" }); @@ -36,7 +36,7 @@ subtest "Handling password and password hash creation" => sub { }; subtest "Creating a user with an argon2 password 
stores the password as given" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-create-user", "argon2-hash-user", "--password-hash", '$argon2id$v=19$m=262144,t=3,p=1$tMnV5paYjmIrUIb6hylaNA$M8/e0i3NGrjhOliVLa5LqQ')); + my ($res, $stdout, $stderr) = captureStdoutStderr(15, ("hydra-create-user", "argon2-hash-user", "--password-hash", '$argon2id$v=19$m=262144,t=3,p=1$tMnV5paYjmIrUIb6hylaNA$M8/e0i3NGrjhOliVLa5LqQ')); is($res, 0, "hydra-create-user should exit zero"); my $user = $db->resultset('Users')->find({ username => "argon2-hash-user" }); @@ -50,7 +50,7 @@ subtest "Handling password and password hash creation" => sub { subtest "Creating a user by prompting for the password" => sub { subtest "with the same password twice" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(5, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nmy-password\n"); + my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(15, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nmy-password\n"); is($res, 0, "hydra-create-user should exit zero"); my $user = $db->resultset('Users')->find({ username => "prompted-pass-user" }); @@ -62,7 +62,7 @@ subtest "Handling password and password hash creation" => sub { }; subtest "With mismatched password confirmation" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(5, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nnot-my-password\n"); + my ($res, $stdout, $stderr) = captureStdoutStderrWithStdin(15, ["hydra-create-user", "prompted-pass-user", "--password-prompt"], "my-password\nnot-my-password\n"); isnt($res, 0, "hydra-create-user should exit non-zero"); }; }; @@ -76,7 +76,7 @@ subtest "Handling password and password hash creation" => sub { ); for my $case (@cases) { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ( + my ($res, $stdout, $stderr) = captureStdoutStderr(15, ( "hydra-create-user", 
"bogus-password-options", @{$case})); like($stderr, qr/please specify only one of --password-prompt or --password-hash/, "We get an error about specifying the password"); isnt($res, 0, "hydra-create-user should exit non-zero with conflicting " . join(" ", @{$case})); @@ -84,7 +84,7 @@ subtest "Handling password and password hash creation" => sub { }; subtest "A password is not required for creating a Google-based account" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ( + my ($res, $stdout, $stderr) = captureStdoutStderr(15, ( "hydra-create-user", "google-account", "--type", "google")); is($res, 0, "hydra-create-user should exit zero"); }; diff --git a/t/scripts/hydra-init.t b/t/scripts/hydra-init.t index bd5bd4bf..603aa4a4 100644 --- a/t/scripts/hydra-init.t +++ b/t/scripts/hydra-init.t @@ -28,7 +28,7 @@ subtest "hydra-init upgrades user's password hashes from sha1 to sha1 inside Arg $janet->setPassword("foobar"); is($alice->password, "8843d7f92416211de9ebb963ff4ce28125932878", "Alices's sha1 is stored in the database"); - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-init")); + my ($res, $stdout, $stderr) = captureStdoutStderr(30, ("hydra-init")); if ($res != 0) { is($stdout, ""); is($stderr, ""); @@ -55,7 +55,7 @@ subtest "hydra-init upgrades user's password hashes from sha1 to sha1 inside Arg }; subtest "Running hydra-init don't break Alice or Janet's passwords" => sub { - my ($res, $stdout, $stderr) = captureStdoutStderr(5, ("hydra-init")); + my ($res, $stdout, $stderr) = captureStdoutStderr(30, ("hydra-init")); is($res, 0, "hydra-init should exit zero"); my $updatedAlice = $db->resultset('Users')->find({ username => "alice" }); From a6b14369ee05c376deb04dd71062a5b95f186096 Mon Sep 17 00:00:00 2001 From: Rick van Schijndel Date: Wed, 31 Jul 2024 17:10:44 +0200 Subject: [PATCH 13/26] t/test.pl: increase event-timeout, set qvf Only log issues/failures when something's actually up. 
It has irked me for a long time that so much output came out of running the tests, this seems to silence it. It does hide some warnings, but I think it makes the output so much more readable that it's worth the tradeoff. Helps for highly parallel running of jobs, sometimes they'd not give output for a while. Setting this timeout higher appears to help. Not completely sure if this is the right place to do it, but it works fine for me. --- t/test.pl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/t/test.pl b/t/test.pl index ba7f3781..12284637 100644 --- a/t/test.pl +++ b/t/test.pl @@ -21,7 +21,7 @@ if (defined($ENV{"NIX_BUILD_CORES"}) print STDERR "test.pl: Defaulting \$YATH_JOB_COUNT to \$NIX_BUILD_CORES (${\$ENV{'NIX_BUILD_CORES'}})\n"; } -system($^X, find_yath(), '-D', 'test', '--default-search' => './', @ARGV); +system($^X, find_yath(), '-D', 'test', '--qvf', '--event-timeout', 240, '--default-search' => './', @ARGV); my $exit = $?; # This makes sure it works with prove. From 54002f0fcf4a7cb65baf3e25e665e5325292f609 Mon Sep 17 00:00:00 2001 From: Rick van Schijndel Date: Wed, 31 Jul 2024 17:12:47 +0200 Subject: [PATCH 14/26] t/evaluator/evaluate-oom-job.t: always skip, the test always fails We should look into how to resolve this, but I tried some things and nothing really worked. Let's put it skipped for now until someone comes along to improve it. --- t/evaluator/evaluate-oom-job.t | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/t/evaluator/evaluate-oom-job.t b/t/evaluator/evaluate-oom-job.t index 6c17d4e4..8f0450c5 100644 --- a/t/evaluator/evaluate-oom-job.t +++ b/t/evaluator/evaluate-oom-job.t @@ -31,6 +31,10 @@ if ($sd_res != 0) { skip_all("`systemd-run` returned non-zero when executing `true` (expected 0)"); } +# XXX(Mindavi): We should think about how to fix this. +# Note that it was always skipped on ofborg/h.n.o (nixos hydra) since systemd-run is not present in the ambient environment there. 
+skip_all("Always fails, an error about 'oom' being a string is logged and the process never OOMs. Needs a way to use more memory."); + my $ctx = test_context(); # Contain the memory usage to 25 MegaBytes using `systemd-run` From 2c886f51d3554bfd4246c34cc3dab15a5b6efc1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janne=20He=C3=9F?= Date: Sun, 12 Jun 2022 17:57:49 +0200 Subject: [PATCH 15/26] CompressLog: Add zstd compression --- nixos-modules/hydra.nix | 1 + src/lib/Hydra/Helper/Nix.pm | 3 +++ src/lib/Hydra/Plugin/CompressLog.pm | 21 +++++++++++++++++---- src/lib/Hydra/View/NixLog.pm | 5 ++++- 4 files changed, 25 insertions(+), 5 deletions(-) diff --git a/nixos-modules/hydra.nix b/nixos-modules/hydra.nix index 7a0486c1..576bc553 100644 --- a/nixos-modules/hydra.nix +++ b/nixos-modules/hydra.nix @@ -408,6 +408,7 @@ in requires = [ "hydra-init.service" ]; after = [ "hydra-init.service" ]; restartTriggers = [ hydraConf ]; + path = [ pkgs.zstd ]; environment = env // { PGPASSFILE = "${baseDir}/pgpass-queue-runner"; # grrr HYDRA_DBI = "${env.HYDRA_DBI};application_name=hydra-notify"; diff --git a/src/lib/Hydra/Helper/Nix.pm b/src/lib/Hydra/Helper/Nix.pm index 16637b2b..bff7a5ed 100644 --- a/src/lib/Hydra/Helper/Nix.pm +++ b/src/lib/Hydra/Helper/Nix.pm @@ -174,6 +174,9 @@ sub getDrvLogPath { for ($fn . $bucketed, $fn . $bucketed . ".bz2") { return $_ if -f $_; } + for ($fn . $bucketed, $fn . $bucketed . 
".zst") { + return $_ if -f $_; + } return undef; } diff --git a/src/lib/Hydra/Plugin/CompressLog.pm b/src/lib/Hydra/Plugin/CompressLog.pm index 10e8f6cc..fe4d33b0 100644 --- a/src/lib/Hydra/Plugin/CompressLog.pm +++ b/src/lib/Hydra/Plugin/CompressLog.pm @@ -9,11 +9,24 @@ use Hydra::Helper::CatalystUtils; sub stepFinished { my ($self, $step, $logPath) = @_; - my $doCompress = $self->{config}->{'compress_build_logs'} // "1"; + my $doCompress = $self->{config}->{'compress_build_logs'} // '1'; + my $silent = $self->{config}->{'compress_build_logs_silent'} // '0'; + my $compression = $self->{config}->{'compress_build_logs_compression'} // 'bzip2'; - if ($doCompress eq "1" && -e $logPath) { - print STDERR "compressing ‘$logPath’...\n"; - system("bzip2", "--force", $logPath); + if (not -e $logPath or $doCompress ne "1") { + return; + } + + if ($silent ne '1') { + print STDERR "compressing '$logPath' with $compression...\n"; + } + + if ($compression eq 'bzip2') { + system('bzip2', '--force', $logPath); + } elsif ($compression eq 'zstd') { + system('zstd', '--rm', '--quiet', '-T0', $logPath); + } else { + print STDERR "unknown compression type '$compression'\n"; } } diff --git a/src/lib/Hydra/View/NixLog.pm b/src/lib/Hydra/View/NixLog.pm index 7f37ae78..fe37d900 100644 --- a/src/lib/Hydra/View/NixLog.pm +++ b/src/lib/Hydra/View/NixLog.pm @@ -16,7 +16,10 @@ sub process { my $tail = int($c->stash->{tail} // "0"); - if ($logPath =~ /\.bz2$/) { + if ($logPath =~ /\.zst$/) { + my $doTail = $tail ? "| tail -n '$tail'" : ""; + open($fh, "-|", "zstd -dc < '$logPath' $doTail") or die; + } elsif ($logPath =~ /\.bz2$/) { my $doTail = $tail ? 
"| tail -n '$tail'" : ""; open($fh, "-|", "bzip2 -dc < '$logPath' $doTail") or die; } else { From 99ca560d583239b1bc34485f2f719989642fa697 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sandro=20J=C3=A4ckel?= Date: Thu, 8 Aug 2024 11:24:16 +0200 Subject: [PATCH 16/26] Use configured compression in hydra-compress-logs service --- nixos-modules/hydra.nix | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/nixos-modules/hydra.nix b/nixos-modules/hydra.nix index 576bc553..97906cb3 100644 --- a/nixos-modules/hydra.nix +++ b/nixos-modules/hydra.nix @@ -459,10 +459,15 @@ in # logs automatically after a step finishes, but this doesn't work # if the queue runner is stopped prematurely. systemd.services.hydra-compress-logs = - { path = [ pkgs.bzip2 ]; + { path = [ pkgs.bzip2 pkgs.zstd ]; script = '' - find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r bzip2 -v -f + set -eou pipefail + compression=$(sed -nr 's/compress_build_logs_compression = ()/\1/p' ${baseDir}/hydra.conf) + if [[ $compression == zstd ]]; then + compression="zstd --rm" + fi + find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r $compression --force --quiet ''; startAt = "Sun 01:45"; }; From b2b2d6e26ce8a212541fca780985216279c3c00c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sandro=20J=C3=A4ckel?= Date: Thu, 8 Aug 2024 13:33:08 +0200 Subject: [PATCH 17/26] Expand docs with new compression options --- doc/manual/src/plugins/README.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/doc/manual/src/plugins/README.md b/doc/manual/src/plugins/README.md index 6e46224f..93aa80b4 100644 --- a/doc/manual/src/plugins/README.md +++ b/doc/manual/src/plugins/README.md @@ -42,7 +42,7 @@ Sets CircleCI status. ## Compress build logs -Compresses build logs after a build with bzip2. +Compresses build logs after a build with bzip2 or zstd. ### Configuration options @@ -50,6 +50,14 @@ Compresses build logs after a build with bzip2. 
Enable log compression +- `compress_build_logs_compression` + +Which compression format to use. Valid values are bzip2 (default) and zstd. + +- `compress_build_logs_silent` + +Whether to compress logs silently. + ### Example ```xml From 250780aaf2c185b4b8530ae82dd67dc8e00f4345 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Tue, 20 Aug 2024 08:15:18 +0200 Subject: [PATCH 18/26] tests: use `like` for testing regexes This gives us better diagnostics when the test fails. --- t/Hydra/Controller/Job/builds.t | 13 +++++++------ t/Hydra/Controller/Jobset/http.t | 2 +- t/Hydra/Controller/User/dashboard.t | 2 +- t/api-test.t | 2 +- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/t/Hydra/Controller/Job/builds.t b/t/Hydra/Controller/Job/builds.t index d933fbb8..aa6a36ce 100644 --- a/t/Hydra/Controller/Job/builds.t +++ b/t/Hydra/Controller/Job/builds.t @@ -54,13 +54,14 @@ subtest "/job/PROJECT/JOBSET/JOB/shield" => sub { subtest "/job/PROJECT/JOBSET/JOB/prometheus" => sub { my $response = request(GET '/job/' . $project->name . '/' . $jobset->name . '/' . $build->job . 
'/prometheus'); - ok($response->is_success, "The page showing the job's prometheus data returns 200."); - my $metrics = $response->content; - ok($metrics =~ m/hydra_job_failed\{.*\} 0/); - ok($metrics =~ m/hydra_job_completion_time\{.*\} [\d]+/); - ok($metrics =~ m/hydra_build_closure_size\{.*\} 96/); - ok($metrics =~ m/hydra_build_output_size\{.*\} 96/); + ok($response->is_success, "The page showing the job's prometheus data returns 200."); + + my $metrics = $response->content; + like($metrics, qr/hydra_job_failed\{.*\} 0/); + like($metrics, qr/hydra_job_completion_time\{.*\} [\d]+/); + like($metrics, qr/hydra_build_closure_size\{.*\} 96/); + like($metrics, qr/hydra_build_output_size\{.*\} 96/); }; done_testing; diff --git a/t/Hydra/Controller/Jobset/http.t b/t/Hydra/Controller/Jobset/http.t index 4e53949d..1a60715c 100644 --- a/t/Hydra/Controller/Jobset/http.t +++ b/t/Hydra/Controller/Jobset/http.t @@ -186,7 +186,7 @@ subtest 'Update jobset "job" to have an invalid input type' => sub { }) ); ok(!$jobsetupdate->is_success); - ok($jobsetupdate->content =~ m/Invalid input type.*valid types:/); + like($jobsetupdate->content, qr/Invalid input type.*valid types:/); }; diff --git a/t/Hydra/Controller/User/dashboard.t b/t/Hydra/Controller/User/dashboard.t index 8a24585d..12d7dd80 100644 --- a/t/Hydra/Controller/User/dashboard.t +++ b/t/Hydra/Controller/User/dashboard.t @@ -24,7 +24,7 @@ my $cookie = $login->header("set-cookie"); my $my_jobs = request(GET '/dashboard/alice/my-jobs-tab', Accept => 'application/json', Cookie => $cookie); ok($my_jobs->is_success); my $content = $my_jobs->content(); -ok($content =~ /empty_dir/); +like($content, qr/empty_dir/); ok(!($content =~ /fails/)); ok(!($content =~ /succeed_with_failed/)); done_testing; diff --git a/t/api-test.t b/t/api-test.t index e89d4460..637d412c 100644 --- a/t/api-test.t +++ b/t/api-test.t @@ -115,7 +115,7 @@ subtest "evaluation" => sub { my $build = decode_json(request_json({ uri => "/build/" . 
$evals->[0]->{builds}->[0] })->content()); is($build->{job}, "job", "The build's job name is job"); is($build->{finished}, 0, "The build isn't finished yet"); - ok($build->{buildoutputs}->{out}->{path} =~ /\/nix\/store\/[a-zA-Z0-9]{32}-job$/, "The build's outpath is in the Nix store and named 'job'"); + like($build->{buildoutputs}->{out}->{path}, qr/\/nix\/store\/[a-zA-Z0-9]{32}-job$/, "The build's outpath is in the Nix store and named 'job'"); subtest "search" => sub { my $search_project = decode_json(request_json({ uri => "/search/?query=sample" })->content()); From 54a9729a0f257b9bb011973ebcae1a42f6284e8b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Tue, 20 Aug 2024 07:40:22 +0200 Subject: [PATCH 19/26] hacking.md: mention nix develop --- doc/manual/src/hacking.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/doc/manual/src/hacking.md b/doc/manual/src/hacking.md index de826720..1f1761a7 100644 --- a/doc/manual/src/hacking.md +++ b/doc/manual/src/hacking.md @@ -15,6 +15,12 @@ and dependencies can be found: $ nix-shell ``` +or when flakes are enabled: + +```console +$ nix develop +``` + To build Hydra, you should then do: ```console From 02a514234bcb15e20241d98b2004e0292b4cc160 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Tue, 20 Aug 2024 07:40:33 +0200 Subject: [PATCH 20/26] hacking.md: make build parallel --- doc/manual/src/hacking.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/hacking.md b/doc/manual/src/hacking.md index 1f1761a7..ec96b8c6 100644 --- a/doc/manual/src/hacking.md +++ b/doc/manual/src/hacking.md @@ -26,7 +26,7 @@ To build Hydra, you should then do: ```console [nix-shell]$ autoreconfPhase [nix-shell]$ configurePhase -[nix-shell]$ make +[nix-shell]$ make -j$(nproc) ``` You start a local database, the webserver, and other components with From b94a7b6d5c56362af9ea85d944f8454d861ec001 Mon Sep 17 00:00:00 2001 From: hacker1024 Date: Wed, 21 Aug 2024 22:13:38
+1000 Subject: [PATCH 21/26] Use Nix::Store and Nix::Utils in NARInfo.pm These are required for the `signString` and `readFile` subroutines used when signing NARs. --- src/lib/Hydra/View/NARInfo.pm | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/lib/Hydra/View/NARInfo.pm b/src/lib/Hydra/View/NARInfo.pm index bf8711a4..801fc06a 100644 --- a/src/lib/Hydra/View/NARInfo.pm +++ b/src/lib/Hydra/View/NARInfo.pm @@ -6,6 +6,8 @@ use File::Basename; use Hydra::Helper::CatalystUtils; use MIME::Base64; use Nix::Manifest; +use Nix::Store; +use Nix::Utils; use Hydra::Helper::Nix; use base qw/Catalyst::View/; From 0ead8dc65c5d4505aaf67769f8b89e3a9c359f35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janne=20He=C3=9F?= Date: Tue, 27 Aug 2024 17:44:19 +0200 Subject: [PATCH 22/26] default.nix: Drop URL literal --- default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/default.nix b/default.nix index d4c7ec29..b81119c3 100644 --- a/default.nix +++ b/default.nix @@ -1,6 +1,6 @@ # The `default.nix` in flake-compat reads `flake.nix` and `flake.lock` from `src` and # returns an attribute set of the shape `{ defaultNix, shellNix }` -(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) { +(import (fetchTarball "https://github.com/edolstra/flake-compat/archive/master.tar.gz") { src = ./.; }).defaultNix From 916531dc9ccee52e6dab256232933fcf6d198158 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janne=20He=C3=9F?= Date: Tue, 27 Aug 2024 17:34:48 +0200 Subject: [PATCH 23/26] api: Require POST for /api/push --- hydra-api.yaml | 2 +- src/lib/Hydra/Controller/API.pm | 2 ++ src/lib/Hydra/Helper/CatalystUtils.pm | 2 +- src/root/jobset.tt | 1 + t/Hydra/Controller/API/checks.t | 4 ++-- 5 files changed, 7 insertions(+), 4 deletions(-) diff --git a/hydra-api.yaml b/hydra-api.yaml index 623c9082..a2fdea28 100644 --- a/hydra-api.yaml +++ b/hydra-api.yaml @@ -70,7 +70,7 @@ paths: $ref: '#/components/examples/projects-success' /api/push: - put: + post: 
summary: trigger jobsets parameters: - in: query diff --git a/src/lib/Hydra/Controller/API.pm b/src/lib/Hydra/Controller/API.pm index 06f35d4b..cd005551 100644 --- a/src/lib/Hydra/Controller/API.pm +++ b/src/lib/Hydra/Controller/API.pm @@ -239,6 +239,8 @@ sub triggerJobset { sub push : Chained('api') PathPart('push') Args(0) { my ($self, $c) = @_; + requirePost($c); + $c->{stash}->{json}->{jobsetsTriggered} = []; my $force = exists $c->request->query_params->{force}; diff --git a/src/lib/Hydra/Helper/CatalystUtils.pm b/src/lib/Hydra/Helper/CatalystUtils.pm index 2a2ad86f..15d50b1a 100644 --- a/src/lib/Hydra/Helper/CatalystUtils.pm +++ b/src/lib/Hydra/Helper/CatalystUtils.pm @@ -272,7 +272,7 @@ sub requireAdmin { sub requirePost { my ($c) = @_; - error($c, "Request must be POSTed.") if $c->request->method ne "POST"; + error($c, "Request must be POSTed.", 405) if $c->request->method ne "POST"; } diff --git a/src/root/jobset.tt b/src/root/jobset.tt index 56abdb50..5d8345f9 100644 --- a/src/root/jobset.tt +++ b/src/root/jobset.tt @@ -205,6 +205,7 @@ if (!c) return; requestJSON({ url: "[% HTML.escape(c.uri_for('/api/push', { jobsets = project.name _ ':' _ jobset.name, force = "1" })) %]", + type: 'POST', success: function(data) { bootbox.alert("The jobset has been scheduled for evaluation."); } diff --git a/t/Hydra/Controller/API/checks.t b/t/Hydra/Controller/API/checks.t index 2b97b489..f0f51f1c 100644 --- a/t/Hydra/Controller/API/checks.t +++ b/t/Hydra/Controller/API/checks.t @@ -109,7 +109,7 @@ subtest "/api/push" => sub { my $jobsetName = $jobset->name; is($jobset->forceeval, undef, "The existing jobset is not set to be forced to eval"); - my $response = request(GET "/api/push?jobsets=$projectName:$jobsetName&force=1"); + my $response = request(POST "/api/push?jobsets=$projectName:$jobsetName&force=1"); ok($response->is_success, "The API enpdoint for triggering jobsets returns 200."); my $data = is_json($response); @@ -128,7 +128,7 @@ subtest "/api/push" => sub { 
print STDERR $repo; - my $response = request(GET "/api/push?repos=$repo&force=1"); + my $response = request(POST "/api/push?repos=$repo&force=1"); ok($response->is_success, "The API enpdoint for triggering jobsets returns 200."); my $data = is_json($response); From f73043378907c2c7e44f633ad764c8bdd1c947d5 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 27 Aug 2024 17:00:00 +0200 Subject: [PATCH 24/26] Create eval-jobset role and guard /api/push route --- doc/manual/src/configuration.md | 3 ++- src/lib/Hydra/Config.pm | 2 ++ src/lib/Hydra/Controller/API.pm | 7 ++++++- src/lib/Hydra/Helper/CatalystUtils.pm | 22 ++++++++++++++++++++++ src/root/user.tt | 1 + t/Hydra/Config/ldap_role_map.t | 2 ++ t/Hydra/Controller/API/checks.t | 27 ++++++++++++++++++++++++--- t/Hydra/Controller/User/ldap-legacy.t | 3 ++- t/Hydra/Controller/User/ldap.t | 5 ++++- 9 files changed, 65 insertions(+), 7 deletions(-) diff --git a/doc/manual/src/configuration.md b/doc/manual/src/configuration.md index 4954040c..d370312a 100644 --- a/doc/manual/src/configuration.md +++ b/doc/manual/src/configuration.md @@ -208,7 +208,8 @@ Example configuration: # Make all users in the hydra_admin group Hydra admins hydra_admin = admin - # Allow all users in the dev group to restart jobs and cancel builds + # Allow all users in the dev group to eval jobsets, restart jobs and cancel builds + dev = eval-jobset dev = restart-jobs dev = cancel-build diff --git a/src/lib/Hydra/Config.pm b/src/lib/Hydra/Config.pm index af686fca..6aae5a5e 100644 --- a/src/lib/Hydra/Config.pm +++ b/src/lib/Hydra/Config.pm @@ -95,6 +95,7 @@ sub get_legacy_ldap_config { "hydra_bump-to-front" => [ "bump-to-front" ], "hydra_cancel-build" => [ "cancel-build" ], "hydra_create-projects" => [ "create-projects" ], + "hydra_eval-jobset" => [ "eval-jobset" ], "hydra_restart-jobs" => [ "restart-jobs" ], }, }; @@ -159,6 +160,7 @@ sub valid_roles { "bump-to-front", "cancel-build", "create-projects", + "eval-jobset", "restart-jobs", ]; } diff 
--git a/src/lib/Hydra/Controller/API.pm b/src/lib/Hydra/Controller/API.pm index cd005551..9f8b7cba 100644 --- a/src/lib/Hydra/Controller/API.pm +++ b/src/lib/Hydra/Controller/API.pm @@ -248,19 +248,24 @@ sub push : Chained('api') PathPart('push') Args(0) { foreach my $s (@jobsets) { my ($p, $j) = parseJobsetName($s); my $jobset = $c->model('DB::Jobsets')->find($p, $j); + requireEvalJobsetPrivileges($c, $jobset->project); next unless defined $jobset && ($force || ($jobset->project->enabled && $jobset->enabled)); triggerJobset($self, $c, $jobset, $force); } my @repos = split /,/, ($c->request->query_params->{repos} // ""); foreach my $r (@repos) { - triggerJobset($self, $c, $_, $force) foreach $c->model('DB::Jobsets')->search( + my @jobsets = $c->model('DB::Jobsets')->search( { 'project.enabled' => 1, 'me.enabled' => 1 }, { join => 'project', where => \ [ 'exists (select 1 from JobsetInputAlts where project = me.project and jobset = me.name and value = ?)', [ 'value', $r ] ], order_by => 'me.id DESC' }); + foreach my $jobset (@jobsets) { + requireEvalJobsetPrivileges($c, $jobset->project); + triggerJobset($self, $c, $jobset, $force) + } } $self->status_ok( diff --git a/src/lib/Hydra/Helper/CatalystUtils.pm b/src/lib/Hydra/Helper/CatalystUtils.pm index 15d50b1a..6ccdbc4d 100644 --- a/src/lib/Hydra/Helper/CatalystUtils.pm +++ b/src/lib/Hydra/Helper/CatalystUtils.pm @@ -15,6 +15,7 @@ our @EXPORT = qw( forceLogin requireUser requireProjectOwner requireRestartPrivileges requireAdmin requirePost isAdmin isProjectOwner requireBumpPrivileges requireCancelBuildPrivileges + requireEvalJobsetPrivileges trim getLatestFinishedEval getFirstEval paramToList @@ -186,6 +187,27 @@ sub isProjectOwner { defined $c->model('DB::ProjectMembers')->find({ project => $project, userName => $c->user->username })); } +sub hasEvalJobsetRole { + my ($c) = @_; + return $c->user_exists && $c->check_user_roles("eval-jobset"); +} + +sub mayEvalJobset { + my ($c, $project) = @_; + return + 
$c->user_exists && + (isAdmin($c) || + hasEvalJobsetRole($c) || + isProjectOwner($c, $project)); +} + +sub requireEvalJobsetPrivileges { + my ($c, $project) = @_; + requireUser($c); + accessDenied($c, "Only the project members, administrators, and accounts with eval-jobset privileges can perform this operation.") + unless mayEvalJobset($c, $project); +} + sub hasCancelBuildRole { my ($c) = @_; return $c->user_exists && $c->check_user_roles('cancel-build'); diff --git a/src/root/user.tt b/src/root/user.tt index 76f85850..04eb6e68 100644 --- a/src/root/user.tt +++ b/src/root/user.tt @@ -91,6 +91,7 @@ [% INCLUDE roleoption mutable=mutable role="restart-jobs" %] [% INCLUDE roleoption mutable=mutable role="bump-to-front" %] [% INCLUDE roleoption mutable=mutable role="cancel-build" %] + [% INCLUDE roleoption mutable=mutable role="eval-jobset" %]

diff --git a/t/Hydra/Config/ldap_role_map.t b/t/Hydra/Config/ldap_role_map.t index cb1adf46..9287c782 100644 --- a/t/Hydra/Config/ldap_role_map.t +++ b/t/Hydra/Config/ldap_role_map.t @@ -57,6 +57,7 @@ subtest "getLDAPConfig" => sub { "hydra_cancel-build" => [ "cancel-build" ], "hydra_create-projects" => [ "create-projects" ], "hydra_restart-jobs" => [ "restart-jobs" ], + "hydra_eval-jobset" => [ "eval-jobset" ], } }, "The empty file and set env var make legacy mode active." @@ -177,6 +178,7 @@ subtest "get_legacy_ldap_config" => sub { "hydra_cancel-build" => [ "cancel-build" ], "hydra_create-projects" => [ "create-projects" ], "hydra_restart-jobs" => [ "restart-jobs" ], + "hydra_eval-jobset" => [ "eval-jobset" ], } }, "Legacy, default role maps are applied." diff --git a/t/Hydra/Controller/API/checks.t b/t/Hydra/Controller/API/checks.t index f0f51f1c..e4c72ff2 100644 --- a/t/Hydra/Controller/API/checks.t +++ b/t/Hydra/Controller/API/checks.t @@ -22,9 +22,24 @@ sub is_json { } my $ctx = test_context(); - Catalyst::Test->import('Hydra'); +# Create a user to log in to +my $user = $ctx->db->resultset('Users')->create({ username => 'alice', emailaddress => 'alice@example.com', password => '!' 
}); +$user->setPassword('foobar'); +$user->userroles->update_or_create({ role => 'admin' }); + +# Login and save cookie for future requests +my $req = request(POST '/login', + Referer => 'http://localhost/', + Content => { + username => 'alice', + password => 'foobar' + } +); +is($req->code, 302, "The login redirects"); +my $cookie = $req->header("set-cookie"); + my $finishedBuilds = $ctx->makeAndEvaluateJobset( expression => "one-job.nix", build => 1 @@ -109,7 +124,10 @@ subtest "/api/push" => sub { my $jobsetName = $jobset->name; is($jobset->forceeval, undef, "The existing jobset is not set to be forced to eval"); - my $response = request(POST "/api/push?jobsets=$projectName:$jobsetName&force=1"); + my $response = request(POST "/api/push?jobsets=$projectName:$jobsetName&force=1", + Cookie => $cookie, + Referer => 'http://localhost/', + ); ok($response->is_success, "The API enpdoint for triggering jobsets returns 200."); my $data = is_json($response); @@ -128,7 +146,10 @@ subtest "/api/push" => sub { print STDERR $repo; - my $response = request(POST "/api/push?repos=$repo&force=1"); + my $response = request(POST "/api/push?repos=$repo&force=1", + Cookie => $cookie, + Referer => 'http://localhost/', + ); ok($response->is_success, "The API enpdoint for triggering jobsets returns 200."); my $data = is_json($response); diff --git a/t/Hydra/Controller/User/ldap-legacy.t b/t/Hydra/Controller/User/ldap-legacy.t index 9cb197c0..19f0c6bf 100644 --- a/t/Hydra/Controller/User/ldap-legacy.t +++ b/t/Hydra/Controller/User/ldap-legacy.t @@ -24,6 +24,7 @@ $ldap->add_group("hydra_create-projects", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_restart-jobs", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_bump-to-front", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_cancel-build", $users->{"many_roles"}->{"username"}); +$ldap->add_group("hydra_eval-jobset", $users->{"many_roles"}->{"username"}); my $hydra_ldap_config = 
"${\$ldap->tmpdir()}/hydra_ldap_config.yaml"; LDAPContext::write_file($hydra_ldap_config, < sub { unrelated => [], admin => ["admin"], not_admin => [], - many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build" ], + many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build", "eval-jobset" ], ); for my $username (keys %users_to_roles) { my $user = $users->{$username}; diff --git a/t/Hydra/Controller/User/ldap.t b/t/Hydra/Controller/User/ldap.t index 175b66aa..050fde23 100644 --- a/t/Hydra/Controller/User/ldap.t +++ b/t/Hydra/Controller/User/ldap.t @@ -24,6 +24,7 @@ $ldap->add_group("hydra_create-projects", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_restart-jobs", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_bump-to-front", $users->{"many_roles"}->{"username"}); $ldap->add_group("hydra_cancel-build", $users->{"many_roles"}->{"username"}); +$ldap->add_group("hydra_eval-jobset", $users->{"many_roles"}->{"username"}); my $ctx = test_context( @@ -76,10 +77,12 @@ my $ctx = test_context( hydra_cancel-build = cancel-build hydra_bump-to-front = bump-to-front hydra_restart-jobs = restart-jobs + hydra_eval-jobset = eval-jobset hydra_one_group_many_roles = create-projects hydra_one_group_many_roles = cancel-build hydra_one_group_many_roles = bump-to-front + hydra_one_group_many-roles = eval-jobset CFG @@ -92,7 +95,7 @@ subtest "Valid login attempts" => sub { unrelated => [], admin => ["admin"], not_admin => [], - many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build" ], + many_roles => [ "create-projects", "restart-jobs", "bump-to-front", "cancel-build", "eval-jobset" ], many_roles_one_group => [ "create-projects", "bump-to-front", "cancel-build" ], ); for my $username (keys %users_to_roles) { From 2dad87ad89ee6e483516c9bc0da21c60977fe024 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Fri, 20 Sep 2024 07:50:24 +0200 Subject: [PATCH 25/26] 
hydra-queue-runner: fix compilation warning instead of converting to double, we can convert to float right away. --- src/hydra-queue-runner/hydra-queue-runner.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hydra-queue-runner/hydra-queue-runner.cc b/src/hydra-queue-runner/hydra-queue-runner.cc index 5ffa7fe6..03b322f8 100644 --- a/src/hydra-queue-runner/hydra-queue-runner.cc +++ b/src/hydra-queue-runner/hydra-queue-runner.cc @@ -164,7 +164,7 @@ void State::parseMachines(const std::string & contents) ? string2Int(tokens[3]).value() : 1, // `speedFactor` - atof(tokens[4].c_str()), + std::stof(tokens[4].c_str()), // `supportedFeatures` std::move(supportedFeatures), // `mandatoryFeatures` From b472f55563285ae9ecfa1322530a31a81cc3d42d Mon Sep 17 00:00:00 2001 From: zowoq <59103226+zowoq@users.noreply.github.com> Date: Sun, 8 Sep 2024 12:15:34 +1000 Subject: [PATCH 26/26] set a default for hydra-compress-logs service follow up from 99ca560d583239b1bc34485f2f719989642fa697 --- nixos-modules/hydra.nix | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/nixos-modules/hydra.nix b/nixos-modules/hydra.nix index 97906cb3..d001675d 100644 --- a/nixos-modules/hydra.nix +++ b/nixos-modules/hydra.nix @@ -464,10 +464,12 @@ in '' set -eou pipefail compression=$(sed -nr 's/compress_build_logs_compression = (.*)/\1/p' ${baseDir}/hydra.conf) - if [[ $compression == zstd ]]; then + if [[ $compression == "" ]]; then + compression="bzip2" + elif [[ $compression == zstd ]]; then compression="zstd --rm" fi - find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r $compression --force --quiet + find ${baseDir}/build-logs -type f -name "*.drv" -mtime +3 -size +0c | xargs -r "$compression" --force --quiet ''; startAt = "Sun 01:45"; };