Compare commits

...

16 Commits

Author SHA1 Message Date
github-merge-queue
83a864607c flake.lock: Update 2026-03-09 00:38:49 +00:00
Janne Heß
a9581bcdc4 Merge pull request #1564 from d-goldin/fix/github-diff-url
fix: Github diffs URL
2026-01-30 19:11:11 +00:00
Dima
26e4d5eb54 fix: Github diffs URL
In https://github.com/NixOS/hydra/pull/1549 diffs were
offloaded to github for performance reasons.

While some GitHub endpoints accept a `.git` suffix in the
repository name, the comparison endpoint does not seem to
accept it.

Specifically, on the main nixos org hydra this isn't working:

Example job: https://hydra.nixos.org/build/320178054

Generates a comparison link like so:
078d69f039...1cd347bf33

This just strips away the suffix and seems to work fine in local
testing.
2026-01-27 01:45:51 +01:00
John Ericson
8bc95a96f7 Merge pull request #1559 from NixOS/bump-nix
bump to nix v2.33
2026-01-23 23:56:48 +00:00
Amaan Qureshi
82cd5e0e23 Fix build after Nix bump 2026-01-23 18:49:40 -05:00
Jörg Thalheim
c3ed183c64 bump to nix v2.33 2026-01-23 18:49:35 -05:00
John Ericson
b45f0d1fa7 Merge pull request #1556 from Mindavi/bugfix/perlcritic-fixes
treewide: update split calls to make perlcritic happy
2026-01-23 23:22:23 +00:00
Rick van Schijndel
e4fe9d43c1 treewide: update split calls to make perlcritic happy
In nixpkgs this started to fail the hydra tests.
It's not completely clear why, because it seems the perlcritic
rule has existed for quite some time.

Anyway, this should solve the issues.
2026-01-17 15:55:29 +01:00
Janne Heß
9df4b65c67 Merge pull request #1558 from NixOS/schema-changes
meson: add missing schema migration
2026-01-14 13:11:42 +00:00
Janne Heß
1d011baed8 Merge pull request #1557 from NixOS/update-flakes
Update flake inputs
2026-01-14 09:19:24 +00:00
github-merge-queue
52b2e4f021 flake.lock: Update 2026-01-14 09:53:02 +01:00
Jörg Thalheim
f089ff87f5 build: automatically include all sql files
To prevent issues as in 43006db8 we can just install all sql files by
default
2026-01-14 09:45:57 +01:00
Jörg Thalheim
43006db835 meson: add missing schema file
This is missing from: https://github.com/NixOS/hydra/pull/1548
2026-01-14 09:39:43 +01:00
Janne Heß
4ebfaba862 Merge pull request #1548 from NixOS/fix/hashlengths
feat: Use short revision from git
2026-01-13 14:34:55 +00:00
Janne Heß
2db62e86e7 feat: Store the short rev length 2026-01-04 19:01:49 +01:00
Janne Heß
d042e3c82c refactor: Revision for the frontend from one place 2026-01-04 18:23:44 +01:00
22 changed files with 96 additions and 150 deletions

22
flake.lock generated
View File

@@ -3,16 +3,16 @@
"nix": {
"flake": false,
"locked": {
"lastModified": 1767367029,
"narHash": "sha256-RsVlkBkxvPPePEo59AVolBFdayxny9FFv8X4aNq9qFc=",
"lastModified": 1772065213,
"narHash": "sha256-DbYpmZAD6aebwxepBop5Ub4S39sLg9UIJziTbeD832k=",
"owner": "NixOS",
"repo": "nix",
"rev": "394a8da9dd9c046de715e451a96b70e64ce4aa7a",
"rev": "0769726d44b0782fecbd7b9749e24320c77af317",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "2.32-maintenance",
"ref": "2.33-maintenance",
"repo": "nix",
"type": "github"
}
@@ -20,27 +20,27 @@
"nix-eval-jobs": {
"flake": false,
"locked": {
"lastModified": 1760478325,
"narHash": "sha256-hA+NOH8KDcsuvH7vJqSwk74PyZP3MtvI/l+CggZcnTc=",
"lastModified": 1767025318,
"narHash": "sha256-i68miKHGdueWggcDAF+Kca9g6S3ipkW629XbMpQYfn0=",
"owner": "nix-community",
"repo": "nix-eval-jobs",
"rev": "daa42f9e9c84aeff1e325dd50fda321f53dfd02c",
"rev": "79dd7adbb5f75b08fb4b9bddd712ebc52baa46bc",
"type": "github"
},
"original": {
"owner": "nix-community",
"ref": "v2.32.1",
"ref": "v2.33.0",
"repo": "nix-eval-jobs",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1767475907,
"narHash": "sha256-w10AFfl20h5MSBCsCegD4xtmcgDlQQeoFsOaIFwHOrE=",
"lastModified": 1772934839,
"narHash": "sha256-6mMYkB7BTTsc4thtCFbh3Aj5yth3EPI6L9L5DR6tpWc=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "7a0d60a03534a2d14b0805616aa1fd403fccfa55",
"rev": "d351a3bce30b8f0d0a36281754b62942977fabe5",
"type": "github"
},
"original": {

View File

@@ -4,13 +4,13 @@
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.11-small";
inputs.nix = {
url = "github:NixOS/nix/2.32-maintenance";
url = "github:NixOS/nix/2.33-maintenance";
# We want to control the deps precisely
flake = false;
};
inputs.nix-eval-jobs = {
url = "github:nix-community/nix-eval-jobs/v2.32.1";
url = "github:nix-community/nix-eval-jobs/v2.33.0";
# We want to control the deps precisely
flake = false;
};

View File

@@ -273,7 +273,7 @@ static BuildResult performBuild(
auto drvOutput = DrvOutput { outputHash, outputName };
successP->builtOutputs.insert_or_assign(
std::move(outputName),
Realisation { drvOutput, *outputPath });
Realisation { {.outPath = *outputPath}, drvOutput });
}
}
}

View File

@@ -537,12 +537,12 @@ void State::notifyBuildFinished(pqxx::work & txn, BuildID buildId,
std::shared_ptr<PathLocks> State::acquireGlobalLock()
{
Path lockPath = hydraData + "/queue-runner/lock";
auto lockPath = std::filesystem::path(hydraData) / "queue-runner/lock";
createDirs(dirOf(lockPath));
createDirs(lockPath.parent_path());
auto lock = std::make_shared<PathLocks>();
if (!lock->lockPaths(PathSet({lockPath}), "", false)) return 0;
if (!lock->lockPaths({lockPath}, "", false)) return 0;
return lock;
}

View File

@@ -1,5 +1,6 @@
#include "state.hh"
#include "hydra-build-result.hh"
#include <nix/store/derived-path.hh>
#include <nix/store/globals.hh>
#include <nix/store/parsed-derivations.hh>
#include <nix/util/thread-pool.hh>
@@ -487,24 +488,24 @@ Step::ptr State::createStep(ref<Store> destStore,
it's not runnable yet, and other threads won't make it
runnable while step->created == false. */
step->drv = std::make_unique<Derivation>(localStore->readDerivation(drvPath));
{
try {
step->drvOptions = std::make_unique<DerivationOptions>(
DerivationOptions::fromStructuredAttrs(
step->drv->env,
step->drv->structuredAttrs ? &*step->drv->structuredAttrs : nullptr));
} catch (Error & e) {
e.addTrace({}, "while parsing derivation '%s'", localStore->printStorePath(drvPath));
throw;
}
DerivationOptions<nix::SingleDerivedPath> drvOptions;
try {
drvOptions = derivationOptionsFromStructuredAttrs(
*localStore,
step->drv->inputDrvs,
step->drv->env,
get(step->drv->structuredAttrs));
} catch (Error & e) {
e.addTrace({}, "while parsing derivation '%s'", localStore->printStorePath(drvPath));
throw;
}
step->preferLocalBuild = step->drvOptions->willBuildLocally(*localStore, *step->drv);
step->preferLocalBuild = drvOptions.willBuildLocally(*localStore, *step->drv);
step->isDeterministic = getOr(step->drv->env, "isDetermistic", "0") == "1";
step->systemType = step->drv->platform;
{
StringSet features = step->requiredSystemFeatures = step->drvOptions->getRequiredSystemFeatures(*step->drv);
StringSet features = step->requiredSystemFeatures = drvOptions.getRequiredSystemFeatures(*step->drv);
if (step->preferLocalBuild)
features.insert("local");
if (!features.empty()) {

View File

@@ -172,7 +172,6 @@ struct Step
nix::StorePath drvPath;
std::unique_ptr<nix::Derivation> drv;
std::unique_ptr<nix::DerivationOptions> drvOptions;
nix::StringSet requiredSystemFeatures;
bool preferLocalBuild;
bool isDeterministic;

View File

@@ -106,11 +106,11 @@ sub doEmailLogin {
my $allowed_domains = $c->config->{allowed_domains} // ($c->config->{persona_allowed_domains} // "");
if ($allowed_domains ne "") {
my $email_ok = 0;
my @domains = split ',', $allowed_domains;
my @domains = split /,/, $allowed_domains;
map { $_ =~ s/^\s*(.*?)\s*$/$1/ } @domains;
foreach my $domain (@domains) {
$email_ok = $email_ok || ((split '@', $email)[1] eq $domain);
$email_ok = $email_ok || ((split /@/, $email)[1] eq $domain);
}
error($c, "Your email address does not belong to a domain that is allowed to log in.\n")
unless $email_ok;

View File

@@ -71,7 +71,7 @@ sub buildFinished {
my $to = $build->jobset->emailoverride ne "" ? $build->jobset->emailoverride : $build->maintainers;
foreach my $address (split ",", ($to // "")) {
foreach my $address (split /,/, ($to // "")) {
$address = trim $address;
$addresses{$address} //= { builds => [] };

View File

@@ -38,7 +38,7 @@ sub _parseValue {
$start_options = 2;
}
foreach my $option (@parts[$start_options .. $#parts]) {
(my $key, my $value) = split('=', $option);
(my $key, my $value) = split(/=/, $option);
$options->{$key} = $value;
}
return ($uri, $branch, $deepClone, $options);
@@ -265,7 +265,7 @@ sub getCommits {
my $res = [];
foreach my $line (split /\n/, $out) {
my ($revision, $author, $email, $date) = split "\t", $line;
my ($revision, $author, $email, $date) = split /\t/, $line;
push @$res, { revision => $revision, author => decode("utf-8", $author), email => $email };
}

View File

@@ -31,10 +31,10 @@ sub _iterate {
$pulls->{$pull->{number}} = $pull;
}
# TODO Make Link header parsing more robust!!!
my @links = split ',', ($res->header("Link") // "");
my @links = split /,/, ($res->header("Link") // "");
my $next = "";
foreach my $link (@links) {
my ($url, $rel) = split ";", $link;
my ($url, $rel) = split /;/, $link;
if (trim($rel) eq 'rel="next"') {
$next = substr trim($url), 1, -1;
last;

View File

@@ -83,10 +83,10 @@ sub _iterate {
$refs->{$ref_name} = $ref;
}
# TODO Make Link header parsing more robust!!!
my @links = split ',', $res->header("Link");
my @links = split /,/, $res->header("Link");
my $next = "";
foreach my $link (@links) {
my ($url, $rel) = split ";", $link;
my ($url, $rel) = split /;/, $link;
if (trim($rel) eq 'rel="next"') {
$next = substr trim($url), 1, -1;
last;

View File

@@ -49,10 +49,10 @@ sub _iterate {
$pulls->{$pull->{iid}} = $pull;
}
# TODO Make Link header parsing more robust!!!
my @links = split ',', $res->header("Link");
my @links = split /,/, $res->header("Link");
my $next = "";
foreach my $link (@links) {
my ($url, $rel) = split ";", $link;
my ($url, $rel) = split /;/, $link;
if (trim($rel) eq 'rel="next"') {
$next = substr trim($url), 1, -1;
last;

View File

@@ -126,7 +126,7 @@ sub getCommits {
my $res = [];
foreach my $line (split /\n/, $out) {
if ($line ne "") {
my ($revision, $author, $email) = split "\t", $line;
my ($revision, $author, $email) = split /\t/, $line;
push @$res, { revision => $revision, author => $author, email => $email };
}
}

View File

@@ -85,7 +85,7 @@ sub isBuildEligibleForDynamicRunCommand {
sub configSectionMatches {
my ($name, $project, $jobset, $job) = @_;
my @elems = split ':', $name;
my @elems = split /:/, $name;
die "invalid section name '$name'\n" if scalar(@elems) > 3;

View File

@@ -66,6 +66,11 @@ __PACKAGE__->table("jobsetevalinputs");
data_type: 'text'
is_nullable: 1
=head2 shortRevLength
data_type: 'number'
is_nullable: 1
=head2 value
data_type: 'text'
@@ -102,6 +107,8 @@ __PACKAGE__->add_columns(
{ data_type => "text", is_nullable => 1 },
"revision",
{ data_type => "text", is_nullable => 1 },
"shortRevLength",
{ data_type => "integer", is_nullable => 1 },
"value",
{ data_type => "text", is_nullable => 1 },
"dependency",
@@ -183,4 +190,28 @@ sub json_hint {
return \%hint;
}
# Revision to be rendered by the frontend
sub frontend_revision() {
my ($self) = @_;
my $type = $self->get_column('type');
if ($type eq 'svn' or $type eq 'svn-checkout' or $type eq 'bzr' or $type eq 'bzr-checkout') {
return 'r' . $self->get_column('revision');
} elsif ($type eq 'git') {
# Find the longest revision length of this URI
my $schema = $self->result_source->schema;
my $maxLength = $schema
->resultset('JobsetEvalInputs')
->search({ uri => $self->get_column('uri')})
->get_column('shortRevLength')
->max;
# Fall back to a fixed value if there was no value
return substr($self->get_column('revision'), 0, $maxLength || 12);
} elsif ($type eq 'bzr') {
return substr($self->get_column('revision'), 0, 12);
} else {
return $self->get_column('revision');
}
}
1;

View File

@@ -347,9 +347,10 @@ BLOCK renderDiffUri;
url = res.0;
branch = res.1;
IF bi1.type == "hg" || bi1.type == "git" %]
[% IF url.substr(0, 19) == "https://github.com/" %]
[% IF url.substr(0, 19) == "https://github.com/";
github_url = url.replace('\.git$', '') %]
<a target="_blank" [% HTML.attributes(href =>
url
github_url
_ "/compare/"
_ bi1.revision
_ "..."
@@ -421,7 +422,7 @@ BLOCK renderInputDiff; %]
[% ELSIF bi1.uri == bi2.uri && bi1.revision != bi2.revision %]
[% IF bi1.type == "git" %]
<tr><td>
<b>[% HTML.escape(bi1.name) %]</b></td><td><tt>[% INCLUDE renderDiffUri contents=(bi1.revision.substr(0, 12) _ ' to ' _ bi2.revision.substr(0, 12)) %]</tt>
<b>[% HTML.escape(bi1.name) %]</b></td><td><tt>[% INCLUDE renderDiffUri contents=(bi1.frontend_revision _ ' to ' _ bi2.frontend_revision) %]</tt>
</td></tr>
[% ELSE %]
<tr><td>
@@ -462,16 +463,10 @@ BLOCK renderPager %]
BLOCK renderShortEvalInput;
IF input.type == "svn" || input.type == "svn-checkout" || input.type == "bzr" || input.type == "bzr-checkout" %]
r[% input.revision %]
[% ELSIF input.type == "git" %]
<tt>[% input.revision.substr(0, 7) | html %]</tt>
[% ELSIF input.type == "hg" %]
<tt>[% input.revision.substr(0, 12) | html %]</tt>
[% ELSIF input.type == "build" || input.type == "sysbuild" %]
IF input.type == "build" || input.type == "sysbuild" %]
<a [% HTML.attributes(href => c.uri_for('/build' input.get_column('dependency'))) %]>[% HTML.escape(input.get_column('dependency')) %]</a>
[% ELSE %]
<tt>[% input.revision | html %]</tt>
<tt>[% input.frontend_revision | html %]</tt>
[% END;
END;

View File

@@ -117,7 +117,7 @@ else
revCount="$(cat "$tmpDir/[% input.name %]/rev-count")"
fi
args+=(--arg '[% input.name %]' "{ outPath = $inputDir; rev = \"[% input.revision %]\"; shortRev = \"[% input.revision.substr(0, 7) %]\"; revCount = $revCount; }")
args+=(--arg '[% input.name %]' "{ outPath = $inputDir; rev = \"[% input.revision %]\"; shortRev = \"[% input.frontend_revision %]\"; revCount = $revCount; }")
[%+ ELSIF input.type == "hg" %]

View File

@@ -891,6 +891,7 @@ sub checkJobsetWrapped {
, type => $input->{type}
, uri => $input->{uri}
, revision => $input->{revision}
, shortRevLength => length($input->{shortRev})
, value => $input->{value}
, dependency => $input->{id}
, path => $input->{storePath} || "" # !!! temporary hack

View File

@@ -487,11 +487,12 @@ create table JobsetEvalInputs (
altNr integer not null,
-- Copied from the jobsetinputs from which the build was created.
type text not null,
uri text,
revision text,
value text,
dependency integer, -- build ID of the input, for type == 'build'
type text not null,
uri text,
revision text,
shortRevLength smallint, -- length of a short revision at the time this was checked out
value text,
dependency integer, -- build ID of the input, for type == 'build'
path text,

View File

@@ -1,90 +1,7 @@
sql_files = files(
'hydra.sql',
'test.sql',
'update-dbix.pl',
'upgrade-2.sql',
'upgrade-3.sql',
'upgrade-4.sql',
'upgrade-5.sql',
'upgrade-6.sql',
'upgrade-7.sql',
'upgrade-8.sql',
'upgrade-9.sql',
'upgrade-10.sql',
'upgrade-11.sql',
'upgrade-12.sql',
'upgrade-13.sql',
'upgrade-14.sql',
'upgrade-15.sql',
'upgrade-16.sql',
'upgrade-17.sql',
'upgrade-18.sql',
'upgrade-19.sql',
'upgrade-20.sql',
'upgrade-21.sql',
'upgrade-22.sql',
'upgrade-23.sql',
'upgrade-24.sql',
'upgrade-25.sql',
'upgrade-26.sql',
'upgrade-27.sql',
'upgrade-28.sql',
'upgrade-29.sql',
'upgrade-30.sql',
'upgrade-31.sql',
'upgrade-32.sql',
'upgrade-33.sql',
'upgrade-34.sql',
'upgrade-35.sql',
'upgrade-36.sql',
'upgrade-37.sql',
'upgrade-38.sql',
'upgrade-39.sql',
'upgrade-40.sql',
'upgrade-41.sql',
'upgrade-42.sql',
'upgrade-43.sql',
'upgrade-44.sql',
'upgrade-45.sql',
'upgrade-46.sql',
'upgrade-47.sql',
'upgrade-48.sql',
'upgrade-49.sql',
'upgrade-50.sql',
'upgrade-51.sql',
'upgrade-52.sql',
'upgrade-53.sql',
'upgrade-54.sql',
'upgrade-55.sql',
'upgrade-56.sql',
'upgrade-57.sql',
'upgrade-58.sql',
'upgrade-59.sql',
'upgrade-60.sql',
'upgrade-61.sql',
'upgrade-62.sql',
'upgrade-63.sql',
'upgrade-64.sql',
'upgrade-65.sql',
'upgrade-66.sql',
'upgrade-67.sql',
'upgrade-68.sql',
'upgrade-69.sql',
'upgrade-70.sql',
'upgrade-71.sql',
'upgrade-72.sql',
'upgrade-73.sql',
'upgrade-74.sql',
'upgrade-75.sql',
'upgrade-76.sql',
'upgrade-77.sql',
'upgrade-78.sql',
'upgrade-79.sql',
'upgrade-80.sql',
'upgrade-81.sql',
'upgrade-82.sql',
'upgrade-83.sql',
'upgrade-84.sql',
# Install all SQL files in this directory.
# This includes hydra.sql, test.sql, update-dbix.pl, and all upgrade-*.sql files.
install_subdir('.',
install_dir: hydra_libexecdir / 'sql',
strip_directory: true,
exclude_files: ['meson.build', 'update-dbix-harness.sh'],
)
install_data(sql_files, install_dir: hydra_libexecdir / 'sql')

1
src/sql/upgrade-85.sql Normal file
View File

@@ -0,0 +1 @@
ALTER TABLE JobsetEvalInputs ADD COLUMN shortRevLength smallint;

View File

@@ -109,7 +109,7 @@ subtest "Build: not substitutable, unsubstitutable" => sub {
subtest "Second notification: step_finished" => sub {
my ($channelName, $pid, $payload) = @{$dbh->func("pg_notifies")};
is($channelName, "step_finished", "The event is for the step finishing");
my ($buildId, $stepNr, $logFile) = split "\t", $payload;
my ($buildId, $stepNr, $logFile) = split /\t/, $payload;
is($buildId, $build->id, "The payload is the build's ID");
is($stepNr, 1, "The payload is the build's step number");
isnt($logFile, undef, "The log file is passed");