remove useless file
parent c5b0da7d76
commit 0fbea323b2
1 changed file with 0 additions and 430 deletions
@@ -1,430 +0,0 @@
diff --git a/doc/manual/src/webhooks.md b/doc/manual/src/webhooks.md
index 2b26cd61..674e1064 100644
--- a/doc/manual/src/webhooks.md
+++ b/doc/manual/src/webhooks.md
@@ -1,9 +1,12 @@
# Webhooks

-Hydra can be notified by github's webhook to trigger a new evaluation when a
+Hydra can be notified by github or gitea with webhooks to trigger a new evaluation when a
jobset has a github repo in its input.
-To set up a github webhook go to `https://github.com/<yourhandle>/<yourrepo>/settings` and in the `Webhooks` tab
-click on `Add webhook`.
+
+## GitHub
+
+To set up a webhook for a GitHub repository go to `https://github.com/<yourhandle>/<yourrepo>/settings`
+and in the `Webhooks` tab click on `Add webhook`.

- In `Payload URL` fill in `https://<your-hydra-domain>/api/push-github`.
- In `Content type` switch to `application/json`.
@@ -11,3 +14,14 @@ click on `Add webhook`.
- For `Which events would you like to trigger this webhook?` keep the default option for events on `Just the push event.`.

Then add the hook with `Add webhook`.
+
+## Gitea
+
+To set up a webhook for a Gitea repository go to the settings of the repository in your Gitea instance
+and in the `Webhooks` tab click on `Add Webhook` and choose `Gitea` in the drop down.
+
+- In `Target URL` fill in `https://<your-hydra-domain>/api/push-gitea`.
+- Keep HTTP method `POST`, POST Content Type `application/json` and Trigger On `Push Events`.
+- Change the branch filter to match the git branch hydra builds.
+
+Then add the hook with `Add webhook`.
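For reference, the new `/api/push-gitea` endpoint documented above can also be exercised by hand. A minimal sketch, assuming placeholder host and repository names; only `repository.clone_url` is read by the `push_gitea` handler added further down in `API.pm`:

```perl
# Minimal sketch: hydra.example.org and alice/hello are placeholders, not part of the patch.
use strict;
use warnings;
use LWP::UserAgent;
use JSON::MaybeXS qw(encode_json);

my $ua  = LWP::UserAgent->new;
my $res = $ua->post(
    'https://hydra.example.org/api/push-gitea',
    'Content-Type' => 'application/json',
    Content        => encode_json({
        repository => { clone_url => 'https://gitea.example.org/alice/hello.git' },
    }),
);
# push_gitea strips the ".git" suffix and triggers every enabled jobset whose flake
# or input value contains the resulting URL; the handler itself sets an empty body.
print $res->status_line, "\n";
```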
diff --git a/src/hydra-queue-runner/build-remote.cc b/src/hydra-queue-runner/build-remote.cc
index 05ab6761..b2b12ac0 100644
--- a/src/hydra-queue-runner/build-remote.cc
+++ b/src/hydra-queue-runner/build-remote.cc
@@ -358,6 +358,7 @@ void State::buildRemote(ref<Store> destStore,
copyPaths(*destStore, *localStore, closure, NoRepair, NoCheckSigs, NoSubstitute);
} else {
copyClosureTo(machine->state->sendLock, *destStore, from, to, step->drv->inputSrcs, true);
+ copyClosureTo(machine->state->sendLock, *destStore, from, to, basicDrv.inputSrcs, true);
}

auto now2 = std::chrono::steady_clock::now();
@@ -514,11 +515,6 @@ void State::buildRemote(ref<Store> destStore,
infos.insert_or_assign(info.path, info);
}

- if (totalNarSize > maxOutputSize) {
- result.stepStatus = bsNarSizeLimitExceeded;
- return;
- }
-
/* Copy each path. */
printMsg(lvlDebug, "copying outputs of ‘%s’ from ‘%s’ (%d bytes)",
localStore->printStorePath(step->drvPath), machine->sshName, totalNarSize);
@@ -528,26 +524,27 @@ void State::buildRemote(ref<Store> destStore,
for (auto & path : pathsSorted) {
auto & info = infos.find(path)->second;

- /* Receive the NAR from the remote and add it to the
- destination store. Meanwhile, extract all the info from the
- NAR that getBuildOutput() needs. */
- auto source2 = sinkToSource([&](Sink & sink)
- {
- /* Note: we should only send the command to dump the store
- path to the remote if the NAR is actually going to get read
- by the destination store, which won't happen if this path
- is already valid on the destination store. Since this
- lambda function only gets executed if someone tries to read
- from source2, we will send the command from here rather
- than outside the lambda. */
- to << cmdDumpStorePath << localStore->printStorePath(path);
- to.flush();
-
- TeeSource tee(from, sink);
- extractNarData(tee, localStore->printStorePath(path), narMembers);
- });
-
- destStore->addToStore(info, *source2, NoRepair, NoCheckSigs);
+ for (auto & store : {&*destStore, &*localStore}) {
+ /* Receive the NAR from the remote and add it to the
+ destination store. Meanwhile, extract all the info from the
+ NAR that getBuildOutput() needs. */
+ auto source2 = sinkToSource([&](Sink & sink)
+ {
+ /* Note: we should only send the command to dump the store
+ path to the remote if the NAR is actually going to get read
+ by the destination store, which won't happen if this path
+ is already valid on the destination store. Since this
+ lambda function only gets executed if someone tries to read
+ from source2, we will send the command from here rather
+ than outside the lambda. */
+ to << cmdDumpStorePath << localStore->printStorePath(path);
+ to.flush();
+
+ TeeSource tee(from, sink);
+ extractNarData(tee, localStore->printStorePath(path), narMembers);
+ });
+ store->addToStore(info, *source2, NoRepair, NoCheckSigs);
+ }
}

auto now2 = std::chrono::steady_clock::now();
diff --git a/src/lib/Hydra/Controller/API.pm b/src/lib/Hydra/Controller/API.pm
index 6f10ef57..5eeb0c04 100644
--- a/src/lib/Hydra/Controller/API.pm
+++ b/src/lib/Hydra/Controller/API.pm
@@ -285,6 +285,23 @@ sub push_github : Chained('api') PathPart('push-github') Args(0) {
$c->response->body("");
}

+sub push_gitea : Chained('api') PathPart('push-gitea') Args(0) {
+ my ($self, $c) = @_;
+
+ $c->{stash}->{json}->{jobsetsTriggered} = [];
+
+ my $in = $c->request->{data};
+ my $url = $in->{repository}->{clone_url} or die;
+ $url =~ s/.git$//;
+ print STDERR "got push from Gitea repository $url\n";
+
+ triggerJobset($self, $c, $_, 0) foreach $c->model('DB::Jobsets')->search(
+ { 'project.enabled' => 1, 'me.enabled' => 1 },
+ { join => 'project'
+ , where => \ [ 'me.flake like ? or exists (select 1 from JobsetInputAlts where project = me.project and jobset = me.name and value like ?)', [ 'flake', "%$url%"], [ 'value', "%$url%" ] ]
+ });
+ $c->response->body("");
+}


1;
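The jobset lookup in `push_gitea` relies on a small normalisation of the incoming `clone_url` before the SQL `LIKE` match. A standalone sketch of just that step, with a made-up clone URL:

```perl
# Sketch of the matching used by push_gitea above; the clone_url is a placeholder.
use strict;
use warnings;

my $url = 'https://gitea.example.org/alice/hello.git';
$url =~ s/.git$//;       # same substitution as the handler; note the unescaped '.',
                         # so any character before a trailing "git" is stripped too
my $pattern = "%$url%";  # LIKE pattern compared against me.flake and the input values
print "$url\n";          # https://gitea.example.org/alice/hello
print "$pattern\n";      # %https://gitea.example.org/alice/hello%
```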
diff --git a/src/lib/Hydra/Controller/Root.pm b/src/lib/Hydra/Controller/Root.pm
index c6843d29..1b33db2a 100644
--- a/src/lib/Hydra/Controller/Root.pm
+++ b/src/lib/Hydra/Controller/Root.pm
@@ -32,6 +32,7 @@ sub noLoginNeeded {

return $whitelisted ||
$c->request->path eq "api/push-github" ||
+ $c->request->path eq "api/push-gitea" ||
$c->request->path eq "google-login" ||
$c->request->path eq "github-redirect" ||
$c->request->path eq "github-login" ||
@@ -77,7 +78,7 @@ sub begin :Private {
$_->supportedInputTypes($c->stash->{inputTypes}) foreach @{$c->hydra_plugins};

# XSRF protection: require POST requests to have the same origin.
- if ($c->req->method eq "POST" && $c->req->path ne "api/push-github") {
+ if ($c->req->method eq "POST" && $c->req->path ne "api/push-github" && $c->req->path ne "api/push-gitea") {
my $referer = $c->req->header('Referer');
$referer //= $c->req->header('Origin');
my $base = $c->req->base;
diff --git a/src/lib/Hydra/Helper/AddBuilds.pm b/src/lib/Hydra/Helper/AddBuilds.pm
index 9e3ddfd2..a6373be5 100644
--- a/src/lib/Hydra/Helper/AddBuilds.pm
+++ b/src/lib/Hydra/Helper/AddBuilds.pm
@@ -67,7 +67,7 @@ sub validateDeclarativeJobset {
my $enable_dynamic_run_command = defined $update{enable_dynamic_run_command} ? 1 : 0;
if ($enable_dynamic_run_command
&& !($config->{dynamicruncommand}->{enable}
- && $project->{enable_dynamic_run_command}))
+ && $project->enable_dynamic_run_command))
{
die "Dynamic RunCommand is not enabled by the server or the parent project.";
}
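The accessor change above matters because `validateDeclarativeJobset` now receives a project row object instead of a plain hash reference (see the test changes below); on a row object the column is only reachable through its accessor method. A toy illustration, with `FakeRow` as a hypothetical stand-in rather than a real Hydra class:

```perl
# Illustration only: FakeRow mimics a row object whose columns sit behind
# accessor methods rather than top-level hash keys.
use strict;
use warnings;

package FakeRow {
    sub new { my ($class, %cols) = @_; bless { _column_data => {%cols} }, $class }
    sub enable_dynamic_run_command { $_[0]->{_column_data}{enable_dynamic_run_command} }
}

my $project = FakeRow->new(enable_dynamic_run_command => 1);
print $project->enable_dynamic_run_command // 'undef', "\n";    # 1 (what the patched code reads)
print $project->{enable_dynamic_run_command} // 'undef', "\n";  # undef (what the old hash lookup saw)
```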
diff --git a/src/lib/Hydra/Plugin/GiteaStatus.pm b/src/lib/Hydra/Plugin/GiteaStatus.pm
index 426c93f5..b4346870 100644
--- a/src/lib/Hydra/Plugin/GiteaStatus.pm
+++ b/src/lib/Hydra/Plugin/GiteaStatus.pm
@@ -29,6 +29,53 @@ sub toGiteaState {
}
}

+sub url_from_jobsetevalinputs {
+ my ($eval) = @_;
+ my $giteastatusInput = $eval->jobsetevalinputs->find({ name => "gitea_status_repo" });
+ return undef unless defined $giteastatusInput && defined $giteastatusInput->value;
+ my $i = $eval->jobsetevalinputs->find({ name => $giteastatusInput->value, altnr => 0 });
+ return undef unless defined $i;
+ my $gitea_url = $eval->jobsetevalinputs->find({ name => "gitea_http_url" });
+
+ my $repoOwner = $eval->jobsetevalinputs->find({ name => "gitea_repo_owner" })->value;
+ my $repoName = $eval->jobsetevalinputs->find({ name => "gitea_repo_name" })->value;
+
+ my $rev = $i->revision;
+ my $domain = URI->new($i->uri)->host;
+ my $host;
+ unless (defined $gitea_url) {
+ $host = "https://$domain";
+ } else {
+ $host = $gitea_url->value;
+ }
+
+ return ("$host/api/v1/repos/$repoOwner/$repoName/statuses/$rev", $repoOwner);
+}
+sub is_gitea {
+ my ($ua, $hostname) = @_;
+ my $req = HTTP::Request->new('GET', "https://$hostname/api/swagger");
+ my $res = $ua->request($req);
+ return 0 unless $res->is_success;
+ return index($res->as_string(), "Gitea") != -1;
+}
+
+sub try_gitea_from_repo_url {
+ my ($ua, $url) = @_;
+ if ($url =~ m!git\+https://([^/]+)/([^/]+)/([^/]+)\?.*rev=([[:xdigit:]]{40})$!) {
+ return ("https://$1/api/v1/repos/$2/$3/statuses/$4", $2) if is_gitea($ua, $1);
+ }
+ return undef;
+}
+
+sub try_gitea {
+ my ($ua, $eval) = @_;
+ if (defined $eval->flake) {
+ return try_gitea_from_repo_url($ua, $eval->flake);
+ }
+ return undef;
+}
+
+
sub common {
my ($self, $topbuild, $dependents, $status) = @_;
my $baseurl = $self->{config}->{'base_uri'} || "http://localhost:3000";
@@ -52,26 +99,12 @@ sub common {
});

while (my $eval = $evals->next) {
- my $giteastatusInput = $eval->jobsetevalinputs->find({ name => "gitea_status_repo" });
- next unless defined $giteastatusInput && defined $giteastatusInput->value;
- my $i = $eval->jobsetevalinputs->find({ name => $giteastatusInput->value, altnr => 0 });
- next unless defined $i;
- my $gitea_url = $eval->jobsetevalinputs->find({ name => "gitea_http_url" });
-
- my $repoOwner = $eval->jobsetevalinputs->find({ name => "gitea_repo_owner" })->value;
- my $repoName = $eval->jobsetevalinputs->find({ name => "gitea_repo_name" })->value;
- my $accessToken = $self->{config}->{gitea_authorization}->{$repoOwner};
-
- my $rev = $i->revision;
- my $domain = URI->new($i->uri)->host;
- my $host;
- unless (defined $gitea_url) {
- $host = "https://$domain";
- } else {
- $host = $gitea_url->value;
+ my ($url, $repoOwner) = url_from_jobsetevalinputs($eval);
+ if (! defined $url) {
+ ($url, $repoOwner) = try_gitea($ua, $eval);
}
-
- my $url = "$host/api/v1/repos/$repoOwner/$repoName/statuses/$rev";
+ next unless defined $url;
+ my $accessToken = $self->{config}->{gitea_authorization}->{$repoOwner};

print STDERR "GiteaStatus POSTing $state to $url\n";
my $req = HTTP::Request->new('POST', $url);
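For flake jobsets, `try_gitea_from_repo_url` derives the commit-status endpoint straight from the flake URI. A standalone sketch of that parsing, reusing the regex from the hunk above on a made-up URI and skipping the `is_gitea` probe:

```perl
# Sketch only: gitea.example.org, alice/hello and the revision are placeholders.
use strict;
use warnings;

my $flake = 'git+https://gitea.example.org/alice/hello?ref=main'
          . '&rev=aaaaaaaabbbbbbbbccccccccddddddddeeeeeeee';

if ($flake =~ m!git\+https://([^/]+)/([^/]+)/([^/]+)\?.*rev=([[:xdigit:]]{40})$!) {
    my ($host, $owner, $repo, $rev) = ($1, $2, $3, $4);
    # The plugin would additionally confirm the host is a Gitea instance via /api/swagger.
    print "https://$host/api/v1/repos/$owner/$repo/statuses/$rev\n";
}
```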
diff --git a/t/Helper/AddBuilds/dynamic-disabled.t b/t/Helper/AddBuilds/dynamic-disabled.t
index 0507b03e..0c91f382 100644
--- a/t/Helper/AddBuilds/dynamic-disabled.t
+++ b/t/Helper/AddBuilds/dynamic-disabled.t
@@ -6,11 +6,31 @@ use Test2::V0;
require Catalyst::Test;
use HTTP::Request::Common qw(POST PUT GET DELETE);
use JSON::MaybeXS qw(decode_json encode_json);
-use Hydra::Helper::AddBuilds qw(validateDeclarativeJobset);
-use Hydra::Helper::Nix qw(getHydraConfig);

my $ctx = test_context();

+Catalyst::Test->import('Hydra');
+
+my $db = Hydra::Model::DB->new;
+hydra_setup($db);
+
+my $user = $db->resultset('Users')->create({ username => 'alice', emailaddress => 'root@invalid.org', password => '!' });
+$user->setPassword('foobar');
+$user->userroles->update_or_create({ role => 'admin' });
+
+my $project_with_dynamic_run_command = $db->resultset('Projects')->create({
+ name => 'tests_with_dynamic_runcommand',
+ displayname => 'Tests with dynamic runcommand',
+ owner => 'alice',
+ enable_dynamic_run_command => 1,
+});
+my $project_without_dynamic_run_command = $db->resultset('Projects')->create({
+ name => 'tests_without_dynamic_runcommand',
+ displayname => 'Tests without dynamic runcommand',
+ owner => 'alice',
+ enable_dynamic_run_command => 0,
+});
+
sub makeJobsetSpec {
my ($dynamic) = @_;

@@ -29,14 +49,16 @@ sub makeJobsetSpec {
};

subtest "validate declarative jobset with dynamic RunCommand disabled by server" => sub {
- my $config = getHydraConfig();
+ my $config = Hydra::Helper::Nix->getHydraConfig();
+ require Hydra::Helper::AddBuilds;
+ Hydra::Helper::AddBuilds->import( qw(validateDeclarativeJobset) );

subtest "project enabled dynamic runcommand, declarative jobset enabled dynamic runcommand" => sub {
like(
dies {
validateDeclarativeJobset(
$config,
- { enable_dynamic_run_command => 1 },
+ $project_with_dynamic_run_command,
"test-jobset",
makeJobsetSpec(JSON::MaybeXS::true),
),
@@ -49,7 +71,7 @@ subtest "validate declarative jobset with dynamic RunCommand disabled by server"
ok(
validateDeclarativeJobset(
$config,
- { enable_dynamic_run_command => 1 },
+ $project_with_dynamic_run_command,
"test-jobset",
makeJobsetSpec(JSON::MaybeXS::false)
),
@@ -61,7 +83,7 @@ subtest "validate declarative jobset with dynamic RunCommand disabled by server"
dies {
validateDeclarativeJobset(
$config,
- { enable_dynamic_run_command => 0 },
+ $project_without_dynamic_run_command,
"test-jobset",
makeJobsetSpec(JSON::MaybeXS::true),
),
@@ -74,7 +96,7 @@ subtest "validate declarative jobset with dynamic RunCommand disabled by server"
ok(
validateDeclarativeJobset(
$config,
- { enable_dynamic_run_command => 0 },
+ $project_without_dynamic_run_command,
"test-jobset",
makeJobsetSpec(JSON::MaybeXS::false)
),
diff --git a/t/Helper/AddBuilds/dynamic-enabled.t b/t/Helper/AddBuilds/dynamic-enabled.t
index d2f5a386..46497bed 100644
--- a/t/Helper/AddBuilds/dynamic-enabled.t
+++ b/t/Helper/AddBuilds/dynamic-enabled.t
@@ -6,8 +6,6 @@ use Test2::V0;
require Catalyst::Test;
use HTTP::Request::Common qw(POST PUT GET DELETE);
use JSON::MaybeXS qw(decode_json encode_json);
-use Hydra::Helper::AddBuilds qw(validateDeclarativeJobset);
-use Hydra::Helper::Nix qw(getHydraConfig);

my $ctx = test_context(
hydra_config => q|
@@ -17,6 +15,28 @@ my $ctx = test_context(
|
);

+Catalyst::Test->import('Hydra');
+
+my $db = Hydra::Model::DB->new;
+hydra_setup($db);
+
+my $user = $db->resultset('Users')->create({ username => 'alice', emailaddress => 'root@invalid.org', password => '!' });
+$user->setPassword('foobar');
+$user->userroles->update_or_create({ role => 'admin' });
+
+my $project_with_dynamic_run_command = $db->resultset('Projects')->create({
+ name => 'tests_with_dynamic_runcommand',
+ displayname => 'Tests with dynamic runcommand',
+ owner => 'alice',
+ enable_dynamic_run_command => 1,
+});
+my $project_without_dynamic_run_command = $db->resultset('Projects')->create({
+ name => 'tests_without_dynamic_runcommand',
+ displayname => 'Tests without dynamic runcommand',
+ owner => 'alice',
+ enable_dynamic_run_command => 0,
+});
+
sub makeJobsetSpec {
my ($dynamic) = @_;

@@ -35,13 +55,15 @@ sub makeJobsetSpec {
};

subtest "validate declarative jobset with dynamic RunCommand enabled by server" => sub {
- my $config = getHydraConfig();
+ my $config = Hydra::Helper::Nix->getHydraConfig();
+ require Hydra::Helper::AddBuilds;
+ Hydra::Helper::AddBuilds->import( qw(validateDeclarativeJobset) );

subtest "project enabled dynamic runcommand, declarative jobset enabled dynamic runcommand" => sub {
ok(
validateDeclarativeJobset(
$config,
- { enable_dynamic_run_command => 1 },
+ $project_with_dynamic_run_command,
"test-jobset",
makeJobsetSpec(JSON::MaybeXS::true)
),
@@ -52,7 +74,7 @@ subtest "validate declarative jobset with dynamic RunCommand enabled by server"
ok(
validateDeclarativeJobset(
$config,
- { enable_dynamic_run_command => 1 },
+ $project_with_dynamic_run_command,
"test-jobset",
makeJobsetSpec(JSON::MaybeXS::false)
),
@@ -64,7 +86,7 @@ subtest "validate declarative jobset with dynamic RunCommand enabled by server"
dies {
validateDeclarativeJobset(
$config,
- { enable_dynamic_run_command => 0 },
+ $project_without_dynamic_run_command,
"test-jobset",
makeJobsetSpec(JSON::MaybeXS::true),
),
@@ -77,7 +99,7 @@ subtest "validate declarative jobset with dynamic RunCommand enabled by server"
ok(
validateDeclarativeJobset(
$config,
- { enable_dynamic_run_command => 0 },
+ $project_without_dynamic_run_command,
"test-jobset",
makeJobsetSpec(JSON::MaybeXS::false)
),