Mirror of https://github.com/NixOS/hydra.git

Store the inputs of each evaluation in the database

Warning: this requires a schema upgrade via "hydra-init".
Eelco Dolstra 2012-04-15 18:36:36 +00:00
parent 12dd78d889
commit fd50ac1d4e
6 changed files with 276 additions and 4 deletions
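What the new table makes possible, as a hypothetical sketch (not part of the commit): once the JobsetEvalInputs table and the relations added below exist, the inputs recorded for an evaluation can be read back through the schema. The snippet assumes $db is an open Hydra::Schema connection, obtained however the Hydra scripts normally obtain one, and $evalId an existing JobsetEvals id.

use strict;
use warnings;

# Illustrative only: list the inputs stored for one evaluation.
sub printEvalInputs {
    my ($db, $evalId) = @_;
    my $eval = $db->resultset('JobsetEvals')->find($evalId)
        or die "no such eval $evalId\n";
    foreach my $input ($eval->jobsetevalinputs->search({}, { order_by => [ "name", "altnr" ] })) {
        printf "%s[%d]: type=%s uri=%s revision=%s\n",
            $input->name, $input->altnr, $input->type,
            $input->uri // "", $input->revision // "";
    }
}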


@@ -388,6 +388,21 @@ __PACKAGE__->belongs_to(
{},
);
=head2 jobsetevalinputs
Type: has_many
Related object: L<Hydra::Schema::JobsetEvalInputs>
=cut
__PACKAGE__->has_many(
"jobsetevalinputs",
"Hydra::Schema::JobsetEvalInputs",
{ "foreign.dependency" => "self.id" },
{},
);
=head2 jobsetevalmembers
Type: has_many
@@ -429,8 +444,8 @@ __PACKAGE__->has_many(
);
# Created by DBIx::Class::Schema::Loader v0.07014 @ 2012-02-29 18:56:22
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:w16c86FRReLPdA8H0yTIRg
# Created by DBIx::Class::Schema::Loader v0.07014 @ 2012-04-15 16:38:10
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:AltTdmkzfwBMYToTkj84vA
__PACKAGE__->has_many(
"dependents",

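As a hypothetical sketch (not part of the commit), the jobsetevalinputs relation added to Builds above, combined with the eval relation on JobsetEvalInputs, allows going from a build to the evaluations that used it as a 'build'-type input; $db is again assumed to be an open Hydra::Schema connection.

# Illustrative only: evaluations that consumed the given build as an input.
# May return the same eval more than once if the build appears under several
# input names.
sub evalsUsingBuild {
    my ($db, $buildId) = @_;
    my $build = $db->resultset('Builds')->find($buildId) or return ();
    return $build->jobsetevalinputs->search_related('eval')->all;
}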

@@ -0,0 +1,152 @@
use utf8;
package Hydra::Schema::JobsetEvalInputs;
# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE
=head1 NAME
Hydra::Schema::JobsetEvalInputs
=cut
use strict;
use warnings;
use base 'DBIx::Class::Core';
=head1 TABLE: C<JobsetEvalInputs>
=cut
__PACKAGE__->table("JobsetEvalInputs");
=head1 ACCESSORS
=head2 eval
data_type: 'integer'
is_foreign_key: 1
is_nullable: 0
=head2 name
data_type: 'text'
is_nullable: 0
=head2 altnr
data_type: 'integer'
is_nullable: 0
=head2 type
data_type: 'text'
is_nullable: 0
=head2 uri
data_type: 'text'
is_nullable: 1
=head2 revision
data_type: 'text'
is_nullable: 1
=head2 value
data_type: 'text'
is_nullable: 1
=head2 dependency
data_type: 'integer'
is_foreign_key: 1
is_nullable: 1
=head2 path
data_type: 'text'
is_nullable: 1
=head2 sha256hash
data_type: 'text'
is_nullable: 1
=cut
__PACKAGE__->add_columns(
"eval",
{ data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
"name",
{ data_type => "text", is_nullable => 0 },
"altnr",
{ data_type => "integer", is_nullable => 0 },
"type",
{ data_type => "text", is_nullable => 0 },
"uri",
{ data_type => "text", is_nullable => 1 },
"revision",
{ data_type => "text", is_nullable => 1 },
"value",
{ data_type => "text", is_nullable => 1 },
"dependency",
{ data_type => "integer", is_foreign_key => 1, is_nullable => 1 },
"path",
{ data_type => "text", is_nullable => 1 },
"sha256hash",
{ data_type => "text", is_nullable => 1 },
);
=head1 PRIMARY KEY
=over 4
=item * L</eval>
=item * L</name>
=item * L</altnr>
=back
=cut
__PACKAGE__->set_primary_key("eval", "name", "altnr");
=head1 RELATIONS
=head2 dependency
Type: belongs_to
Related object: L<Hydra::Schema::Builds>
=cut
__PACKAGE__->belongs_to(
"dependency",
"Hydra::Schema::Builds",
{ id => "dependency" },
{ join_type => "LEFT" },
);
=head2 eval
Type: belongs_to
Related object: L<Hydra::Schema::JobsetEvals>
=cut
__PACKAGE__->belongs_to("eval", "Hydra::Schema::JobsetEvals", { id => "eval" }, {});
# Created by DBIx::Class::Schema::Loader v0.07014 @ 2012-04-15 16:38:10
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:PNxVBdoUNeUzf5BztiIhLw
# You can replace this text with custom code or comments, and it will be preserved on regeneration
1;

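A further hypothetical sketch: rows of the new table are addressed by the composite primary key (eval, name, altnr), and a 'build'-type input can be followed to the Builds row that produced it through the left-joined dependency relation. The input name passed in is whatever the jobset calls it; $db is assumed as before.

# Illustrative only: look up one recorded input and the build behind it.
sub buildBehindInput {
    my ($db, $evalId, $inputName) = @_;
    my $input = $db->resultset('JobsetEvalInputs')->find(
        { eval => $evalId, name => $inputName, altnr => 0 });
    return undef unless defined $input;
    return $input->dependency;  # a Hydra::Schema::Builds row, or undef for non-build inputs
}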

@@ -116,6 +116,21 @@ __PACKAGE__->belongs_to(
{},
);
=head2 jobsetevalinputs
Type: has_many
Related object: L<Hydra::Schema::JobsetEvalInputs>
=cut
__PACKAGE__->has_many(
"jobsetevalinputs",
"Hydra::Schema::JobsetEvalInputs",
{ "foreign.eval" => "self.id" },
{},
);
=head2 jobsetevalmembers
Type: has_many
@@ -142,8 +157,8 @@ Related object: L<Hydra::Schema::Projects>
__PACKAGE__->belongs_to("project", "Hydra::Schema::Projects", { name => "project" }, {});
# Created by DBIx::Class::Schema::Loader v0.07014 @ 2011-12-05 14:15:43
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:eQtF5bcR/qZ625LxWBc7ug
# Created by DBIx::Class::Schema::Loader v0.07014 @ 2012-04-15 16:38:10
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:Yt39QbkhH52hfpJZ4ZECeg
__PACKAGE__->has_many(
"buildIds",

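Since each evaluation now carries its own input records, two evaluations can be compared by their inputs, for example to see which revisions changed between them. A hypothetical sketch; the summaries of two evals can then be diffed as plain hashes.

# Illustrative only: map "name/altnr" to "type:uri:revision" for one eval.
sub inputSummary {
    my ($eval) = @_;
    my %summary;
    foreach my $i ($eval->jobsetevalinputs->all) {
        $summary{$i->name . "/" . $i->altnr} =
            join ":", $i->type, $i->uri // "", $i->revision // "";
    }
    return \%summary;
}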

@@ -171,6 +171,24 @@ sub checkJobset {
while (my ($id, $new) = each %buildIds) {
$ev->jobsetevalmembers->create({ build => $id, isnew => $new });
}
foreach my $name (keys %{$inputInfo}) {
for (my $n = 0; $n < scalar(@{$inputInfo->{$name}}); $n++) {
my $input = $inputInfo->{$name}->[$n];
$ev->jobsetevalinputs->create(
{ name => $name
, altnr => $n
, type => $input->{type}
, uri => $input->{uri}
, revision => $input->{revision}
, value => $input->{value}
, dependency => $input->{id}
, path => $input->{storePath} || "" # !!! temporary hack
, sha256hash => $input->{sha256hash}
});
}
}
print STDERR " created new eval ", $ev->id, "\n";
$ev->builds->update({iscurrent => 1});
} else {


@@ -443,6 +443,27 @@ create table JobsetEvals (
);
create table JobsetEvalInputs (
eval integer not null references JobsetEvals(id) on delete cascade,
name text not null,
altNr integer not null,
-- Copied from the jobsetinputs from which the build was created.
type text not null,
uri text,
revision text,
value text,
dependency integer, -- build ID of the input, for type == 'build'
path text,
sha256hash text,
primary key (eval, name, altNr),
foreign key (dependency) references Builds(id)
);
create table JobsetEvalMembers (
eval integer not null references JobsetEvals(id) on delete cascade,
build integer not null references Builds(id) on delete cascade,
@@ -521,6 +542,7 @@ create index IndexCachedGitInputsOnHash on CachedGitInputs(uri, branch, sha256ha
create index IndexCachedSubversionInputsOnUriRevision on CachedSubversionInputs(uri, revision);
create index IndexCachedBazaarInputsOnUriRevision on CachedBazaarInputs(uri, revision);
create index IndexJobsetEvalMembersOnBuild on JobsetEvalMembers(build);
create index IndexJobsetEvalMembersOnEval on JobsetEvalMembers(eval);
create index IndexJobsetInputAltsOnInput on JobsetInputAlts(project, jobset, input);
create index IndexJobsetInputAltsOnJobset on JobsetInputAlts(project, jobset);
create index IndexProjectsOnEnabled on Projects(enabled);

src/sql/upgrade-6.sql (new file)

@@ -0,0 +1,50 @@
create index IndexJobsetEvalMembersOnEval on JobsetEvalMembers(eval);
-- Inputs of jobset evals.
create table JobsetEvalInputs (
eval integer not null references JobsetEvals(id) on delete cascade,
name text not null,
altNr integer not null,
-- Copied from the jobsetinputs from which the build was created.
type text not null,
uri text,
revision text,
value text,
dependency integer, -- build ID of the input, for type == 'build'
path text,
sha256hash text,
primary key (eval, name, altNr),
foreign key (dependency) references Builds(id)
);
-- Reconstruct the repository inputs for pre-existing evals. This is
-- tricky (and not entirely possible) because builds are not uniquely
-- part of a single eval, so they may have different inputs.
-- For Subversion or Bazaar inputs, pick the highest revision for each
-- input.
insert into JobsetEvalInputs (eval, name, altNr, type, uri, revision)
select e.id, b.name, 0, max(b.type), max(b.uri), max(b.revision)
from (select id from JobsetEvals where hasNewBuilds = 1) e
join JobsetEvalMembers m on e.id = m.eval
join BuildInputs b on b.build = m.build
where (b.type = 'svn' or b.type = 'svn-checkout' or b.type = 'bzr' or b.type = 'bzr-checkout')
group by e.id, b.name
having count(distinct type) = 1 and count(distinct uri) = 1;
-- For other inputs there is no "best" revision to pick, so only do
-- the conversion if there is only one.
insert into JobsetEvalInputs (eval, name, altNr, type, uri, revision)
select e.id, b.name, 0, max(b.type), max(uri), max(revision)
from (select id from JobsetEvals where hasNewBuilds = 1) e
join JobsetEvalMembers m on e.id = m.eval
join BuildInputs b on b.build = m.build
where (b.type != 'svn' and b.type != 'svn-checkout' and b.type != 'bzr' and b.type != 'bzr-checkout')
and b.uri is not null and b.revision is not null
and not exists(select 1 from JobsetEvalInputs i where e.id = i.eval and b.name = i.name)
group by e.id, b.name
having count(distinct type) = 1 and count(distinct uri) = 1 and count(distinct revision) = 1;
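A hypothetical post-upgrade check (not part of the commit): after running this upgrade via "hydra-init", it can be useful to see how many evals with new builds ended up with at least one reconstructed input; as the comments above explain, some will have none. The lowercase hasnewbuilds accessor is assumed to match the generated JobsetEvals schema, and $db is an open Hydra::Schema connection.

# Illustrative only: returns (evals with reconstructed inputs, total evals considered).
sub reconstructionStats {
    my ($db) = @_;
    my @evals = $db->resultset('JobsetEvals')->search({ hasnewbuilds => 1 })->all;
    my $withInputs = grep { $_->jobsetevalinputs->count > 0 } @evals;
    return ($withInputs, scalar @evals);
}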