Merge lp:~wgrant/launchpad/flatten-bfj-3-query into lp:launchpad
- flatten-bfj-3-query
- Merge into devel
Status: Superseded
Proposed branch: lp:~wgrant/launchpad/flatten-bfj-3-query
Merge into: lp:launchpad
Prerequisite: lp:~wgrant/launchpad/flatten-bfj-2-garbo
Diff against target: 1837 lines (+264/-721), 23 files modified
database/sampledata/current-dev.sql (+48/-48) database/sampledata/current.sql (+48/-48) database/schema/security.cfg (+0/-4) lib/lp/buildmaster/interfaces/buildfarmjob.py (+2/-0) lib/lp/buildmaster/interfaces/packagebuild.py (+0/-4) lib/lp/buildmaster/model/buildfarmjob.py (+11/-17) lib/lp/buildmaster/model/packagebuild.py (+4/-8) lib/lp/code/model/sourcepackagerecipe.py (+44/-47) lib/lp/code/model/sourcepackagerecipebuild.py (+5/-7) lib/lp/registry/model/sourcepackage.py (+7/-7) lib/lp/scripts/garbo.py (+0/-227) lib/lp/scripts/tests/test_garbo.py (+1/-136) lib/lp/soyuz/doc/sourcepackagerelease.txt (+1/-3) lib/lp/soyuz/interfaces/binarypackagebuild.py (+0/-7) lib/lp/soyuz/model/archive.py (+27/-47) lib/lp/soyuz/model/binarypackagebuild.py (+41/-58) lib/lp/soyuz/model/buildpackagejob.py (+7/-13) lib/lp/soyuz/model/distributionsourcepackagerelease.py (+1/-5) lib/lp/soyuz/model/publishing.py (+5/-10) lib/lp/soyuz/model/sourcepackagerelease.py (+6/-14) lib/lp/soyuz/tests/test_archive.py (+3/-6) lib/lp/translations/interfaces/translationtemplatesbuild.py (+1/-5) lib/lp/translations/model/translationtemplatesbuild.py (+2/-0)
To merge this branch: bzr merge lp:~wgrant/launchpad/flatten-bfj-3-query
Related bugs: (none)
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Steve Kowalik (community) | code | | Approve |
Review via email: mp+145543@code.launchpad.net
This proposal has been superseded by a proposal from 2013-02-01.
Commit message
Description of the change
The build farm job schema is being reworked to improve performance. Columns from PackageBuild and BuildFarmJob are being merged into tables that previously delegated to them. The PackageBuild table will end up dying entirely, but BuildFarmJob will remain, a shadow of its former self, to answer questions about Archive:+builds and Builder:+history. Additionally, BinaryPackageBuild is growing new distribution, distroseries, sourcepackagename and is_distro_archive columns to make searches even faster.
This branch (which can only land once the garbo jobs have completed) changes the app to read values from the new denormed columns, while still writing to the old ones on PB/BFJ during the transition. Most queries involving BuildFarmJob or PackageBuild are now just over BPB/SPRB/TTB.
Preview Diff
1 | === modified file 'database/sampledata/current-dev.sql' | |||
2 | --- database/sampledata/current-dev.sql 2013-01-17 11:57:53 +0000 | |||
3 | +++ database/sampledata/current-dev.sql 2013-02-01 03:49:23 +0000 | |||
4 | @@ -2464,30 +2464,30 @@ | |||
5 | 2464 | 2464 | ||
6 | 2465 | ALTER TABLE buildfarmjob DISABLE TRIGGER ALL; | 2465 | ALTER TABLE buildfarmjob DISABLE TRIGGER ALL; |
7 | 2466 | 2466 | ||
32 | 2467 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (2, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, 1, 0); | 2467 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (2, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, 1, 0, 1); |
33 | 2468 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (6, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, 1, 0); | 2468 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (6, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, 1, 0, 1); |
34 | 2469 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (7, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, 1, 0); | 2469 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (7, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, 1, 0, 1); |
35 | 2470 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (8, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, 1, 0); | 2470 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (8, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, 1, 0, 1); |
36 | 2471 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (9, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, 1, 0); | 2471 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (9, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, 1, 0, 1); |
37 | 2472 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (10, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); | 2472 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (10, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 1); |
38 | 2473 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (11, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, 1, 0); | 2473 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (11, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, 1, 0, 1); |
39 | 2474 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (12, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, 1, 0); | 2474 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (12, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, 1, 0, 1); |
40 | 2475 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (13, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, 1, 0); | 2475 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (13, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, 1, 0, 1); |
41 | 2476 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (14, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, 1, 0); | 2476 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (14, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, 1, 0, 1); |
42 | 2477 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (15, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, 1, 0); | 2477 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (15, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, 1, 0, 1); |
43 | 2478 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (16, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, 1, 0); | 2478 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (16, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, 1, 0, 1); |
44 | 2479 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (18, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, 1, 0); | 2479 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (18, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, 1, 0, 1); |
45 | 2480 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (19, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, 1, 0); | 2480 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (19, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, 1, 0, 1); |
46 | 2481 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (21, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, 1, 0); | 2481 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (21, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, 1, 0, 1); |
47 | 2482 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (22, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 1, 0); | 2482 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (22, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 1, 0, 1); |
48 | 2483 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (23, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); | 2483 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (23, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 1); |
49 | 2484 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (24, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, 1, 0); | 2484 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (24, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, 1, 0, 11); |
50 | 2485 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (25, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); | 2485 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (25, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 9); |
51 | 2486 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (26, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, 1, 0); | 2486 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (26, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, 1, 0, 9); |
52 | 2487 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (27, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, 1, 0); | 2487 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (27, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, 1, 0, 9); |
53 | 2488 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (28, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, 1, 0); | 2488 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (28, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, 1, 0, 9); |
54 | 2489 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (29, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, 1, 0); | 2489 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (29, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, 1, 0, 12); |
55 | 2490 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (30, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, 1, 0); | 2490 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (30, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, 1, 0, 1); |
56 | 2491 | 2491 | ||
57 | 2492 | 2492 | ||
58 | 2493 | ALTER TABLE buildfarmjob ENABLE TRIGGER ALL; | 2493 | ALTER TABLE buildfarmjob ENABLE TRIGGER ALL; |
59 | @@ -2719,30 +2719,30 @@ | |||
60 | 2719 | 2719 | ||
61 | 2720 | ALTER TABLE binarypackagebuild DISABLE TRIGGER ALL; | 2720 | ALTER TABLE binarypackagebuild DISABLE TRIGGER ALL; |
62 | 2721 | 2721 | ||
87 | 2722 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (2, 1, 1, 14); | 2722 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (2, 1, 1, 14, 1, 0, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, NULL, NULL, 0, 2, 1, 1, true, 1); |
88 | 2723 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (6, 2, 1, 32); | 2723 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (6, 2, 1, 32, 1, 0, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 6, 1, 1, true, 23); |
89 | 2724 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (7, 3, 6, 20); | 2724 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (7, 3, 6, 20, 1, 0, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, NULL, NULL, 0, 7, 1, 3, true, 14); |
90 | 2725 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (8, 4, 6, 14); | 2725 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (8, 4, 6, 14, 1, 0, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, NULL, NULL, 0, 8, 1, 3, true, 1); |
91 | 2726 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (9, 5, 1, 20); | 2726 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (9, 5, 1, 20, 1, 0, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 9, 1, 1, true, 14); |
92 | 2727 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (10, 6, 1, 26); | 2727 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (10, 6, 1, 26, 1, 0, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 10, 1, 1, true, 20); |
93 | 2728 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (11, 7, 6, 25); | 2728 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (11, 7, 6, 25, 1, 0, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, NULL, NULL, 0, 11, 1, 3, true, 19); |
94 | 2729 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (12, 8, 6, 27); | 2729 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (12, 8, 6, 27, 1, 0, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, NULL, 'cpp (>= 4:4.0.1-3), gcc-4.0 (>= 4.0.1-2)', 0, 12, 1, 3, true, 21); |
95 | 2730 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (13, 9, 1, 17); | 2730 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (13, 9, 1, 17, 1, 0, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, NULL, NULL, 0, 13, 1, 1, true, 10); |
96 | 2731 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (14, 10, 1, 28); | 2731 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (14, 10, 1, 28, 1, 0, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, NULL, NULL, 0, 14, 1, 1, true, 22); |
97 | 2732 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (15, 11, 1, 29); | 2732 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (15, 11, 1, 29, 1, 0, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, NULL, NULL, 0, 15, 1, 1, true, 17); |
98 | 2733 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (16, 12, 11, 20); | 2733 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (16, 12, 11, 20, 1, 0, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, NULL, NULL, 0, 16, 1, 3, true, 14); |
99 | 2734 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (18, 13, 8, 14); | 2734 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (18, 13, 8, 14, 1, 0, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, NULL, NULL, 0, 18, 1, 10, true, 1); |
100 | 2735 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (19, 14, 8, 20); | 2735 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (19, 14, 8, 20, 1, 0, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, NULL, NULL, 0, 19, 1, 10, true, 14); |
101 | 2736 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (21, 15, 1, 33); | 2736 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (21, 15, 1, 33, 1, 0, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, NULL, NULL, 0, 21, 1, 1, true, 24); |
102 | 2737 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (22, 16, 8, 33); | 2737 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (22, 16, 8, 33, 1, 0, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 91, NULL, 0, 22, 1, 10, true, 24); |
103 | 2738 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (23, 17, 1, 35); | 2738 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (23, 17, 1, 35, 1, 0, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 23, 1, 1, true, 26); |
104 | 2739 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (24, 18, 1, 33); | 2739 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (24, 18, 1, 33, 11, 0, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 24, 1, 1, false, 24); |
105 | 2740 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (25, 19, 1, 35); | 2740 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (25, 19, 1, 35, 9, 0, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 25, 1, 1, false, 26); |
106 | 2741 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (26, 20, 8, 33); | 2741 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (26, 20, 8, 33, 9, 0, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 26, 1, 10, false, 24); |
107 | 2742 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (27, 21, 1, 20); | 2742 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (27, 21, 1, 20, 9, 0, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, NULL, NULL, 0, 27, 1, 1, false, 14); |
108 | 2743 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (28, 22, 12, 14); | 2743 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (28, 22, 12, 14, 9, 0, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, NULL, NULL, 0, 28, 1, 1, false, 1); |
109 | 2744 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (29, 23, 8, 36); | 2744 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (29, 23, 8, 36, 12, 0, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, NULL, NULL, 0, 29, 1, 10, true, 27); |
110 | 2745 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (30, 24, 12, 14); | 2745 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (30, 24, 12, 14, 1, 0, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, NULL, NULL, 0, 30, 1, 1, true, 1); |
111 | 2746 | 2746 | ||
112 | 2747 | 2747 | ||
113 | 2748 | ALTER TABLE binarypackagebuild ENABLE TRIGGER ALL; | 2748 | ALTER TABLE binarypackagebuild ENABLE TRIGGER ALL; |
114 | 2749 | 2749 | ||
115 | === modified file 'database/sampledata/current.sql' | |||
116 | --- database/sampledata/current.sql 2013-01-17 11:57:53 +0000 | |||
117 | +++ database/sampledata/current.sql 2013-02-01 03:49:23 +0000 | |||
118 | @@ -2459,30 +2459,30 @@ | |||
119 | 2459 | 2459 | ||
120 | 2460 | ALTER TABLE buildfarmjob DISABLE TRIGGER ALL; | 2460 | ALTER TABLE buildfarmjob DISABLE TRIGGER ALL; |
121 | 2461 | 2461 | ||
146 | 2462 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (2, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, 1, 0); | 2462 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (2, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, 1, 0, 1); |
147 | 2463 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (6, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, 1, 0); | 2463 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (6, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, 1, 0, 1); |
148 | 2464 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (7, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, 1, 0); | 2464 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (7, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, 1, 0, 1); |
149 | 2465 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (8, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, 1, 0); | 2465 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (8, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, 1, 0, 1); |
150 | 2466 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (9, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, 1, 0); | 2466 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (9, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, 1, 0, 1); |
151 | 2467 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (10, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); | 2467 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (10, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 1); |
152 | 2468 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (11, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, 1, 0); | 2468 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (11, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, 1, 0, 1); |
153 | 2469 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (12, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, 1, 0); | 2469 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (12, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, 1, 0, 1); |
154 | 2470 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (13, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, 1, 0); | 2470 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (13, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, 1, 0, 1); |
155 | 2471 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (14, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, 1, 0); | 2471 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (14, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, 1, 0, 1); |
156 | 2472 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (15, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, 1, 0); | 2472 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (15, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, 1, 0, 1); |
157 | 2473 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (16, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, 1, 0); | 2473 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (16, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, 1, 0, 1); |
158 | 2474 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (18, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, 1, 0); | 2474 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (18, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, 1, 0, 1); |
159 | 2475 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (19, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, 1, 0); | 2475 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (19, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, 1, 0, 1); |
160 | 2476 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (21, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, 1, 0); | 2476 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (21, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, 1, 0, 1); |
161 | 2477 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (22, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 1, 0); | 2477 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (22, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 1, 0, 1); |
162 | 2478 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (23, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); | 2478 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (23, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 1); |
163 | 2479 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (24, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, 1, 0); | 2479 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (24, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, 1, 0, 11); |
164 | 2480 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (25, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); | 2480 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (25, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 9); |
165 | 2481 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (26, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, 1, 0); | 2481 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (26, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, 1, 0, 9); |
166 | 2482 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (27, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, 1, 0); | 2482 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (27, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, 1, 0, 9); |
167 | 2483 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (28, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, 1, 0); | 2483 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (28, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, 1, 0, 9); |
168 | 2484 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (29, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, 1, 0); | 2484 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (29, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, 1, 0, 12); |
169 | 2485 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (30, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, 1, 0); | 2485 | INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (30, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, 1, 0, 1); |
170 | 2486 | 2486 | ||
171 | 2487 | 2487 | ||
172 | 2488 | ALTER TABLE buildfarmjob ENABLE TRIGGER ALL; | 2488 | ALTER TABLE buildfarmjob ENABLE TRIGGER ALL; |
173 | @@ -2656,30 +2656,30 @@ | |||
174 | 2656 | 2656 | ||
175 | 2657 | ALTER TABLE binarypackagebuild DISABLE TRIGGER ALL; | 2657 | ALTER TABLE binarypackagebuild DISABLE TRIGGER ALL; |
176 | 2658 | 2658 | ||
201 | 2659 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (2, 1, 1, 14); | 2659 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (2, 1, 1, 14, 1, 0, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, NULL, NULL, 0, 2, 1, 1, true, 1); |
202 | 2660 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (6, 2, 1, 32); | 2660 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (6, 2, 1, 32, 1, 0, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 6, 1, 1, true, 23); |
203 | 2661 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (7, 3, 6, 20); | 2661 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (7, 3, 6, 20, 1, 0, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, NULL, NULL, 0, 7, 1, 3, true, 14); |
204 | 2662 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (8, 4, 6, 14); | 2662 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (8, 4, 6, 14, 1, 0, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, NULL, NULL, 0, 8, 1, 3, true, 1); |
205 | 2663 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (9, 5, 1, 20); | 2663 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (9, 5, 1, 20, 1, 0, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 9, 1, 1, true, 14); |
206 | 2664 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (10, 6, 1, 26); | 2664 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (10, 6, 1, 26, 1, 0, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 10, 1, 1, true, 20); |
207 | 2665 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (11, 7, 6, 25); | 2665 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (11, 7, 6, 25, 1, 0, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, NULL, NULL, 0, 11, 1, 3, true, 19); |
208 | 2666 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (12, 8, 6, 27); | 2666 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (12, 8, 6, 27, 1, 0, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, NULL, 'cpp (>= 4:4.0.1-3), gcc-4.0 (>= 4.0.1-2)', 0, 12, 1, 3, true, 21); |
209 | 2667 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (13, 9, 1, 17); | 2667 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (13, 9, 1, 17, 1, 0, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, NULL, NULL, 0, 13, 1, 1, true, 10); |
210 | 2668 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (14, 10, 1, 28); | 2668 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (14, 10, 1, 28, 1, 0, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, NULL, NULL, 0, 14, 1, 1, true, 22); |
211 | 2669 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (15, 11, 1, 29); | 2669 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (15, 11, 1, 29, 1, 0, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, NULL, NULL, 0, 15, 1, 1, true, 17); |
212 | 2670 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (16, 12, 11, 20); | 2670 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (16, 12, 11, 20, 1, 0, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, NULL, NULL, 0, 16, 1, 3, true, 14); |
213 | 2671 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (18, 13, 8, 14); | 2671 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (18, 13, 8, 14, 1, 0, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, NULL, NULL, 0, 18, 1, 10, true, 1); |
214 | 2672 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (19, 14, 8, 20); | 2672 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (19, 14, 8, 20, 1, 0, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, NULL, NULL, 0, 19, 1, 10, true, 14); |
215 | 2673 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (21, 15, 1, 33); | 2673 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (21, 15, 1, 33, 1, 0, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, NULL, NULL, 0, 21, 1, 1, true, 24); |
216 | 2674 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (22, 16, 8, 33); | 2674 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (22, 16, 8, 33, 1, 0, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 91, NULL, 0, 22, 1, 10, true, 24); |
217 | 2675 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (23, 17, 1, 35); | 2675 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (23, 17, 1, 35, 1, 0, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 23, 1, 1, true, 26); |
218 | 2676 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (24, 18, 1, 33); | 2676 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (24, 18, 1, 33, 11, 0, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 24, 1, 1, false, 24); |
219 | 2677 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (25, 19, 1, 35); | 2677 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (25, 19, 1, 35, 9, 0, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 25, 1, 1, false, 26); |
220 | 2678 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (26, 20, 8, 33); | 2678 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (26, 20, 8, 33, 9, 0, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 26, 1, 10, false, 24); |
221 | 2679 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (27, 21, 1, 20); | 2679 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (27, 21, 1, 20, 9, 0, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, NULL, NULL, 0, 27, 1, 1, false, 14); |
222 | 2680 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (28, 22, 12, 14); | 2680 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (28, 22, 12, 14, 9, 0, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, NULL, NULL, 0, 28, 1, 1, false, 1); |
223 | 2681 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (29, 23, 8, 36); | 2681 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (29, 23, 8, 36, 12, 0, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, NULL, NULL, 0, 29, 1, 10, true, 27); |
224 | 2682 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (30, 24, 12, 14); | 2682 | INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (30, 24, 12, 14, 1, 0, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, NULL, NULL, 0, 30, 1, 1, true, 1); |
225 | 2683 | 2683 | ||
226 | 2684 | 2684 | ||
227 | 2685 | ALTER TABLE binarypackagebuild ENABLE TRIGGER ALL; | 2685 | ALTER TABLE binarypackagebuild ENABLE TRIGGER ALL; |
228 | 2686 | 2686 | ||
229 | === modified file 'database/schema/security.cfg' | |||
230 | --- database/schema/security.cfg 2013-02-01 03:49:23 +0000 | |||
231 | +++ database/schema/security.cfg 2013-02-01 03:49:23 +0000 | |||
232 | @@ -2218,7 +2218,6 @@ | |||
233 | 2218 | public.answercontact = SELECT, DELETE | 2218 | public.answercontact = SELECT, DELETE |
234 | 2219 | public.branch = SELECT, UPDATE | 2219 | public.branch = SELECT, UPDATE |
235 | 2220 | public.branchjob = SELECT, DELETE | 2220 | public.branchjob = SELECT, DELETE |
236 | 2221 | public.binarypackagebuild = SELECT, UPDATE | ||
237 | 2222 | public.binarypackagename = SELECT | 2221 | public.binarypackagename = SELECT |
238 | 2223 | public.binarypackagerelease = SELECT | 2222 | public.binarypackagerelease = SELECT |
239 | 2224 | public.binarypackagepublishinghistory = SELECT, UPDATE | 2223 | public.binarypackagepublishinghistory = SELECT, UPDATE |
240 | @@ -2241,7 +2240,6 @@ | |||
241 | 2241 | public.bugtaskflat = SELECT | 2240 | public.bugtaskflat = SELECT |
242 | 2242 | public.bugwatch = SELECT, UPDATE | 2241 | public.bugwatch = SELECT, UPDATE |
243 | 2243 | public.bugwatchactivity = SELECT, DELETE | 2242 | public.bugwatchactivity = SELECT, DELETE |
244 | 2244 | public.buildfarmjob = SELECT, UPDATE | ||
245 | 2245 | public.codeimportevent = SELECT, DELETE | 2243 | public.codeimportevent = SELECT, DELETE |
246 | 2246 | public.codeimporteventdata = SELECT, DELETE | 2244 | public.codeimporteventdata = SELECT, DELETE |
247 | 2247 | public.codeimportresult = SELECT, DELETE | 2245 | public.codeimportresult = SELECT, DELETE |
248 | @@ -2265,7 +2263,6 @@ | |||
249 | 2265 | public.revisionauthor = SELECT, UPDATE | 2263 | public.revisionauthor = SELECT, UPDATE |
250 | 2266 | public.revisioncache = SELECT, DELETE | 2264 | public.revisioncache = SELECT, DELETE |
251 | 2267 | public.sourcepackagename = SELECT | 2265 | public.sourcepackagename = SELECT |
252 | 2268 | public.sourcepackagerecipebuild = SELECT, UPDATE | ||
253 | 2269 | public.sourcepackagerelease = SELECT | 2266 | public.sourcepackagerelease = SELECT |
254 | 2270 | public.sourcepackagepublishinghistory = SELECT, UPDATE | 2267 | public.sourcepackagepublishinghistory = SELECT, UPDATE |
255 | 2271 | public.suggestivepotemplate = INSERT, DELETE | 2268 | public.suggestivepotemplate = INSERT, DELETE |
256 | @@ -2273,7 +2270,6 @@ | |||
257 | 2273 | public.teamparticipation = SELECT, DELETE | 2270 | public.teamparticipation = SELECT, DELETE |
258 | 2274 | public.translationmessage = SELECT, DELETE | 2271 | public.translationmessage = SELECT, DELETE |
259 | 2275 | public.translationtemplateitem = SELECT, DELETE | 2272 | public.translationtemplateitem = SELECT, DELETE |
260 | 2276 | public.translationtemplatesbuild = SELECT, UPDATE | ||
261 | 2277 | type=user | 2273 | type=user |
262 | 2278 | 2274 | ||
263 | 2279 | [garbo_daily] | 2275 | [garbo_daily] |
264 | 2280 | 2276 | ||
265 | === modified file 'lib/lp/buildmaster/interfaces/buildfarmjob.py' | |||
266 | --- lib/lp/buildmaster/interfaces/buildfarmjob.py 2013-01-22 08:31:09 +0000 | |||
267 | +++ lib/lp/buildmaster/interfaces/buildfarmjob.py 2013-02-01 03:49:23 +0000 | |||
268 | @@ -179,6 +179,8 @@ | |||
269 | 179 | 179 | ||
270 | 180 | id = Attribute('The build farm job ID.') | 180 | id = Attribute('The build farm job ID.') |
271 | 181 | 181 | ||
272 | 182 | build_farm_job = Attribute('Generic build farm job record') | ||
273 | 183 | |||
274 | 182 | processor = Reference( | 184 | processor = Reference( |
275 | 183 | IProcessor, title=_("Processor"), required=False, readonly=True, | 185 | IProcessor, title=_("Processor"), required=False, readonly=True, |
276 | 184 | description=_( | 186 | description=_( |
277 | 185 | 187 | ||
278 | === modified file 'lib/lp/buildmaster/interfaces/packagebuild.py' | |||
279 | --- lib/lp/buildmaster/interfaces/packagebuild.py 2013-02-01 03:49:23 +0000 | |||
280 | +++ lib/lp/buildmaster/interfaces/packagebuild.py 2013-02-01 03:49:23 +0000 | |||
281 | @@ -72,10 +72,6 @@ | |||
282 | 72 | description=_("A URL for failed upload logs." | 72 | description=_("A URL for failed upload logs." |
283 | 73 | "Will be None if there was no failure."))) | 73 | "Will be None if there was no failure."))) |
284 | 74 | 74 | ||
285 | 75 | build_farm_job = Reference( | ||
286 | 76 | title=_('Build farm job'), schema=IBuildFarmJob, required=True, | ||
287 | 77 | readonly=True, description=_('The base build farm job.')) | ||
288 | 78 | |||
289 | 79 | current_component = Attribute( | 75 | current_component = Attribute( |
290 | 80 | 'Component where the source related to this build was last ' | 76 | 'Component where the source related to this build was last ' |
291 | 81 | 'published.') | 77 | 'published.') |
292 | 82 | 78 | ||
293 | === modified file 'lib/lp/buildmaster/model/buildfarmjob.py' | |||
294 | --- lib/lp/buildmaster/model/buildfarmjob.py 2013-02-01 03:49:23 +0000 | |||
295 | +++ lib/lp/buildmaster/model/buildfarmjob.py 2013-02-01 03:49:23 +0000 | |||
296 | @@ -199,8 +199,6 @@ | |||
297 | 199 | archive_id = Int(name='archive') | 199 | archive_id = Int(name='archive') |
298 | 200 | archive = Reference(archive_id, 'Archive.id') | 200 | archive = Reference(archive_id, 'Archive.id') |
299 | 201 | 201 | ||
300 | 202 | dependencies = None | ||
301 | 203 | |||
302 | 204 | def __init__(self, job_type, status=BuildStatus.NEEDSBUILD, | 202 | def __init__(self, job_type, status=BuildStatus.NEEDSBUILD, |
303 | 205 | processor=None, virtualized=None, date_created=None, | 203 | processor=None, virtualized=None, date_created=None, |
304 | 206 | builder=None, archive=None): | 204 | builder=None, archive=None): |
305 | @@ -227,51 +225,47 @@ | |||
306 | 227 | 225 | ||
307 | 228 | @property | 226 | @property |
308 | 229 | def processor(self): | 227 | def processor(self): |
310 | 230 | return self.build_farm_job.processor | 228 | return self._new_processor |
311 | 231 | 229 | ||
312 | 232 | @property | 230 | @property |
313 | 233 | def virtualized(self): | 231 | def virtualized(self): |
315 | 234 | return self.build_farm_job.virtualized | 232 | return self._new_virtualized |
316 | 235 | 233 | ||
317 | 236 | @property | 234 | @property |
318 | 237 | def date_created(self): | 235 | def date_created(self): |
320 | 238 | return self.build_farm_job.date_created | 236 | return self._new_date_created |
321 | 239 | 237 | ||
322 | 240 | @property | 238 | @property |
323 | 241 | def date_started(self): | 239 | def date_started(self): |
325 | 242 | return self.build_farm_job.date_started | 240 | return self._new_date_started |
326 | 243 | 241 | ||
327 | 244 | @property | 242 | @property |
328 | 245 | def date_finished(self): | 243 | def date_finished(self): |
330 | 246 | return self.build_farm_job.date_finished | 244 | return self._new_date_finished |
331 | 247 | 245 | ||
332 | 248 | @property | 246 | @property |
333 | 249 | def date_first_dispatched(self): | 247 | def date_first_dispatched(self): |
335 | 250 | return self.build_farm_job.date_first_dispatched | 248 | return self._new_date_first_dispatched |
336 | 251 | 249 | ||
337 | 252 | @property | 250 | @property |
338 | 253 | def builder(self): | 251 | def builder(self): |
340 | 254 | return self.build_farm_job.builder | 252 | return self._new_builder |
341 | 255 | 253 | ||
342 | 256 | @property | 254 | @property |
343 | 257 | def status(self): | 255 | def status(self): |
345 | 258 | return self.build_farm_job.status | 256 | return self._new_status |
346 | 259 | 257 | ||
347 | 260 | @property | 258 | @property |
348 | 261 | def log(self): | 259 | def log(self): |
354 | 262 | return self.build_farm_job.log | 260 | return self._new_log |
350 | 263 | |||
351 | 264 | @property | ||
352 | 265 | def job_type(self): | ||
353 | 266 | return self.build_farm_job.job_type | ||
355 | 267 | 261 | ||
356 | 268 | @property | 262 | @property |
357 | 269 | def failure_count(self): | 263 | def failure_count(self): |
359 | 270 | return self.build_farm_job.failure_count | 264 | return self._new_failure_count |
360 | 271 | 265 | ||
361 | 272 | @property | 266 | @property |
362 | 273 | def dependencies(self): | 267 | def dependencies(self): |
364 | 274 | return self.build_farm_job.dependencies | 268 | return None |
365 | 275 | 269 | ||
366 | 276 | @property | 270 | @property |
367 | 277 | def title(self): | 271 | def title(self): |
368 | 278 | 272 | ||
369 | === modified file 'lib/lp/buildmaster/model/packagebuild.py' | |||
370 | --- lib/lp/buildmaster/model/packagebuild.py 2013-02-01 03:49:23 +0000 | |||
371 | +++ lib/lp/buildmaster/model/packagebuild.py 2013-02-01 03:49:23 +0000 | |||
372 | @@ -110,24 +110,20 @@ | |||
373 | 110 | class PackageBuildMixin(BuildFarmJobMixin): | 110 | class PackageBuildMixin(BuildFarmJobMixin): |
374 | 111 | 111 | ||
375 | 112 | @property | 112 | @property |
376 | 113 | def build_farm_job(self): | ||
377 | 114 | return self.package_build.build_farm_job | ||
378 | 115 | |||
379 | 116 | @property | ||
380 | 117 | def archive(self): | 113 | def archive(self): |
382 | 118 | return self.package_build.archive | 114 | return self._new_archive |
383 | 119 | 115 | ||
384 | 120 | @property | 116 | @property |
385 | 121 | def pocket(self): | 117 | def pocket(self): |
387 | 122 | return self.package_build.pocket | 118 | return self._new_pocket |
388 | 123 | 119 | ||
389 | 124 | @property | 120 | @property |
390 | 125 | def upload_log(self): | 121 | def upload_log(self): |
392 | 126 | return self.package_build.upload_log | 122 | return self._new_upload_log |
393 | 127 | 123 | ||
394 | 128 | @property | 124 | @property |
395 | 129 | def dependencies(self): | 125 | def dependencies(self): |
397 | 130 | return self.package_build.dependencies | 126 | return self._new_dependencies |
398 | 131 | 127 | ||
399 | 132 | @property | 128 | @property |
400 | 133 | def current_component(self): | 129 | def current_component(self): |
401 | 134 | 130 | ||
402 | === modified file 'lib/lp/code/model/sourcepackagerecipe.py' | |||
403 | --- lib/lp/code/model/sourcepackagerecipe.py 2012-09-05 05:08:26 +0000 | |||
404 | +++ lib/lp/code/model/sourcepackagerecipe.py 2013-02-01 03:49:23 +0000 | |||
405 | @@ -17,8 +17,7 @@ | |||
406 | 17 | from pytz import utc | 17 | from pytz import utc |
407 | 18 | from storm.expr import ( | 18 | from storm.expr import ( |
408 | 19 | And, | 19 | And, |
411 | 20 | Join, | 20 | LeftJoin, |
410 | 21 | RightJoin, | ||
412 | 22 | ) | 21 | ) |
413 | 23 | from storm.locals import ( | 22 | from storm.locals import ( |
414 | 24 | Bool, | 23 | Bool, |
415 | @@ -37,8 +36,6 @@ | |||
416 | 37 | ) | 36 | ) |
417 | 38 | 37 | ||
418 | 39 | from lp.buildmaster.enums import BuildStatus | 38 | from lp.buildmaster.enums import BuildStatus |
419 | 40 | from lp.buildmaster.model.buildfarmjob import BuildFarmJob | ||
420 | 41 | from lp.buildmaster.model.packagebuild import PackageBuild | ||
421 | 42 | from lp.code.errors import ( | 39 | from lp.code.errors import ( |
422 | 43 | BuildAlreadyPending, | 40 | BuildAlreadyPending, |
423 | 44 | BuildNotAllowedForDistro, | 41 | BuildNotAllowedForDistro, |
424 | @@ -215,24 +212,25 @@ | |||
425 | 215 | store.add(sprecipe) | 212 | store.add(sprecipe) |
426 | 216 | return sprecipe | 213 | return sprecipe |
427 | 217 | 214 | ||
430 | 218 | @classmethod | 215 | @staticmethod |
431 | 219 | def findStaleDailyBuilds(cls): | 216 | def findStaleDailyBuilds(): |
432 | 220 | one_day_ago = datetime.now(utc) - timedelta(hours=23, minutes=50) | 217 | one_day_ago = datetime.now(utc) - timedelta(hours=23, minutes=50) |
448 | 221 | joins = RightJoin( | 218 | joins = ( |
449 | 222 | Join( | 219 | SourcePackageRecipe, |
450 | 223 | Join(SourcePackageRecipeBuild, PackageBuild, | 220 | LeftJoin( |
451 | 224 | PackageBuild.id == | 221 | SourcePackageRecipeBuild, |
452 | 225 | SourcePackageRecipeBuild.package_build_id), | 222 | And(SourcePackageRecipeBuild.recipe_id == |
453 | 226 | BuildFarmJob, | 223 | SourcePackageRecipe.id, |
454 | 227 | And(BuildFarmJob.id == PackageBuild.build_farm_job_id, | 224 | SourcePackageRecipeBuild._new_archive_id == |
455 | 228 | BuildFarmJob.date_created > one_day_ago)), | 225 | SourcePackageRecipe.daily_build_archive_id, |
456 | 229 | SourcePackageRecipe, | 226 | SourcePackageRecipeBuild._new_date_created > one_day_ago)), |
457 | 230 | And(SourcePackageRecipeBuild.recipe == SourcePackageRecipe.id, | 227 | ) |
458 | 231 | SourcePackageRecipe.daily_build_archive_id == | 228 | return IStore(SourcePackageRecipe).using(*joins).find( |
459 | 232 | PackageBuild.archive_id)) | 229 | SourcePackageRecipe, |
460 | 233 | return IStore(cls).using(joins).find( | 230 | SourcePackageRecipe.is_stale == True, |
461 | 234 | cls, cls.is_stale == True, cls.build_daily == True, | 231 | SourcePackageRecipe.build_daily == True, |
462 | 235 | BuildFarmJob.date_created == None).config(distinct=True) | 232 | SourcePackageRecipeBuild._new_date_created == None, |
463 | 233 | ).config(distinct=True) | ||
464 | 236 | 234 | ||
465 | 237 | @staticmethod | 235 | @staticmethod |
466 | 238 | def exists(owner, name): | 236 | def exists(owner, name): |
467 | @@ -288,10 +286,8 @@ | |||
468 | 288 | pending = IStore(self).find(SourcePackageRecipeBuild, | 286 | pending = IStore(self).find(SourcePackageRecipeBuild, |
469 | 289 | SourcePackageRecipeBuild.recipe_id == self.id, | 287 | SourcePackageRecipeBuild.recipe_id == self.id, |
470 | 290 | SourcePackageRecipeBuild.distroseries_id == distroseries.id, | 288 | SourcePackageRecipeBuild.distroseries_id == distroseries.id, |
475 | 291 | PackageBuild.archive_id == archive.id, | 289 | SourcePackageRecipeBuild._new_archive_id == archive.id, |
476 | 292 | PackageBuild.id == SourcePackageRecipeBuild.package_build_id, | 290 | SourcePackageRecipeBuild._new_status == BuildStatus.NEEDSBUILD) |
473 | 293 | BuildFarmJob.id == PackageBuild.build_farm_job_id, | ||
474 | 294 | BuildFarmJob.status == BuildStatus.NEEDSBUILD) | ||
477 | 295 | if pending.any() is not None: | 291 | if pending.any() is not None: |
478 | 296 | raise BuildAlreadyPending(self, distroseries) | 292 | raise BuildAlreadyPending(self, distroseries) |
479 | 297 | 293 | ||
480 | @@ -323,39 +319,42 @@ | |||
481 | 323 | @property | 319 | @property |
482 | 324 | def builds(self): | 320 | def builds(self): |
483 | 325 | """See `ISourcePackageRecipe`.""" | 321 | """See `ISourcePackageRecipe`.""" |
488 | 326 | order_by = (Desc(Greatest( | 322 | order_by = ( |
489 | 327 | BuildFarmJob.date_started, | 323 | Desc(Greatest( |
490 | 328 | BuildFarmJob.date_finished)), | 324 | SourcePackageRecipeBuild._new_date_started, |
491 | 329 | Desc(BuildFarmJob.date_created), Desc(BuildFarmJob.id)) | 325 | SourcePackageRecipeBuild._new_date_finished)), |
492 | 326 | Desc(SourcePackageRecipeBuild._new_date_created), | ||
493 | 327 | Desc(SourcePackageRecipeBuild.id)) | ||
494 | 330 | return self._getBuilds(None, order_by) | 328 | return self._getBuilds(None, order_by) |
495 | 331 | 329 | ||
496 | 332 | @property | 330 | @property |
497 | 333 | def completed_builds(self): | 331 | def completed_builds(self): |
498 | 334 | """See `ISourcePackageRecipe`.""" | 332 | """See `ISourcePackageRecipe`.""" |
504 | 335 | filter_term = BuildFarmJob.status != BuildStatus.NEEDSBUILD | 333 | filter_term = ( |
505 | 336 | order_by = (Desc(Greatest( | 334 | SourcePackageRecipeBuild._new_status != BuildStatus.NEEDSBUILD) |
506 | 337 | BuildFarmJob.date_started, | 335 | order_by = ( |
507 | 338 | BuildFarmJob.date_finished)), | 336 | Desc(Greatest( |
508 | 339 | Desc(BuildFarmJob.id)) | 337 | SourcePackageRecipeBuild._new_date_started, |
509 | 338 | SourcePackageRecipeBuild._new_date_finished)), | ||
510 | 339 | Desc(SourcePackageRecipeBuild.id)) | ||
511 | 340 | return self._getBuilds(filter_term, order_by) | 340 | return self._getBuilds(filter_term, order_by) |
512 | 341 | 341 | ||
513 | 342 | @property | 342 | @property |
514 | 343 | def pending_builds(self): | 343 | def pending_builds(self): |
515 | 344 | """See `ISourcePackageRecipe`.""" | 344 | """See `ISourcePackageRecipe`.""" |
517 | 345 | filter_term = BuildFarmJob.status == BuildStatus.NEEDSBUILD | 345 | filter_term = ( |
518 | 346 | SourcePackageRecipeBuild._new_status == BuildStatus.NEEDSBUILD) | ||
519 | 346 | # We want to order by date_created but this is the same as ordering | 347 | # We want to order by date_created but this is the same as ordering |
520 | 347 | # by id (since id increases monotonically) and is less expensive. | 348 | # by id (since id increases monotonically) and is less expensive. |
522 | 348 | order_by = Desc(BuildFarmJob.id) | 349 | order_by = Desc(SourcePackageRecipeBuild.id) |
523 | 349 | return self._getBuilds(filter_term, order_by) | 350 | return self._getBuilds(filter_term, order_by) |
524 | 350 | 351 | ||
525 | 351 | def _getBuilds(self, filter_term, order_by): | 352 | def _getBuilds(self, filter_term, order_by): |
526 | 352 | """The actual query to get the builds.""" | 353 | """The actual query to get the builds.""" |
527 | 353 | query_args = [ | 354 | query_args = [ |
528 | 354 | SourcePackageRecipeBuild.recipe == self, | 355 | SourcePackageRecipeBuild.recipe == self, |
533 | 355 | SourcePackageRecipeBuild.package_build_id == PackageBuild.id, | 356 | SourcePackageRecipeBuild._new_archive_id == Archive.id, |
534 | 356 | PackageBuild.build_farm_job_id == BuildFarmJob.id, | 357 | Archive._enabled == True, |
531 | 357 | And(PackageBuild.archive_id == Archive.id, | ||
532 | 358 | Archive._enabled == True), | ||
535 | 359 | ] | 358 | ] |
536 | 360 | if filter_term is not None: | 359 | if filter_term is not None: |
537 | 361 | query_args.append(filter_term) | 360 | query_args.append(filter_term) |
538 | @@ -378,19 +377,17 @@ | |||
539 | 378 | def last_build(self): | 377 | def last_build(self): |
540 | 379 | """See `ISourcePackageRecipeBuild`.""" | 378 | """See `ISourcePackageRecipeBuild`.""" |
541 | 380 | return self._getBuilds( | 379 | return self._getBuilds( |
543 | 381 | True, Desc(BuildFarmJob.date_finished)).first() | 380 | True, Desc(SourcePackageRecipeBuild._new_date_finished)).first() |
544 | 382 | 381 | ||
545 | 383 | def getMedianBuildDuration(self): | 382 | def getMedianBuildDuration(self): |
546 | 384 | """Return the median duration of builds of this recipe.""" | 383 | """Return the median duration of builds of this recipe.""" |
547 | 385 | store = IStore(self) | 384 | store = IStore(self) |
548 | 386 | result = store.find( | 385 | result = store.find( |
550 | 387 | BuildFarmJob, | 386 | SourcePackageRecipeBuild, |
551 | 388 | SourcePackageRecipeBuild.recipe == self.id, | 387 | SourcePackageRecipeBuild.recipe == self.id, |
557 | 389 | BuildFarmJob.date_finished != None, | 388 | SourcePackageRecipeBuild._new_date_finished != None) |
558 | 390 | BuildFarmJob.id == PackageBuild.build_farm_job_id, | 389 | durations = [ |
559 | 391 | SourcePackageRecipeBuild.package_build_id == PackageBuild.id) | 390 | build.date_finished - build.date_started for build in result] |
555 | 392 | durations = [build.date_finished - build.date_started for build in | ||
556 | 393 | result] | ||
560 | 394 | if len(durations) == 0: | 391 | if len(durations) == 0: |
561 | 395 | return None | 392 | return None |
562 | 396 | durations.sort(reverse=True) | 393 | durations.sort(reverse=True) |
563 | 397 | 394 | ||
564 | === modified file 'lib/lp/code/model/sourcepackagerecipebuild.py' | |||
565 | --- lib/lp/code/model/sourcepackagerecipebuild.py 2013-02-01 03:49:23 +0000 | |||
566 | +++ lib/lp/code/model/sourcepackagerecipebuild.py 2013-02-01 03:49:23 +0000 | |||
567 | @@ -93,6 +93,7 @@ | |||
568 | 93 | package_build = Reference(package_build_id, 'PackageBuild.id') | 93 | package_build = Reference(package_build_id, 'PackageBuild.id') |
569 | 94 | 94 | ||
570 | 95 | build_farm_job_type = BuildFarmJobType.RECIPEBRANCHBUILD | 95 | build_farm_job_type = BuildFarmJobType.RECIPEBRANCHBUILD |
571 | 96 | job_type = build_farm_job_type | ||
572 | 96 | 97 | ||
573 | 97 | id = Int(primary=True) | 98 | id = Int(primary=True) |
574 | 98 | 99 | ||
575 | @@ -150,8 +151,8 @@ | |||
576 | 150 | requester = Reference(requester_id, 'Person.id') | 151 | requester = Reference(requester_id, 'Person.id') |
577 | 151 | 152 | ||
578 | 152 | # Migrating from PackageBuild | 153 | # Migrating from PackageBuild |
581 | 153 | _new_build_farm_job_id = Int(name='build_farm_job') | 154 | build_farm_job_id = Int(name='build_farm_job') |
582 | 154 | _new_build_farm_job = Reference(_new_build_farm_job_id, BuildFarmJob.id) | 155 | build_farm_job = Reference(build_farm_job_id, BuildFarmJob.id) |
583 | 155 | 156 | ||
584 | 156 | _new_archive_id = Int(name='archive') | 157 | _new_archive_id = Int(name='archive') |
585 | 157 | _new_archive = Reference(_new_archive_id, 'Archive.id') | 158 | _new_archive = Reference(_new_archive_id, 'Archive.id') |
586 | @@ -216,7 +217,7 @@ | |||
587 | 216 | requester, archive, pocket, date_created): | 217 | requester, archive, pocket, date_created): |
588 | 217 | """Construct a SourcePackageRecipeBuild.""" | 218 | """Construct a SourcePackageRecipeBuild.""" |
589 | 218 | super(SourcePackageRecipeBuild, self).__init__() | 219 | super(SourcePackageRecipeBuild, self).__init__() |
591 | 219 | self._new_build_farm_job = build_farm_job | 220 | self.build_farm_job = build_farm_job |
592 | 220 | self.package_build = package_build | 221 | self.package_build = package_build |
593 | 221 | self.distroseries = distroseries | 222 | self.distroseries = distroseries |
594 | 222 | self.recipe = recipe | 223 | self.recipe = recipe |
595 | @@ -365,16 +366,13 @@ | |||
596 | 365 | 366 | ||
597 | 366 | @classmethod | 367 | @classmethod |
598 | 367 | def getRecentBuilds(cls, requester, recipe, distroseries, _now=None): | 368 | def getRecentBuilds(cls, requester, recipe, distroseries, _now=None): |
599 | 368 | from lp.buildmaster.model.buildfarmjob import BuildFarmJob | ||
600 | 369 | if _now is None: | 369 | if _now is None: |
601 | 370 | _now = datetime.now(pytz.UTC) | 370 | _now = datetime.now(pytz.UTC) |
602 | 371 | store = IMasterStore(SourcePackageRecipeBuild) | 371 | store = IMasterStore(SourcePackageRecipeBuild) |
603 | 372 | old_threshold = _now - timedelta(days=1) | 372 | old_threshold = _now - timedelta(days=1) |
604 | 373 | return store.find(cls, cls.distroseries_id == distroseries.id, | 373 | return store.find(cls, cls.distroseries_id == distroseries.id, |
605 | 374 | cls.requester_id == requester.id, cls.recipe_id == recipe.id, | 374 | cls.requester_id == requester.id, cls.recipe_id == recipe.id, |
609 | 375 | BuildFarmJob.date_created > old_threshold, | 375 | cls._new_date_created > old_threshold) |
607 | 376 | BuildFarmJob.id == PackageBuild.build_farm_job_id, | ||
608 | 377 | PackageBuild.id == cls.package_build_id) | ||
610 | 378 | 376 | ||
611 | 379 | def makeJob(self): | 377 | def makeJob(self): |
612 | 380 | """See `ISourcePackageRecipeBuildJob`.""" | 378 | """See `ISourcePackageRecipeBuildJob`.""" |
613 | 381 | 379 | ||
614 | === modified file 'lib/lp/registry/model/sourcepackage.py' | |||
615 | --- lib/lp/registry/model/sourcepackage.py 2012-11-26 08:33:03 +0000 | |||
616 | +++ lib/lp/registry/model/sourcepackage.py 2013-02-01 03:49:23 +0000 | |||
617 | @@ -601,8 +601,8 @@ | |||
618 | 601 | # binary_only parameter as a source package can only have | 601 | # binary_only parameter as a source package can only have |
619 | 602 | # binary builds. | 602 | # binary builds. |
620 | 603 | 603 | ||
623 | 604 | clauseTables = ['SourcePackageRelease', 'PackageBuild', | 604 | clauseTables = [ |
624 | 605 | 'SourcePackagePublishingHistory'] | 605 | 'SourcePackageRelease', 'SourcePackagePublishingHistory'] |
625 | 606 | 606 | ||
626 | 607 | condition_clauses = [""" | 607 | condition_clauses = [""" |
627 | 608 | BinaryPackageBuild.source_package_release = | 608 | BinaryPackageBuild.source_package_release = |
628 | @@ -612,7 +612,7 @@ | |||
629 | 612 | SourcePackagePublishingHistory.archive IN %s AND | 612 | SourcePackagePublishingHistory.archive IN %s AND |
630 | 613 | SourcePackagePublishingHistory.sourcepackagerelease = | 613 | SourcePackagePublishingHistory.sourcepackagerelease = |
631 | 614 | SourcePackageRelease.id AND | 614 | SourcePackageRelease.id AND |
633 | 615 | SourcePackagePublishingHistory.archive = PackageBuild.archive | 615 | SourcePackagePublishingHistory.archive = BinaryPackageBuild.archive |
634 | 616 | """ % sqlvalues(self.sourcepackagename, | 616 | """ % sqlvalues(self.sourcepackagename, |
635 | 617 | self.distroseries, | 617 | self.distroseries, |
636 | 618 | list(self.distribution.all_distro_archive_ids))] | 618 | list(self.distribution.all_distro_archive_ids))] |
637 | @@ -627,8 +627,8 @@ | |||
638 | 627 | # exclude gina-generated and security (dak-made) builds | 627 | # exclude gina-generated and security (dak-made) builds |
639 | 628 | # buildstate == FULLYBUILT && datebuilt == null | 628 | # buildstate == FULLYBUILT && datebuilt == null |
640 | 629 | condition_clauses.append( | 629 | condition_clauses.append( |
643 | 630 | "NOT (BuildFarmJob.status=%s AND " | 630 | "NOT (BinaryPackageBuild.status=%s AND " |
644 | 631 | " BuildFarmJob.date_finished is NULL)" | 631 | " BinaryPackageBuild.date_finished is NULL)" |
645 | 632 | % sqlvalues(BuildStatus.FULLYBUILT)) | 632 | % sqlvalues(BuildStatus.FULLYBUILT)) |
646 | 633 | 633 | ||
647 | 634 | # Ordering according status | 634 | # Ordering according status |
648 | @@ -648,9 +648,9 @@ | |||
649 | 648 | clauseTables.append('BuildQueue') | 648 | clauseTables.append('BuildQueue') |
650 | 649 | condition_clauses.append('BuildQueue.job = BuildPackageJob.job') | 649 | condition_clauses.append('BuildQueue.job = BuildPackageJob.job') |
651 | 650 | elif build_state == BuildStatus.SUPERSEDED or build_state is None: | 650 | elif build_state == BuildStatus.SUPERSEDED or build_state is None: |
653 | 651 | orderBy = [Desc("BuildFarmJob.date_created")] | 651 | orderBy = [Desc("BinaryPackageBuild.date_created")] |
654 | 652 | else: | 652 | else: |
656 | 653 | orderBy = [Desc("BuildFarmJob.date_finished")] | 653 | orderBy = [Desc("BinaryPackageBuild.date_finished")] |
657 | 654 | 654 | ||
658 | 655 | # Fallback to ordering by -id as a tie-breaker. | 655 | # Fallback to ordering by -id as a tie-breaker. |
659 | 656 | orderBy.append(Desc("id")) | 656 | orderBy.append(Desc("id")) |
660 | 657 | 657 | ||
661 | === modified file 'lib/lp/scripts/garbo.py' | |||
662 | --- lib/lp/scripts/garbo.py 2013-02-01 03:49:23 +0000 | |||
663 | +++ lib/lp/scripts/garbo.py 2013-01-17 00:25:48 +0000 | |||
664 | @@ -57,8 +57,6 @@ | |||
665 | 57 | BugWatchScheduler, | 57 | BugWatchScheduler, |
666 | 58 | MAX_SAMPLE_SIZE, | 58 | MAX_SAMPLE_SIZE, |
667 | 59 | ) | 59 | ) |
668 | 60 | from lp.buildmaster.model.buildfarmjob import BuildFarmJob | ||
669 | 61 | from lp.buildmaster.model.packagebuild import PackageBuild | ||
670 | 62 | from lp.code.interfaces.revision import IRevisionSet | 60 | from lp.code.interfaces.revision import IRevisionSet |
671 | 63 | from lp.code.model.codeimportevent import CodeImportEvent | 61 | from lp.code.model.codeimportevent import CodeImportEvent |
672 | 64 | from lp.code.model.codeimportresult import CodeImportResult | 62 | from lp.code.model.codeimportresult import CodeImportResult |
673 | @@ -66,10 +64,8 @@ | |||
674 | 66 | RevisionAuthor, | 64 | RevisionAuthor, |
675 | 67 | RevisionCache, | 65 | RevisionCache, |
676 | 68 | ) | 66 | ) |
677 | 69 | from lp.code.model.sourcepackagerecipebuild import SourcePackageRecipeBuild | ||
678 | 70 | from lp.hardwaredb.model.hwdb import HWSubmission | 67 | from lp.hardwaredb.model.hwdb import HWSubmission |
679 | 71 | from lp.registry.model.commercialsubscription import CommercialSubscription | 68 | from lp.registry.model.commercialsubscription import CommercialSubscription |
680 | 72 | from lp.registry.model.distroseries import DistroSeries | ||
681 | 73 | from lp.registry.model.person import Person | 69 | from lp.registry.model.person import Person |
682 | 74 | from lp.registry.model.product import Product | 70 | from lp.registry.model.product import Product |
683 | 75 | from lp.registry.model.teammembership import TeamMembership | 71 | from lp.registry.model.teammembership import TeamMembership |
684 | @@ -108,7 +104,6 @@ | |||
685 | 108 | from lp.services.librarian.model import TimeLimitedToken | 104 | from lp.services.librarian.model import TimeLimitedToken |
686 | 109 | from lp.services.log.logger import PrefixFilter | 105 | from lp.services.log.logger import PrefixFilter |
687 | 110 | from lp.services.looptuner import TunableLoop | 106 | from lp.services.looptuner import TunableLoop |
688 | 111 | from lp.services.memcache.interfaces import IMemcacheClient | ||
689 | 112 | from lp.services.oauth.model import OAuthNonce | 107 | from lp.services.oauth.model import OAuthNonce |
690 | 113 | from lp.services.openid.model.openidconsumer import OpenIDConsumerNonce | 108 | from lp.services.openid.model.openidconsumer import OpenIDConsumerNonce |
691 | 114 | from lp.services.propertycache import cachedproperty | 109 | from lp.services.propertycache import cachedproperty |
692 | @@ -123,10 +118,7 @@ | |||
693 | 123 | ) | 118 | ) |
694 | 124 | from lp.services.session.model import SessionData | 119 | from lp.services.session.model import SessionData |
695 | 125 | from lp.services.verification.model.logintoken import LoginToken | 120 | from lp.services.verification.model.logintoken import LoginToken |
696 | 126 | from lp.soyuz.interfaces.archive import MAIN_ARCHIVE_PURPOSES | ||
697 | 127 | from lp.soyuz.model.archive import Archive | 121 | from lp.soyuz.model.archive import Archive |
698 | 128 | from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild | ||
699 | 129 | from lp.soyuz.model.distroarchseries import DistroArchSeries | ||
700 | 130 | from lp.soyuz.model.publishing import SourcePackagePublishingHistory | 122 | from lp.soyuz.model.publishing import SourcePackagePublishingHistory |
701 | 131 | from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache | 123 | from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache |
702 | 132 | from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease | 124 | from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease |
703 | @@ -137,9 +129,6 @@ | |||
704 | 137 | from lp.translations.model.translationtemplateitem import ( | 129 | from lp.translations.model.translationtemplateitem import ( |
705 | 138 | TranslationTemplateItem, | 130 | TranslationTemplateItem, |
706 | 139 | ) | 131 | ) |
707 | 140 | from lp.translations.model.translationtemplatesbuild import ( | ||
708 | 141 | TranslationTemplatesBuild, | ||
709 | 142 | ) | ||
710 | 143 | from lp.translations.scripts.scrub_pofiletranslator import ( | 132 | from lp.translations.scripts.scrub_pofiletranslator import ( |
711 | 144 | ScrubPOFileTranslator, | 133 | ScrubPOFileTranslator, |
712 | 145 | ) | 134 | ) |
713 | @@ -1346,219 +1335,6 @@ | |||
714 | 1346 | transaction.commit() | 1335 | transaction.commit() |
715 | 1347 | 1336 | ||
716 | 1348 | 1337 | ||
717 | 1349 | class BinaryPackageBuildFlattener(TunableLoop): | ||
718 | 1350 | """Populates the new denormalised columns on BinaryPackageBuild.""" | ||
719 | 1351 | |||
720 | 1352 | maximum_chunk_size = 5000 | ||
721 | 1353 | |||
722 | 1354 | def __init__(self, log, abort_time=None): | ||
723 | 1355 | super(BinaryPackageBuildFlattener, self).__init__(log, abort_time) | ||
724 | 1356 | |||
725 | 1357 | self.memcache_key = '%s:bpb-flattener' % config.instance_name | ||
726 | 1358 | watermark = getUtility(IMemcacheClient).get(self.memcache_key) | ||
727 | 1359 | self.start_at = watermark or 0 | ||
728 | 1360 | self.store = IMasterStore(BinaryPackageBuild) | ||
729 | 1361 | |||
730 | 1362 | def findIDs(self): | ||
731 | 1363 | return self.store.find( | ||
732 | 1364 | BinaryPackageBuild.id, | ||
733 | 1365 | BinaryPackageBuild.id >= self.start_at, | ||
734 | 1366 | ).order_by(BinaryPackageBuild.id) | ||
735 | 1367 | |||
736 | 1368 | def isDone(self): | ||
737 | 1369 | return ( | ||
738 | 1370 | not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled') | ||
739 | 1371 | or self.findIDs().is_empty()) | ||
740 | 1372 | |||
741 | 1373 | def __call__(self, chunk_size): | ||
742 | 1374 | """See `ITunableLoop`.""" | ||
743 | 1375 | ids = list(self.findIDs()[:chunk_size]) | ||
744 | 1376 | updated_columns = { | ||
745 | 1377 | BinaryPackageBuild._new_archive_id: PackageBuild.archive_id, | ||
746 | 1378 | BinaryPackageBuild._new_pocket: PackageBuild.pocket, | ||
747 | 1379 | BinaryPackageBuild._new_processor_id: BuildFarmJob.processor_id, | ||
748 | 1380 | BinaryPackageBuild._new_virtualized: BuildFarmJob.virtualized, | ||
749 | 1381 | BinaryPackageBuild._new_date_created: BuildFarmJob.date_created, | ||
750 | 1382 | BinaryPackageBuild._new_date_started: BuildFarmJob.date_started, | ||
751 | 1383 | BinaryPackageBuild._new_date_finished: BuildFarmJob.date_finished, | ||
752 | 1384 | BinaryPackageBuild._new_date_first_dispatched: | ||
753 | 1385 | BuildFarmJob.date_first_dispatched, | ||
754 | 1386 | BinaryPackageBuild._new_builder_id: BuildFarmJob.builder_id, | ||
755 | 1387 | BinaryPackageBuild._new_status: BuildFarmJob.status, | ||
756 | 1388 | BinaryPackageBuild._new_log_id: BuildFarmJob.log_id, | ||
757 | 1389 | BinaryPackageBuild._new_upload_log_id: PackageBuild.upload_log_id, | ||
758 | 1390 | BinaryPackageBuild._new_dependencies: PackageBuild.dependencies, | ||
759 | 1391 | BinaryPackageBuild._new_failure_count: BuildFarmJob.failure_count, | ||
760 | 1392 | BinaryPackageBuild._new_build_farm_job_id: BuildFarmJob.id, | ||
761 | 1393 | BinaryPackageBuild._new_distribution_id: | ||
762 | 1394 | DistroSeries.distributionID, | ||
763 | 1395 | BinaryPackageBuild._new_distro_series_id: DistroSeries.id, | ||
764 | 1396 | BinaryPackageBuild._new_source_package_name_id: | ||
765 | 1397 | SourcePackageRelease.sourcepackagenameID, | ||
766 | 1398 | BinaryPackageBuild._new_is_distro_archive: | ||
767 | 1399 | Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES), | ||
768 | 1400 | } | ||
769 | 1401 | condition = And( | ||
770 | 1402 | BinaryPackageBuild.id.is_in(ids), | ||
771 | 1403 | PackageBuild.id == BinaryPackageBuild.package_build_id, | ||
772 | 1404 | BuildFarmJob.id == PackageBuild.build_farm_job_id) | ||
773 | 1405 | extra_condition = And( | ||
774 | 1406 | condition, | ||
775 | 1407 | SourcePackageRelease.id == | ||
776 | 1408 | BinaryPackageBuild.source_package_release_id, | ||
777 | 1409 | Archive.id == PackageBuild.archive_id, | ||
778 | 1410 | DistroArchSeries.id == BinaryPackageBuild.distro_arch_series_id, | ||
779 | 1411 | DistroSeries.id == DistroArchSeries.distroseriesID) | ||
780 | 1412 | self.store.execute( | ||
781 | 1413 | BulkUpdate( | ||
782 | 1414 | updated_columns, table=BinaryPackageBuild, | ||
783 | 1415 | values=( | ||
784 | 1416 | PackageBuild, BuildFarmJob, Archive, DistroArchSeries, | ||
785 | 1417 | DistroSeries, SourcePackageRelease), | ||
786 | 1418 | where=And(condition, extra_condition))) | ||
787 | 1419 | self.store.execute( | ||
788 | 1420 | BulkUpdate( | ||
789 | 1421 | {BuildFarmJob.archive_id: PackageBuild.archive_id}, | ||
790 | 1422 | table=BuildFarmJob, values=(PackageBuild, BinaryPackageBuild), | ||
791 | 1423 | where=condition)) | ||
792 | 1424 | transaction.commit() | ||
793 | 1425 | self.start_at = ids[-1] + 1 | ||
794 | 1426 | getUtility(IMemcacheClient).set(self.memcache_key, self.start_at) | ||
795 | 1427 | |||
796 | 1428 | |||
797 | 1429 | class SourcePackageRecipeBuildFlattener(TunableLoop): | ||
798 | 1430 | """Populates the new denormalised columns on SourcePackageRecipeBuild.""" | ||
799 | 1431 | |||
800 | 1432 | maximum_chunk_size = 5000 | ||
801 | 1433 | |||
802 | 1434 | def __init__(self, log, abort_time=None): | ||
803 | 1435 | super(SourcePackageRecipeBuildFlattener, self).__init__( | ||
804 | 1436 | log, abort_time) | ||
805 | 1437 | |||
806 | 1438 | self.memcache_key = '%s:sprb-flattener' % config.instance_name | ||
807 | 1439 | watermark = getUtility(IMemcacheClient).get(self.memcache_key) | ||
808 | 1440 | self.start_at = watermark or 0 | ||
809 | 1441 | self.store = IMasterStore(SourcePackageRecipeBuild) | ||
810 | 1442 | |||
811 | 1443 | def findIDs(self): | ||
812 | 1444 | return self.store.find( | ||
813 | 1445 | SourcePackageRecipeBuild.id, | ||
814 | 1446 | SourcePackageRecipeBuild.id >= self.start_at, | ||
815 | 1447 | ).order_by(SourcePackageRecipeBuild.id) | ||
816 | 1448 | |||
817 | 1449 | def isDone(self): | ||
818 | 1450 | return ( | ||
819 | 1451 | not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled') | ||
820 | 1452 | or self.findIDs().is_empty()) | ||
821 | 1453 | |||
822 | 1454 | def __call__(self, chunk_size): | ||
823 | 1455 | """See `ITunableLoop`.""" | ||
824 | 1456 | ids = list(self.findIDs()[:chunk_size]) | ||
825 | 1457 | updated_columns = { | ||
826 | 1458 | SourcePackageRecipeBuild._new_archive_id: PackageBuild.archive_id, | ||
827 | 1459 | SourcePackageRecipeBuild._new_pocket: PackageBuild.pocket, | ||
828 | 1460 | SourcePackageRecipeBuild._new_processor_id: | ||
829 | 1461 | BuildFarmJob.processor_id, | ||
830 | 1462 | SourcePackageRecipeBuild._new_virtualized: | ||
831 | 1463 | BuildFarmJob.virtualized, | ||
832 | 1464 | SourcePackageRecipeBuild._new_date_created: | ||
833 | 1465 | BuildFarmJob.date_created, | ||
834 | 1466 | SourcePackageRecipeBuild._new_date_started: | ||
835 | 1467 | BuildFarmJob.date_started, | ||
836 | 1468 | SourcePackageRecipeBuild._new_date_finished: | ||
837 | 1469 | BuildFarmJob.date_finished, | ||
838 | 1470 | SourcePackageRecipeBuild._new_date_first_dispatched: | ||
839 | 1471 | BuildFarmJob.date_first_dispatched, | ||
840 | 1472 | SourcePackageRecipeBuild._new_builder_id: BuildFarmJob.builder_id, | ||
841 | 1473 | SourcePackageRecipeBuild._new_status: BuildFarmJob.status, | ||
842 | 1474 | SourcePackageRecipeBuild._new_log_id: BuildFarmJob.log_id, | ||
843 | 1475 | SourcePackageRecipeBuild._new_upload_log_id: | ||
844 | 1476 | PackageBuild.upload_log_id, | ||
845 | 1477 | SourcePackageRecipeBuild._new_dependencies: | ||
846 | 1478 | PackageBuild.dependencies, | ||
847 | 1479 | SourcePackageRecipeBuild._new_failure_count: | ||
848 | 1480 | BuildFarmJob.failure_count, | ||
849 | 1481 | SourcePackageRecipeBuild._new_build_farm_job_id: BuildFarmJob.id, | ||
850 | 1482 | } | ||
851 | 1483 | condition = And( | ||
852 | 1484 | SourcePackageRecipeBuild.id.is_in(ids), | ||
853 | 1485 | PackageBuild.id == SourcePackageRecipeBuild.package_build_id, | ||
854 | 1486 | BuildFarmJob.id == PackageBuild.build_farm_job_id) | ||
855 | 1487 | self.store.execute( | ||
856 | 1488 | BulkUpdate( | ||
857 | 1489 | updated_columns, table=SourcePackageRecipeBuild, | ||
858 | 1490 | values=(PackageBuild, BuildFarmJob), where=condition)) | ||
859 | 1491 | self.store.execute( | ||
860 | 1492 | BulkUpdate( | ||
861 | 1493 | {BuildFarmJob.archive_id: PackageBuild.archive_id}, | ||
862 | 1494 | table=BuildFarmJob, | ||
863 | 1495 | values=(PackageBuild, SourcePackageRecipeBuild), | ||
864 | 1496 | where=condition)) | ||
865 | 1497 | transaction.commit() | ||
866 | 1498 | self.start_at = ids[-1] + 1 | ||
867 | 1499 | getUtility(IMemcacheClient).set(self.memcache_key, self.start_at) | ||
868 | 1500 | |||
869 | 1501 | |||
870 | 1502 | class TranslationTemplatesBuildFlattener(TunableLoop): | ||
871 | 1503 | """Populates the new denormalised columns on TranslationTemplatesBuild.""" | ||
872 | 1504 | |||
873 | 1505 | maximum_chunk_size = 5000 | ||
874 | 1506 | |||
875 | 1507 | def __init__(self, log, abort_time=None): | ||
876 | 1508 | super(TranslationTemplatesBuildFlattener, self).__init__( | ||
877 | 1509 | log, abort_time) | ||
878 | 1510 | |||
879 | 1511 | self.memcache_key = '%s:ttb-flattener' % config.instance_name | ||
880 | 1512 | watermark = getUtility(IMemcacheClient).get(self.memcache_key) | ||
881 | 1513 | self.start_at = watermark or 0 | ||
882 | 1514 | self.store = IMasterStore(TranslationTemplatesBuild) | ||
883 | 1515 | |||
884 | 1516 | def findIDs(self): | ||
885 | 1517 | return self.store.find( | ||
886 | 1518 | TranslationTemplatesBuild.id, | ||
887 | 1519 | TranslationTemplatesBuild.id >= self.start_at, | ||
888 | 1520 | ).order_by(TranslationTemplatesBuild.id) | ||
889 | 1521 | |||
890 | 1522 | def isDone(self): | ||
891 | 1523 | return ( | ||
892 | 1524 | not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled') | ||
893 | 1525 | or self.findIDs().is_empty()) | ||
894 | 1526 | |||
895 | 1527 | def __call__(self, chunk_size): | ||
896 | 1528 | """See `ITunableLoop`.""" | ||
897 | 1529 | ids = list(self.findIDs()[:chunk_size]) | ||
898 | 1530 | updated_columns = { | ||
899 | 1531 | TranslationTemplatesBuild._new_processor_id: | ||
900 | 1532 | BuildFarmJob.processor_id, | ||
901 | 1533 | TranslationTemplatesBuild._new_virtualized: | ||
902 | 1534 | BuildFarmJob.virtualized, | ||
903 | 1535 | TranslationTemplatesBuild._new_date_created: | ||
904 | 1536 | BuildFarmJob.date_created, | ||
905 | 1537 | TranslationTemplatesBuild._new_date_started: | ||
906 | 1538 | BuildFarmJob.date_started, | ||
907 | 1539 | TranslationTemplatesBuild._new_date_finished: | ||
908 | 1540 | BuildFarmJob.date_finished, | ||
909 | 1541 | TranslationTemplatesBuild._new_date_first_dispatched: | ||
910 | 1542 | BuildFarmJob.date_first_dispatched, | ||
911 | 1543 | TranslationTemplatesBuild._new_builder_id: BuildFarmJob.builder_id, | ||
912 | 1544 | TranslationTemplatesBuild._new_status: BuildFarmJob.status, | ||
913 | 1545 | TranslationTemplatesBuild._new_log_id: BuildFarmJob.log_id, | ||
914 | 1546 | TranslationTemplatesBuild._new_failure_count: | ||
915 | 1547 | BuildFarmJob.failure_count, | ||
916 | 1548 | } | ||
917 | 1549 | self.store.execute( | ||
918 | 1550 | BulkUpdate( | ||
919 | 1551 | updated_columns, table=TranslationTemplatesBuild, | ||
920 | 1552 | values=(PackageBuild, BuildFarmJob), | ||
921 | 1553 | where=And( | ||
922 | 1554 | TranslationTemplatesBuild.id.is_in(ids), | ||
923 | 1555 | BuildFarmJob.id == | ||
924 | 1556 | TranslationTemplatesBuild.build_farm_job_id))) | ||
925 | 1557 | transaction.commit() | ||
926 | 1558 | self.start_at = ids[-1] + 1 | ||
927 | 1559 | getUtility(IMemcacheClient).set(self.memcache_key, self.start_at) | ||
928 | 1560 | |||
929 | 1561 | |||
930 | 1562 | class BaseDatabaseGarbageCollector(LaunchpadCronScript): | 1338 | class BaseDatabaseGarbageCollector(LaunchpadCronScript): |
931 | 1563 | """Abstract base class to run a collection of TunableLoops.""" | 1339 | """Abstract base class to run a collection of TunableLoops.""" |
932 | 1564 | script_name = None # Script name for locking and database user. Override. | 1340 | script_name = None # Script name for locking and database user. Override. |
933 | @@ -1814,9 +1590,6 @@ | |||
934 | 1814 | UnusedSessionPruner, | 1590 | UnusedSessionPruner, |
935 | 1815 | DuplicateSessionPruner, | 1591 | DuplicateSessionPruner, |
936 | 1816 | BugHeatUpdater, | 1592 | BugHeatUpdater, |
937 | 1817 | BinaryPackageBuildFlattener, | ||
938 | 1818 | SourcePackageRecipeBuildFlattener, | ||
939 | 1819 | TranslationTemplatesBuildFlattener, | ||
940 | 1820 | ] | 1593 | ] |
941 | 1821 | experimental_tunable_loops = [] | 1594 | experimental_tunable_loops = [] |
942 | 1822 | 1595 | ||
943 | 1823 | 1596 | ||
944 | === modified file 'lib/lp/scripts/tests/test_garbo.py' | |||
945 | --- lib/lp/scripts/tests/test_garbo.py 2013-02-01 03:49:23 +0000 | |||
946 | +++ lib/lp/scripts/tests/test_garbo.py 2013-01-17 00:25:48 +0000 | |||
947 | @@ -31,7 +31,6 @@ | |||
948 | 31 | from testtools.matchers import ( | 31 | from testtools.matchers import ( |
949 | 32 | Equals, | 32 | Equals, |
950 | 33 | GreaterThan, | 33 | GreaterThan, |
951 | 34 | MatchesStructure, | ||
952 | 35 | ) | 34 | ) |
953 | 36 | import transaction | 35 | import transaction |
954 | 37 | from zope.component import getUtility | 36 | from zope.component import getUtility |
955 | @@ -43,7 +42,6 @@ | |||
956 | 43 | BugNotification, | 42 | BugNotification, |
957 | 44 | BugNotificationRecipient, | 43 | BugNotificationRecipient, |
958 | 45 | ) | 44 | ) |
959 | 46 | from lp.buildmaster.enums import BuildStatus | ||
960 | 47 | from lp.code.bzr import ( | 45 | from lp.code.bzr import ( |
961 | 48 | BranchFormat, | 46 | BranchFormat, |
962 | 49 | RepositoryFormat, | 47 | RepositoryFormat, |
963 | @@ -60,7 +58,6 @@ | |||
964 | 60 | BranchSharingPolicy, | 58 | BranchSharingPolicy, |
965 | 61 | BugSharingPolicy, | 59 | BugSharingPolicy, |
966 | 62 | ) | 60 | ) |
967 | 63 | from lp.code.model.sourcepackagerecipebuild import SourcePackageRecipeBuild | ||
968 | 64 | from lp.registry.interfaces.accesspolicy import IAccessPolicySource | 61 | from lp.registry.interfaces.accesspolicy import IAccessPolicySource |
969 | 65 | from lp.registry.interfaces.person import IPersonSet | 62 | from lp.registry.interfaces.person import IPersonSet |
970 | 66 | from lp.registry.interfaces.teammembership import TeamMembershipStatus | 63 | from lp.registry.interfaces.teammembership import TeamMembershipStatus |
971 | @@ -117,7 +114,6 @@ | |||
972 | 117 | from lp.services.verification.model.logintoken import LoginToken | 114 | from lp.services.verification.model.logintoken import LoginToken |
973 | 118 | from lp.services.worlddata.interfaces.language import ILanguageSet | 115 | from lp.services.worlddata.interfaces.language import ILanguageSet |
974 | 119 | from lp.soyuz.enums import PackagePublishingStatus | 116 | from lp.soyuz.enums import PackagePublishingStatus |
975 | 120 | from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild | ||
976 | 121 | from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache | 117 | from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache |
977 | 122 | from lp.testing import ( | 118 | from lp.testing import ( |
978 | 123 | FakeAdapterMixin, | 119 | FakeAdapterMixin, |
979 | @@ -125,10 +121,7 @@ | |||
980 | 125 | TestCase, | 121 | TestCase, |
981 | 126 | TestCaseWithFactory, | 122 | TestCaseWithFactory, |
982 | 127 | ) | 123 | ) |
987 | 128 | from lp.testing.dbuser import ( | 124 | from lp.testing.dbuser import switch_dbuser |
984 | 129 | dbuser, | ||
985 | 130 | switch_dbuser, | ||
986 | 131 | ) | ||
988 | 132 | from lp.testing.layers import ( | 125 | from lp.testing.layers import ( |
989 | 133 | DatabaseLayer, | 126 | DatabaseLayer, |
990 | 134 | LaunchpadScriptLayer, | 127 | LaunchpadScriptLayer, |
991 | @@ -140,9 +133,6 @@ | |||
992 | 140 | from lp.translations.model.translationtemplateitem import ( | 133 | from lp.translations.model.translationtemplateitem import ( |
993 | 141 | TranslationTemplateItem, | 134 | TranslationTemplateItem, |
994 | 142 | ) | 135 | ) |
995 | 143 | from lp.translations.model.translationtemplatesbuild import ( | ||
996 | 144 | TranslationTemplatesBuild, | ||
997 | 145 | ) | ||
998 | 146 | 136 | ||
999 | 147 | 137 | ||
1000 | 148 | class TestGarboScript(TestCase): | 138 | class TestGarboScript(TestCase): |
1001 | @@ -1283,131 +1273,6 @@ | |||
1002 | 1283 | 'PopulateLatestPersonSourcePackageReleaseCache') | 1273 | 'PopulateLatestPersonSourcePackageReleaseCache') |
1003 | 1284 | self.assertEqual(spph_2.id, job_data['last_spph_id']) | 1274 | self.assertEqual(spph_2.id, job_data['last_spph_id']) |
1004 | 1285 | 1275 | ||
1005 | 1286 | def test_BinaryPackageBuildFlattener(self): | ||
1006 | 1287 | store = IMasterStore(BinaryPackageBuild) | ||
1007 | 1288 | # Sampledata builds start off with the new columns set to None, | ||
1008 | 1289 | # and garbo won't run without a feature flag set. | ||
1009 | 1290 | self.runHourly() | ||
1010 | 1291 | self.assertNotEqual( | ||
1011 | 1292 | 0, store.find(BinaryPackageBuild, _new_archive=None).count()) | ||
1012 | 1293 | |||
1013 | 1294 | # But after a garbo run they're all set properly. | ||
1014 | 1295 | with dbuser('testadmin'): | ||
1015 | 1296 | IMasterStore(FeatureFlag).add(FeatureFlag( | ||
1016 | 1297 | u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true')) | ||
1017 | 1298 | self.runHourly() | ||
1018 | 1299 | self.assertEqual( | ||
1019 | 1300 | 0, store.find(BinaryPackageBuild, _new_archive=None).count()) | ||
1020 | 1301 | |||
1021 | 1302 | with dbuser('testadmin'): | ||
1022 | 1303 | # Create a build with lots of attributes set. | ||
1023 | 1304 | build = self.factory.makeBinaryPackageBuild() | ||
1024 | 1305 | build.gotFailure() | ||
1025 | 1306 | build.updateStatus( | ||
1026 | 1307 | BuildStatus.BUILDING, builder=self.factory.makeBuilder()) | ||
1027 | 1308 | build.updateStatus(BuildStatus.FULLYBUILT) | ||
1028 | 1309 | build.setLog(self.factory.makeLibraryFileAlias()) | ||
1029 | 1310 | build.storeUploadLog('uploaded') | ||
1030 | 1311 | |||
1031 | 1312 | # Manually unset the build's denormed columns. | ||
1032 | 1313 | attrs = ( | ||
1033 | 1314 | 'archive', 'pocket', 'processor', 'virtualized', | ||
1034 | 1315 | 'date_created', 'date_started', 'date_finished', | ||
1035 | 1316 | 'date_first_dispatched', 'builder', 'status', 'log', | ||
1036 | 1317 | 'upload_log', 'dependencies', 'failure_count', | ||
1037 | 1318 | 'build_farm_job', 'distribution', 'distro_series', | ||
1038 | 1319 | 'source_package_name', 'is_distro_archive') | ||
1039 | 1320 | for attr in attrs: | ||
1040 | 1321 | setattr(removeSecurityProxy(build), '_new_' + attr, None) | ||
1041 | 1322 | removeSecurityProxy(build.build_farm_job).archive = None | ||
1042 | 1323 | self.assertEqual( | ||
1043 | 1324 | 1, store.find(BinaryPackageBuild, _new_archive=None).count()) | ||
1044 | 1325 | self.runHourly() | ||
1045 | 1326 | self.assertEqual( | ||
1046 | 1327 | 0, store.find(BinaryPackageBuild, _new_archive=None).count()) | ||
1047 | 1328 | |||
1048 | 1329 | self.assertThat( | ||
1049 | 1330 | removeSecurityProxy(build), | ||
1050 | 1331 | MatchesStructure.byEquality( | ||
1051 | 1332 | **dict( | ||
1052 | 1333 | ('_new_' + attr, getattr(build, attr)) for attr in attrs))) | ||
1053 | 1334 | self.assertEqual( | ||
1054 | 1335 | build.archive, removeSecurityProxy(build.build_farm_job).archive) | ||
1055 | 1336 | |||
1056 | 1337 | def test_SourcePackageRecipeBuildFlattener(self): | ||
1057 | 1338 | store = IMasterStore(BinaryPackageBuild) | ||
1058 | 1339 | with dbuser('testadmin'): | ||
1059 | 1340 | IMasterStore(FeatureFlag).add(FeatureFlag( | ||
1060 | 1341 | u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true')) | ||
1061 | 1342 | |||
1062 | 1343 | with dbuser('testadmin'): | ||
1063 | 1344 | # Create a build with lots of attributes set. | ||
1064 | 1345 | build = self.factory.makeSourcePackageRecipeBuild() | ||
1065 | 1346 | build.gotFailure() | ||
1066 | 1347 | build.updateStatus( | ||
1067 | 1348 | BuildStatus.BUILDING, builder=self.factory.makeBuilder()) | ||
1068 | 1349 | build.updateStatus(BuildStatus.FULLYBUILT) | ||
1069 | 1350 | build.setLog(self.factory.makeLibraryFileAlias()) | ||
1070 | 1351 | build.storeUploadLog('uploaded') | ||
1071 | 1352 | |||
1072 | 1353 | # Manually unset the build's denormed columns. | ||
1073 | 1354 | attrs = ( | ||
1074 | 1355 | 'archive', 'pocket', 'processor', 'virtualized', | ||
1075 | 1356 | 'date_created', 'date_started', 'date_finished', | ||
1076 | 1357 | 'date_first_dispatched', 'builder', 'status', 'log', | ||
1077 | 1358 | 'upload_log', 'dependencies', 'failure_count', | ||
1078 | 1359 | 'build_farm_job') | ||
1079 | 1360 | for attr in attrs: | ||
1080 | 1361 | setattr(removeSecurityProxy(build), '_new_' + attr, None) | ||
1081 | 1362 | removeSecurityProxy(build).build_farm_job.archive = None | ||
1082 | 1363 | self.assertEqual( | ||
1083 | 1364 | 1, store.find(SourcePackageRecipeBuild, _new_archive=None).count()) | ||
1084 | 1365 | self.runHourly() | ||
1085 | 1366 | self.assertEqual( | ||
1086 | 1367 | 0, store.find(SourcePackageRecipeBuild, _new_archive=None).count()) | ||
1087 | 1368 | |||
1088 | 1369 | self.assertThat( | ||
1089 | 1370 | removeSecurityProxy(build), | ||
1090 | 1371 | MatchesStructure.byEquality( | ||
1091 | 1372 | **dict( | ||
1092 | 1373 | ('_new_' + attr, getattr(build, attr)) for attr in attrs))) | ||
1093 | 1374 | self.assertEqual( | ||
1094 | 1375 | build.archive, removeSecurityProxy(build.build_farm_job).archive) | ||
1095 | 1376 | |||
1096 | 1377 | def test_TranslationTemplatesBuildFlattener(self): | ||
1097 | 1378 | store = IMasterStore(BinaryPackageBuild) | ||
1098 | 1379 | with dbuser('testadmin'): | ||
1099 | 1380 | IMasterStore(FeatureFlag).add(FeatureFlag( | ||
1100 | 1381 | u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true')) | ||
1101 | 1382 | |||
1102 | 1383 | with dbuser('testadmin'): | ||
1103 | 1384 | # Create a build with lots of attributes set. | ||
1104 | 1385 | build = self.factory.makeTranslationTemplatesBuildJob().build | ||
1105 | 1386 | build.gotFailure() | ||
1106 | 1387 | build.updateStatus( | ||
1107 | 1388 | BuildStatus.BUILDING, builder=self.factory.makeBuilder()) | ||
1108 | 1389 | build.updateStatus(BuildStatus.FULLYBUILT) | ||
1109 | 1390 | build.setLog(self.factory.makeLibraryFileAlias()) | ||
1110 | 1391 | |||
1111 | 1392 | # Manually unset the build's denormed columns. | ||
1112 | 1393 | attrs = ( | ||
1113 | 1394 | 'processor', 'virtualized', 'date_created', 'date_started', | ||
1114 | 1395 | 'date_finished', 'date_first_dispatched', 'builder', 'status', | ||
1115 | 1396 | 'log', 'failure_count') | ||
1116 | 1397 | for attr in attrs: | ||
1117 | 1398 | setattr(removeSecurityProxy(build), '_new_' + attr, None) | ||
1118 | 1399 | self.assertEqual( | ||
1119 | 1400 | 1, store.find(TranslationTemplatesBuild, _new_status=None).count()) | ||
1120 | 1401 | self.runHourly() | ||
1121 | 1402 | self.assertEqual( | ||
1122 | 1403 | 0, store.find(TranslationTemplatesBuild, _new_status=None).count()) | ||
1123 | 1404 | |||
1124 | 1405 | self.assertThat( | ||
1125 | 1406 | removeSecurityProxy(build), | ||
1126 | 1407 | MatchesStructure.byEquality( | ||
1127 | 1408 | **dict( | ||
1128 | 1409 | ('_new_' + attr, getattr(build, attr)) for attr in attrs))) | ||
1129 | 1410 | |||
1130 | 1411 | 1276 | ||
1131 | 1412 | class TestGarboTasks(TestCaseWithFactory): | 1277 | class TestGarboTasks(TestCaseWithFactory): |
1132 | 1413 | layer = LaunchpadZopelessLayer | 1278 | layer = LaunchpadZopelessLayer |
1133 | 1414 | 1279 | ||
1134 | === modified file 'lib/lp/soyuz/doc/sourcepackagerelease.txt' | |||
1135 | --- lib/lp/soyuz/doc/sourcepackagerelease.txt 2012-07-05 09:04:09 +0000 | |||
1136 | +++ lib/lp/soyuz/doc/sourcepackagerelease.txt 2013-02-01 03:49:23 +0000 | |||
1137 | @@ -60,7 +60,6 @@ | |||
1138 | 60 | Mozilla-firefox 0.9 has got some builds. including a PPA build. The 'builds' | 60 | Mozilla-firefox 0.9 has got some builds. including a PPA build. The 'builds' |
1139 | 61 | property only returns the non-PPA builds. | 61 | property only returns the non-PPA builds. |
1140 | 62 | 62 | ||
1141 | 63 | >>> from lp.buildmaster.model.packagebuild import PackageBuild | ||
1142 | 64 | >>> from lp.registry.interfaces.person import IPersonSet | 63 | >>> from lp.registry.interfaces.person import IPersonSet |
1143 | 65 | >>> from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild | 64 | >>> from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild |
1144 | 66 | >>> from storm.store import Store | 65 | >>> from storm.store import Store |
1145 | @@ -68,8 +67,7 @@ | |||
1146 | 68 | >>> ff_ppa_build = Store.of(cprov_ppa).find( | 67 | >>> ff_ppa_build = Store.of(cprov_ppa).find( |
1147 | 69 | ... BinaryPackageBuild, | 68 | ... BinaryPackageBuild, |
1148 | 70 | ... BinaryPackageBuild.source_package_release == spr, | 69 | ... BinaryPackageBuild.source_package_release == spr, |
1151 | 71 | ... BinaryPackageBuild.package_build == PackageBuild.id, | 70 | ... BinaryPackageBuild._new_archive == cprov_ppa) |
1150 | 72 | ... PackageBuild.archive == cprov_ppa) | ||
1152 | 73 | >>> ff_ppa_build.count() | 71 | >>> ff_ppa_build.count() |
1153 | 74 | 1 | 72 | 1 |
1154 | 75 | >>> ff_ppa_build[0].archive.purpose.name | 73 | >>> ff_ppa_build[0].archive.purpose.name |
1155 | 76 | 74 | ||
1156 | === modified file 'lib/lp/soyuz/interfaces/binarypackagebuild.py' | |||
1157 | --- lib/lp/soyuz/interfaces/binarypackagebuild.py 2013-02-01 03:49:23 +0000 | |||
1158 | +++ lib/lp/soyuz/interfaces/binarypackagebuild.py 2013-02-01 03:49:23 +0000 | |||
1159 | @@ -63,10 +63,6 @@ | |||
1160 | 63 | """A Build interface for items requiring launchpad.View.""" | 63 | """A Build interface for items requiring launchpad.View.""" |
1161 | 64 | id = Int(title=_('ID'), required=True, readonly=True) | 64 | id = Int(title=_('ID'), required=True, readonly=True) |
1162 | 65 | 65 | ||
1163 | 66 | package_build = Reference( | ||
1164 | 67 | title=_('Package build'), schema=IPackageBuild, required=True, | ||
1165 | 68 | readonly=True, description=_('The base package build')) | ||
1166 | 69 | |||
1167 | 70 | # Overridden from IBuildFarmJob to ensure required is True. | 66 | # Overridden from IBuildFarmJob to ensure required is True. |
1168 | 71 | processor = Reference( | 67 | processor = Reference( |
1169 | 72 | title=_("Processor"), schema=IProcessor, | 68 | title=_("Processor"), schema=IProcessor, |
1170 | @@ -100,9 +96,6 @@ | |||
1171 | 100 | distro_series = Attribute("Direct parent needed by CanonicalURL") | 96 | distro_series = Attribute("Direct parent needed by CanonicalURL") |
1172 | 101 | arch_tag = exported( | 97 | arch_tag = exported( |
1173 | 102 | Text(title=_("Architecture tag"), required=False)) | 98 | Text(title=_("Architecture tag"), required=False)) |
1174 | 103 | source_package_name = Attribute("Source package name") | ||
1175 | 104 | is_distro_archive = Attribute( | ||
1176 | 105 | "Whether the target archive belongs to the distro") | ||
1177 | 106 | distributionsourcepackagerelease = Attribute("The page showing the " | 99 | distributionsourcepackagerelease = Attribute("The page showing the " |
1178 | 107 | "details for this sourcepackagerelease in this distribution.") | 100 | "details for this sourcepackagerelease in this distribution.") |
1179 | 108 | binarypackages = Attribute( | 101 | binarypackages = Attribute( |
1180 | 109 | 102 | ||
1181 | === modified file 'lib/lp/soyuz/model/archive.py' | |||
1182 | --- lib/lp/soyuz/model/archive.py 2013-01-11 00:18:49 +0000 | |||
1183 | +++ lib/lp/soyuz/model/archive.py 2013-02-01 03:49:23 +0000 | |||
1184 | @@ -55,8 +55,6 @@ | |||
1185 | 55 | ) | 55 | ) |
1186 | 56 | from lp.buildmaster.enums import BuildStatus | 56 | from lp.buildmaster.enums import BuildStatus |
1187 | 57 | from lp.buildmaster.interfaces.packagebuild import IPackageBuildSet | 57 | from lp.buildmaster.interfaces.packagebuild import IPackageBuildSet |
1188 | 58 | from lp.buildmaster.model.buildfarmjob import BuildFarmJob | ||
1189 | 59 | from lp.buildmaster.model.packagebuild import PackageBuild | ||
1190 | 60 | from lp.registry.enums import ( | 58 | from lp.registry.enums import ( |
1191 | 61 | INCLUSIVE_TEAM_POLICY, | 59 | INCLUSIVE_TEAM_POLICY, |
1192 | 62 | PersonVisibility, | 60 | PersonVisibility, |
1193 | @@ -86,7 +84,10 @@ | |||
1194 | 86 | IStoreSelector, | 84 | IStoreSelector, |
1195 | 87 | MAIN_STORE, | 85 | MAIN_STORE, |
1196 | 88 | ) | 86 | ) |
1198 | 89 | from lp.services.database.lpstorm import ISlaveStore | 87 | from lp.services.database.lpstorm import ( |
1199 | 88 | ISlaveStore, | ||
1200 | 89 | IStore, | ||
1201 | 90 | ) | ||
1202 | 90 | from lp.services.database.sqlbase import ( | 91 | from lp.services.database.sqlbase import ( |
1203 | 91 | cursor, | 92 | cursor, |
1204 | 92 | quote, | 93 | quote, |
1205 | @@ -1117,20 +1118,17 @@ | |||
1206 | 1117 | extra_exprs = [] | 1118 | extra_exprs = [] |
1207 | 1118 | if not include_needsbuild: | 1119 | if not include_needsbuild: |
1208 | 1119 | extra_exprs.append( | 1120 | extra_exprs.append( |
1210 | 1120 | BuildFarmJob.status != BuildStatus.NEEDSBUILD) | 1121 | BinaryPackageBuild._new_status != BuildStatus.NEEDSBUILD) |
1211 | 1121 | 1122 | ||
1212 | 1122 | find_spec = ( | 1123 | find_spec = ( |
1214 | 1123 | BuildFarmJob.status, | 1124 | BinaryPackageBuild._new_status, |
1215 | 1124 | Count(BinaryPackageBuild.id), | 1125 | Count(BinaryPackageBuild.id), |
1216 | 1125 | ) | 1126 | ) |
1219 | 1126 | result = store.using( | 1127 | result = store.find( |
1218 | 1127 | BinaryPackageBuild, PackageBuild, BuildFarmJob).find( | ||
1220 | 1128 | find_spec, | 1128 | find_spec, |
1226 | 1129 | BinaryPackageBuild.package_build == PackageBuild.id, | 1129 | BinaryPackageBuild._new_archive == self, |
1227 | 1130 | PackageBuild.archive == self, | 1130 | *extra_exprs).group_by(BinaryPackageBuild._new_status).order_by( |
1228 | 1131 | PackageBuild.build_farm_job == BuildFarmJob.id, | 1131 | BinaryPackageBuild._new_status) |
1224 | 1132 | *extra_exprs).group_by(BuildFarmJob.status).order_by( | ||
1225 | 1133 | BuildFarmJob.status) | ||
1229 | 1134 | 1132 | ||
1230 | 1135 | # Create a map for each count summary to a number of buildstates: | 1133 | # Create a map for each count summary to a number of buildstates: |
1231 | 1136 | count_map = { | 1134 | count_map = { |
1232 | @@ -1898,18 +1896,14 @@ | |||
1233 | 1898 | """See `IArchive`.""" | 1896 | """See `IArchive`.""" |
1234 | 1899 | store = Store.of(self) | 1897 | store = Store.of(self) |
1235 | 1900 | 1898 | ||
1236 | 1901 | base_query = ( | ||
1237 | 1902 | BinaryPackageBuild.package_build == PackageBuild.id, | ||
1238 | 1903 | PackageBuild.archive == self, | ||
1239 | 1904 | PackageBuild.build_farm_job == BuildFarmJob.id) | ||
1240 | 1905 | sprs_building = store.find( | 1899 | sprs_building = store.find( |
1241 | 1906 | BinaryPackageBuild.source_package_release_id, | 1900 | BinaryPackageBuild.source_package_release_id, |
1244 | 1907 | BuildFarmJob.status == BuildStatus.BUILDING, | 1901 | BinaryPackageBuild._new_archive == self, |
1245 | 1908 | *base_query) | 1902 | BinaryPackageBuild._new_status == BuildStatus.BUILDING) |
1246 | 1909 | sprs_waiting = store.find( | 1903 | sprs_waiting = store.find( |
1247 | 1910 | BinaryPackageBuild.source_package_release_id, | 1904 | BinaryPackageBuild.source_package_release_id, |
1250 | 1911 | BuildFarmJob.status == BuildStatus.NEEDSBUILD, | 1905 | BinaryPackageBuild._new_archive == self, |
1251 | 1912 | *base_query) | 1906 | BinaryPackageBuild._new_status == BuildStatus.NEEDSBUILD) |
1252 | 1913 | 1907 | ||
1253 | 1914 | # A package is not counted as waiting if it already has at least | 1908 | # A package is not counted as waiting if it already has at least |
1254 | 1915 | # one build building. | 1909 | # one build building. |
1255 | @@ -1924,17 +1918,13 @@ | |||
1256 | 1924 | 1918 | ||
1257 | 1925 | extra_exprs = [] | 1919 | extra_exprs = [] |
1258 | 1926 | if build_status is not None: | 1920 | if build_status is not None: |
1263 | 1927 | extra_exprs = [ | 1921 | extra_exprs = [BinaryPackageBuild._new_status == build_status] |
1260 | 1928 | PackageBuild.build_farm_job == BuildFarmJob.id, | ||
1261 | 1929 | BuildFarmJob.status == build_status, | ||
1262 | 1930 | ] | ||
1264 | 1931 | 1922 | ||
1265 | 1932 | result_set = store.find( | 1923 | result_set = store.find( |
1266 | 1933 | SourcePackageRelease, | 1924 | SourcePackageRelease, |
1267 | 1934 | (BinaryPackageBuild.source_package_release_id == | 1925 | (BinaryPackageBuild.source_package_release_id == |
1268 | 1935 | SourcePackageRelease.id), | 1926 | SourcePackageRelease.id), |
1271 | 1936 | BinaryPackageBuild.package_build == PackageBuild.id, | 1927 | BinaryPackageBuild._new_archive == self, |
1270 | 1937 | PackageBuild.archive == self, | ||
1272 | 1938 | *extra_exprs) | 1928 | *extra_exprs) |
1273 | 1939 | 1929 | ||
1274 | 1940 | result_set.config(distinct=True).order_by(SourcePackageRelease.id) | 1930 | result_set.config(distinct=True).order_by(SourcePackageRelease.id) |
1275 | @@ -1974,18 +1964,15 @@ | |||
1276 | 1974 | 1964 | ||
1277 | 1975 | query = """ | 1965 | query = """ |
1278 | 1976 | UPDATE Job SET status = %s | 1966 | UPDATE Job SET status = %s |
1281 | 1977 | FROM BinaryPackageBuild, PackageBuild, BuildFarmJob, | 1967 | FROM BinaryPackageBuild, BuildPackageJob, BuildQueue |
1280 | 1978 | BuildPackageJob, BuildQueue | ||
1282 | 1979 | WHERE | 1968 | WHERE |
1283 | 1980 | BinaryPackageBuild.package_build = PackageBuild.id | ||
1284 | 1981 | -- insert self.id here | 1969 | -- insert self.id here |
1286 | 1982 | AND PackageBuild.archive = %s | 1970 | BinaryPackageBuild.archive = %s |
1287 | 1983 | AND BuildPackageJob.build = BinaryPackageBuild.id | 1971 | AND BuildPackageJob.build = BinaryPackageBuild.id |
1288 | 1984 | AND BuildPackageJob.job = BuildQueue.job | 1972 | AND BuildPackageJob.job = BuildQueue.job |
1289 | 1985 | AND Job.id = BuildQueue.job | 1973 | AND Job.id = BuildQueue.job |
1290 | 1986 | -- Build is in state BuildStatus.NEEDSBUILD (0) | 1974 | -- Build is in state BuildStatus.NEEDSBUILD (0) |
1293 | 1987 | AND PackageBuild.build_farm_job = BuildFarmJob.id | 1975 | AND BinaryPackageBuild.status = %s; |
1292 | 1988 | AND BuildFarmJob.status = %s; | ||
1294 | 1989 | """ % sqlvalues(status, self, BuildStatus.NEEDSBUILD) | 1976 | """ % sqlvalues(status, self, BuildStatus.NEEDSBUILD) |
1295 | 1990 | 1977 | ||
1296 | 1991 | store = Store.of(self) | 1978 | store = Store.of(self) |
1297 | @@ -2411,19 +2398,13 @@ | |||
1298 | 2411 | 2398 | ||
1299 | 2412 | def getBuildCountersForArchitecture(self, archive, distroarchseries): | 2399 | def getBuildCountersForArchitecture(self, archive, distroarchseries): |
1300 | 2413 | """See `IArchiveSet`.""" | 2400 | """See `IArchiveSet`.""" |
1314 | 2414 | cur = cursor() | 2401 | result = IStore(BinaryPackageBuild).find( |
1315 | 2415 | query = """ | 2402 | (BinaryPackageBuild._new_status, Count(BinaryPackageBuild.id)), |
1316 | 2416 | SELECT BuildFarmJob.status, count(BuildFarmJob.id) FROM | 2403 | BinaryPackageBuild._new_archive == archive, |
1317 | 2417 | BinaryPackageBuild, PackageBuild, BuildFarmJob | 2404 | BinaryPackageBuild.distro_arch_series == distroarchseries, |
1318 | 2418 | WHERE | 2405 | ).group_by( |
1319 | 2419 | BinaryPackageBuild.package_build = PackageBuild.id AND | 2406 | BinaryPackageBuild._new_status |
1320 | 2420 | PackageBuild.build_farm_job = BuildFarmJob.id AND | 2407 | ).order_by(BinaryPackageBuild._new_status) |
1308 | 2421 | PackageBuild.archive = %s AND | ||
1309 | 2422 | BinaryPackageBuild.distro_arch_series = %s | ||
1310 | 2423 | GROUP BY BuildFarmJob.status ORDER BY BuildFarmJob.status; | ||
1311 | 2424 | """ % sqlvalues(archive, distroarchseries) | ||
1312 | 2425 | cur.execute(query) | ||
1313 | 2426 | result = cur.fetchall() | ||
1321 | 2427 | 2408 | ||
1322 | 2428 | status_map = { | 2409 | status_map = { |
1323 | 2429 | 'failed': ( | 2410 | 'failed': ( |
1324 | @@ -2452,8 +2433,7 @@ | |||
1325 | 2452 | for key, status in status_map.iteritems(): | 2433 | for key, status in status_map.iteritems(): |
1326 | 2453 | status_and_counters[key] = 0 | 2434 | status_and_counters[key] = 0 |
1327 | 2454 | for status_value, status_counter in result: | 2435 | for status_value, status_counter in result: |
1330 | 2455 | status_values = [item.value for item in status] | 2436 | if status_value in status: |
1329 | 2456 | if status_value in status_values: | ||
1331 | 2457 | status_and_counters[key] += status_counter | 2437 | status_and_counters[key] += status_counter |
1332 | 2458 | 2438 | ||
1333 | 2459 | return status_and_counters | 2439 | return status_and_counters |
1334 | 2460 | 2440 | ||
1335 | === modified file 'lib/lp/soyuz/model/binarypackagebuild.py' | |||
1336 | --- lib/lp/soyuz/model/binarypackagebuild.py 2013-02-01 03:49:23 +0000 | |||
1337 | +++ lib/lp/soyuz/model/binarypackagebuild.py 2013-02-01 03:49:23 +0000 | |||
1338 | @@ -105,6 +105,7 @@ | |||
1339 | 105 | _defaultOrder = 'id' | 105 | _defaultOrder = 'id' |
1340 | 106 | 106 | ||
1341 | 107 | build_farm_job_type = BuildFarmJobType.PACKAGEBUILD | 107 | build_farm_job_type = BuildFarmJobType.PACKAGEBUILD |
1342 | 108 | job_type = build_farm_job_type | ||
1343 | 108 | 109 | ||
1344 | 109 | package_build_id = Int(name='package_build', allow_none=False) | 110 | package_build_id = Int(name='package_build', allow_none=False) |
1345 | 110 | package_build = Reference(package_build_id, 'PackageBuild.id') | 111 | package_build = Reference(package_build_id, 'PackageBuild.id') |
1346 | @@ -118,8 +119,8 @@ | |||
1347 | 118 | source_package_release_id, 'SourcePackageRelease.id') | 119 | source_package_release_id, 'SourcePackageRelease.id') |
1348 | 119 | 120 | ||
1349 | 120 | # Migrating from PackageBuild | 121 | # Migrating from PackageBuild |
1352 | 121 | _new_build_farm_job_id = Int(name='build_farm_job') | 122 | build_farm_job_id = Int(name='build_farm_job') |
1353 | 122 | _new_build_farm_job = Reference(_new_build_farm_job_id, BuildFarmJob.id) | 123 | build_farm_job = Reference(build_farm_job_id, BuildFarmJob.id) |
1354 | 123 | 124 | ||
1355 | 124 | _new_archive_id = Int(name='archive') | 125 | _new_archive_id = Int(name='archive') |
1356 | 125 | _new_archive = Reference(_new_archive_id, 'Archive.id') | 126 | _new_archive = Reference(_new_archive_id, 'Archive.id') |
1357 | @@ -265,16 +266,6 @@ | |||
1358 | 265 | return self.distro_series.distribution | 266 | return self.distro_series.distribution |
1359 | 266 | 267 | ||
1360 | 267 | @property | 268 | @property |
1361 | 268 | def source_package_name(self): | ||
1362 | 269 | """See `IBinaryPackageBuild`.""" | ||
1363 | 270 | return self.source_package_release.sourcepackagename | ||
1364 | 271 | |||
1365 | 272 | @property | ||
1366 | 273 | def is_distro_archive(self): | ||
1367 | 274 | """See `IBinaryPackageBuild`.""" | ||
1368 | 275 | return self.archive.is_main | ||
1369 | 276 | |||
1370 | 277 | @property | ||
1371 | 278 | def is_virtualized(self): | 269 | def is_virtualized(self): |
1372 | 279 | """See `IBuild`""" | 270 | """See `IBuild`""" |
1373 | 280 | return self.archive.require_virtualized | 271 | return self.archive.require_virtualized |
1374 | @@ -634,17 +625,14 @@ | |||
1375 | 634 | BinaryPackageBuild.distro_arch_series = %s AND | 625 | BinaryPackageBuild.distro_arch_series = %s AND |
1376 | 635 | SourcePackageRelease.sourcepackagename = SourcePackageName.id AND | 626 | SourcePackageRelease.sourcepackagename = SourcePackageName.id AND |
1377 | 636 | SourcePackageName.name = %s AND | 627 | SourcePackageName.name = %s AND |
1383 | 637 | BinaryPackageBuild.package_build = PackageBuild.id AND | 628 | BinaryPackageBuild.archive IN %s AND |
1384 | 638 | PackageBuild.archive IN %s AND | 629 | BinaryPackageBuild.date_finished IS NOT NULL AND |
1385 | 639 | PackageBuild.build_farm_job = BuildFarmJob.id AND | 630 | BinaryPackageBuild.status = %s |
1381 | 640 | BuildFarmJob.date_finished IS NOT NULL AND | ||
1382 | 641 | BuildFarmJob.status = %s | ||
1386 | 642 | """ % sqlvalues(self, self.distro_arch_series, | 631 | """ % sqlvalues(self, self.distro_arch_series, |
1387 | 643 | self.source_package_release.name, archives, | 632 | self.source_package_release.name, archives, |
1388 | 644 | BuildStatus.FULLYBUILT), | 633 | BuildStatus.FULLYBUILT), |
1392 | 645 | orderBy=['-BuildFarmJob.date_finished', '-id'], | 634 | orderBy=['-date_finished', '-id'], |
1393 | 646 | clauseTables=['PackageBuild', 'BuildFarmJob', 'SourcePackageName', | 635 | clauseTables=['SourcePackageName', 'SourcePackageRelease']) |
1391 | 647 | 'SourcePackageRelease']) | ||
1394 | 648 | 636 | ||
1395 | 649 | estimated_duration = None | 637 | estimated_duration = None |
1396 | 650 | if bool(completed_builds): | 638 | if bool(completed_builds): |
1397 | @@ -881,8 +869,8 @@ | |||
1398 | 881 | archive, pocket, status=BuildStatus.NEEDSBUILD, | 869 | archive, pocket, status=BuildStatus.NEEDSBUILD, |
1399 | 882 | date_created=None, builder=None): | 870 | date_created=None, builder=None): |
1400 | 883 | """See `IBinaryPackageBuildSet`.""" | 871 | """See `IBinaryPackageBuildSet`.""" |
1403 | 884 | # Create the PackageBuild to which the new BinaryPackageBuild | 872 | # Create the BuildFarmJob and PackageBuild to which the new |
1404 | 885 | # will delegate. | 873 | # BinaryPackageBuild will delegate. |
1405 | 886 | build_farm_job = getUtility(IBuildFarmJobSource).new( | 874 | build_farm_job = getUtility(IBuildFarmJobSource).new( |
1406 | 887 | BinaryPackageBuild.build_farm_job_type, status, processor, | 875 | BinaryPackageBuild.build_farm_job_type, status, processor, |
1407 | 888 | archive.require_virtualized, date_created, builder, archive) | 876 | archive.require_virtualized, date_created, builder, archive) |
1408 | @@ -890,7 +878,7 @@ | |||
1409 | 890 | build_farm_job, archive, pocket) | 878 | build_farm_job, archive, pocket) |
1410 | 891 | 879 | ||
1411 | 892 | binary_package_build = BinaryPackageBuild( | 880 | binary_package_build = BinaryPackageBuild( |
1413 | 893 | _new_build_farm_job=build_farm_job, | 881 | build_farm_job=build_farm_job, |
1414 | 894 | package_build=package_build, | 882 | package_build=package_build, |
1415 | 895 | distro_arch_series=distro_arch_series, | 883 | distro_arch_series=distro_arch_series, |
1416 | 896 | source_package_release=source_package_release, | 884 | source_package_release=source_package_release, |
1417 | @@ -995,23 +983,18 @@ | |||
1418 | 995 | from lp.soyuz.model.distroarchseries import DistroArchSeries | 983 | from lp.soyuz.model.distroarchseries import DistroArchSeries |
1419 | 996 | from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease | 984 | from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease |
1420 | 997 | 985 | ||
1427 | 998 | # Ensure the underlying buildfarmjob and package build tables | 986 | origin.append(BinaryPackageBuild) |
1422 | 999 | # are included. | ||
1423 | 1000 | clauses.extend([ | ||
1424 | 1001 | BinaryPackageBuild.package_build == PackageBuild.id, | ||
1425 | 1002 | PackageBuild.build_farm_job == BuildFarmJob.id]) | ||
1426 | 1003 | origin.extend([BinaryPackageBuild, BuildFarmJob]) | ||
1428 | 1004 | 987 | ||
1429 | 1005 | # Add query clause that filters on build state if the latter is | 988 | # Add query clause that filters on build state if the latter is |
1430 | 1006 | # provided. | 989 | # provided. |
1431 | 1007 | if status is not None: | 990 | if status is not None: |
1433 | 1008 | clauses.append(BuildFarmJob.status == status) | 991 | clauses.append(BinaryPackageBuild._new_status == status) |
1434 | 1009 | 992 | ||
1435 | 1010 | # Add query clause that filters on pocket if the latter is provided. | 993 | # Add query clause that filters on pocket if the latter is provided. |
1436 | 1011 | if pocket: | 994 | if pocket: |
1437 | 1012 | if not isinstance(pocket, (list, tuple)): | 995 | if not isinstance(pocket, (list, tuple)): |
1438 | 1013 | pocket = (pocket,) | 996 | pocket = (pocket,) |
1440 | 1014 | clauses.append(PackageBuild.pocket.is_in(pocket)) | 997 | clauses.append(BinaryPackageBuild._new_pocket.is_in(pocket)) |
1441 | 1015 | 998 | ||
1442 | 1016 | # Add query clause that filters on architecture tag if provided. | 999 | # Add query clause that filters on architecture tag if provided. |
1443 | 1017 | if arch_tag is not None: | 1000 | if arch_tag is not None: |
1444 | @@ -1044,23 +1027,24 @@ | |||
1445 | 1044 | Archive, get_archive_privacy_filter) | 1027 | Archive, get_archive_privacy_filter) |
1446 | 1045 | 1028 | ||
1447 | 1046 | clauses = [ | 1029 | clauses = [ |
1450 | 1047 | PackageBuild.archive_id == Archive.id, | 1030 | BinaryPackageBuild._new_archive_id == Archive.id, |
1451 | 1048 | BuildFarmJob.builder_id == builder_id, | 1031 | BinaryPackageBuild._new_builder_id == builder_id, |
1452 | 1049 | get_archive_privacy_filter(user)] | 1032 | get_archive_privacy_filter(user)] |
1454 | 1050 | origin = [PackageBuild, Archive] | 1033 | origin = [Archive] |
1455 | 1051 | 1034 | ||
1456 | 1052 | self.handleOptionalParamsForBuildQueries( | 1035 | self.handleOptionalParamsForBuildQueries( |
1457 | 1053 | clauses, origin, status, name, pocket=None, arch_tag=arch_tag) | 1036 | clauses, origin, status, name, pocket=None, arch_tag=arch_tag) |
1458 | 1054 | 1037 | ||
1459 | 1055 | return IStore(BinaryPackageBuild).using(*origin).find( | 1038 | return IStore(BinaryPackageBuild).using(*origin).find( |
1460 | 1056 | BinaryPackageBuild, *clauses).order_by( | 1039 | BinaryPackageBuild, *clauses).order_by( |
1462 | 1057 | Desc(BuildFarmJob.date_finished), BinaryPackageBuild.id) | 1040 | Desc(BinaryPackageBuild._new_date_finished), |
1463 | 1041 | BinaryPackageBuild.id) | ||
1464 | 1058 | 1042 | ||
1465 | 1059 | def getBuildsForArchive(self, archive, status=None, name=None, | 1043 | def getBuildsForArchive(self, archive, status=None, name=None, |
1466 | 1060 | pocket=None, arch_tag=None): | 1044 | pocket=None, arch_tag=None): |
1467 | 1061 | """See `IBinaryPackageBuildSet`.""" | 1045 | """See `IBinaryPackageBuildSet`.""" |
1470 | 1062 | clauses = [PackageBuild.archive_id == archive.id] | 1046 | clauses = [BinaryPackageBuild._new_archive_id == archive.id] |
1471 | 1063 | origin = [PackageBuild] | 1047 | origin = [] |
1472 | 1064 | 1048 | ||
1473 | 1065 | self.handleOptionalParamsForBuildQueries( | 1049 | self.handleOptionalParamsForBuildQueries( |
1474 | 1066 | clauses, origin, status, name, pocket, arch_tag) | 1050 | clauses, origin, status, name, pocket, arch_tag) |
1475 | @@ -1070,9 +1054,9 @@ | |||
1476 | 1070 | # * FULLYBUILT & FAILURES by -datebuilt | 1054 | # * FULLYBUILT & FAILURES by -datebuilt |
1477 | 1071 | # It should present the builds in a more natural order. | 1055 | # It should present the builds in a more natural order. |
1478 | 1072 | if status == BuildStatus.SUPERSEDED or status is None: | 1056 | if status == BuildStatus.SUPERSEDED or status is None: |
1480 | 1073 | orderBy = [Desc(BuildFarmJob.date_created)] | 1057 | orderBy = [Desc(BinaryPackageBuild._new_date_created)] |
1481 | 1074 | else: | 1058 | else: |
1483 | 1075 | orderBy = [Desc(BuildFarmJob.date_finished)] | 1059 | orderBy = [Desc(BinaryPackageBuild._new_date_finished)] |
1484 | 1076 | # All orders fallback to id if the primary order doesn't succeed | 1060 | # All orders fallback to id if the primary order doesn't succeed |
1485 | 1077 | orderBy.append(BinaryPackageBuild.id) | 1061 | orderBy.append(BinaryPackageBuild.id) |
1486 | 1078 | 1062 | ||
1487 | @@ -1087,8 +1071,6 @@ | |||
1488 | 1087 | if not arch_ids: | 1071 | if not arch_ids: |
1489 | 1088 | return EmptyResultSet() | 1072 | return EmptyResultSet() |
1490 | 1089 | 1073 | ||
1491 | 1090 | clauseTables = [PackageBuild] | ||
1492 | 1091 | |||
1493 | 1092 | # format clause according single/multiple architecture(s) form | 1074 | # format clause according single/multiple architecture(s) form |
1494 | 1093 | if len(arch_ids) == 1: | 1075 | if len(arch_ids) == 1: |
1495 | 1094 | condition_clauses = [('distro_arch_series=%s' | 1076 | condition_clauses = [('distro_arch_series=%s' |
1496 | @@ -1097,10 +1079,6 @@ | |||
1497 | 1097 | condition_clauses = [('distro_arch_series IN %s' | 1079 | condition_clauses = [('distro_arch_series IN %s' |
1498 | 1098 | % sqlvalues(arch_ids))] | 1080 | % sqlvalues(arch_ids))] |
1499 | 1099 | 1081 | ||
1500 | 1100 | condition_clauses.extend([ | ||
1501 | 1101 | "BinaryPackageBuild.package_build = PackageBuild.id", | ||
1502 | 1102 | "PackageBuild.build_farm_job = BuildFarmJob.id"]) | ||
1503 | 1103 | |||
1504 | 1104 | # XXX cprov 2006-09-25: It would be nice if we could encapsulate | 1082 | # XXX cprov 2006-09-25: It would be nice if we could encapsulate |
1505 | 1105 | # the chunk of code below (which deals with the optional paramenters) | 1083 | # the chunk of code below (which deals with the optional paramenters) |
1506 | 1106 | # and share it with ISourcePackage.getBuildRecords() | 1084 | # and share it with ISourcePackage.getBuildRecords() |
1507 | @@ -1108,20 +1086,23 @@ | |||
1508 | 1108 | # exclude gina-generated and security (dak-made) builds | 1086 | # exclude gina-generated and security (dak-made) builds |
1509 | 1109 | # status == FULLYBUILT && datebuilt == null | 1087 | # status == FULLYBUILT && datebuilt == null |
1510 | 1110 | if status == BuildStatus.FULLYBUILT: | 1088 | if status == BuildStatus.FULLYBUILT: |
1512 | 1111 | condition_clauses.append("BuildFarmJob.date_finished IS NOT NULL") | 1089 | condition_clauses.append( |
1513 | 1090 | "BinaryPackageBuild.date_finished IS NOT NULL") | ||
1514 | 1112 | else: | 1091 | else: |
1515 | 1113 | condition_clauses.append( | 1092 | condition_clauses.append( |
1518 | 1114 | "(BuildFarmJob.status <> %s OR " | 1093 | "(BinaryPackageBuild.status <> %s OR " |
1519 | 1115 | " BuildFarmJob.date_finished IS NOT NULL)" | 1094 | " BinaryPackageBuild.date_finished IS NOT NULL)" |
1520 | 1116 | % sqlvalues(BuildStatus.FULLYBUILT)) | 1095 | % sqlvalues(BuildStatus.FULLYBUILT)) |
1521 | 1117 | 1096 | ||
1522 | 1118 | # Ordering according status | 1097 | # Ordering according status |
1523 | 1119 | # * NEEDSBUILD, BUILDING & UPLOADING by -lastscore | 1098 | # * NEEDSBUILD, BUILDING & UPLOADING by -lastscore |
1525 | 1120 | # * SUPERSEDED & All by -PackageBuild.build_farm_job | 1099 | # * SUPERSEDED & All by -BinaryPackageBuild.id |
1526 | 1121 | # (nearly equivalent to -datecreated, but much more | 1100 | # (nearly equivalent to -datecreated, but much more |
1527 | 1122 | # efficient.) | 1101 | # efficient.) |
1528 | 1123 | # * FULLYBUILT & FAILURES by -datebuilt | 1102 | # * FULLYBUILT & FAILURES by -datebuilt |
1529 | 1124 | # It should present the builds in a more natural order. | 1103 | # It should present the builds in a more natural order. |
1530 | 1104 | clauseTables = [] | ||
1531 | 1105 | order_by_table = None | ||
1532 | 1125 | if status in [ | 1106 | if status in [ |
1533 | 1126 | BuildStatus.NEEDSBUILD, | 1107 | BuildStatus.NEEDSBUILD, |
1534 | 1127 | BuildStatus.BUILDING, | 1108 | BuildStatus.BUILDING, |
1535 | @@ -1134,12 +1115,10 @@ | |||
1536 | 1134 | 'BuildPackageJob.build = BinaryPackageBuild.id') | 1115 | 'BuildPackageJob.build = BinaryPackageBuild.id') |
1537 | 1135 | condition_clauses.append('BuildPackageJob.job = BuildQueue.job') | 1116 | condition_clauses.append('BuildPackageJob.job = BuildQueue.job') |
1538 | 1136 | elif status == BuildStatus.SUPERSEDED or status is None: | 1117 | elif status == BuildStatus.SUPERSEDED or status is None: |
1541 | 1137 | order_by = [Desc(PackageBuild.build_farm_job_id)] | 1118 | order_by = [Desc(BinaryPackageBuild.id)] |
1540 | 1138 | order_by_table = PackageBuild | ||
1542 | 1139 | else: | 1119 | else: |
1544 | 1140 | order_by = [Desc(BuildFarmJob.date_finished), | 1120 | order_by = [Desc(BinaryPackageBuild._new_date_finished), |
1545 | 1141 | BinaryPackageBuild.id] | 1121 | BinaryPackageBuild.id] |
1546 | 1142 | order_by_table = BuildFarmJob | ||
1547 | 1143 | 1122 | ||
1548 | 1144 | # End of duplication (see XXX cprov 2006-09-25 above). | 1123 | # End of duplication (see XXX cprov 2006-09-25 above). |
1549 | 1145 | 1124 | ||
1550 | @@ -1149,11 +1128,14 @@ | |||
1551 | 1149 | # Only pick builds from the distribution's main archive to | 1128 | # Only pick builds from the distribution's main archive to |
1552 | 1150 | # exclude PPA builds | 1129 | # exclude PPA builds |
1553 | 1151 | condition_clauses.append( | 1130 | condition_clauses.append( |
1555 | 1152 | "PackageBuild.archive IN %s" % | 1131 | "BinaryPackageBuild.archive IN %s" % |
1556 | 1153 | sqlvalues(list(distribution.all_distro_archive_ids))) | 1132 | sqlvalues(list(distribution.all_distro_archive_ids))) |
1557 | 1154 | 1133 | ||
1558 | 1134 | find_spec = (BinaryPackageBuild,) | ||
1559 | 1135 | if order_by_table: | ||
1560 | 1136 | find_spec = find_spec + (order_by_table,) | ||
1561 | 1155 | result_set = Store.of(distribution).using(*clauseTables).find( | 1137 | result_set = Store.of(distribution).using(*clauseTables).find( |
1563 | 1156 | (BinaryPackageBuild, order_by_table), *condition_clauses) | 1138 | find_spec, *condition_clauses) |
1564 | 1157 | result_set.order_by(*order_by) | 1139 | result_set.order_by(*order_by) |
1565 | 1158 | 1140 | ||
1566 | 1159 | def get_bpp(result_row): | 1141 | def get_bpp(result_row): |
1567 | @@ -1182,20 +1164,21 @@ | |||
1568 | 1182 | query = """ | 1164 | query = """ |
1569 | 1183 | source_package_release IN %s AND | 1165 | source_package_release IN %s AND |
1570 | 1184 | package_build = packagebuild.id AND | 1166 | package_build = packagebuild.id AND |
1572 | 1185 | archive.id = packagebuild.archive AND | 1167 | archive.id = binarypackagebuild.archive AND |
1573 | 1186 | archive.purpose != %s AND | 1168 | archive.purpose != %s AND |
1574 | 1187 | packagebuild.build_farm_job = buildfarmjob.id | 1169 | packagebuild.build_farm_job = buildfarmjob.id |
1575 | 1188 | """ % sqlvalues(sourcepackagerelease_ids, ArchivePurpose.PPA) | 1170 | """ % sqlvalues(sourcepackagerelease_ids, ArchivePurpose.PPA) |
1576 | 1189 | 1171 | ||
1577 | 1190 | if buildstate is not None: | 1172 | if buildstate is not None: |
1579 | 1191 | query += "AND buildfarmjob.status = %s" % sqlvalues(buildstate) | 1173 | query += ( |
1580 | 1174 | "AND binarypackagebuild.status = %s" % sqlvalues(buildstate)) | ||
1581 | 1192 | 1175 | ||
1582 | 1193 | resultset = IStore(BinaryPackageBuild).using( | 1176 | resultset = IStore(BinaryPackageBuild).using( |
1583 | 1194 | BinaryPackageBuild, PackageBuild, BuildFarmJob, Archive).find( | 1177 | BinaryPackageBuild, PackageBuild, BuildFarmJob, Archive).find( |
1584 | 1195 | (BinaryPackageBuild, PackageBuild, BuildFarmJob), | 1178 | (BinaryPackageBuild, PackageBuild, BuildFarmJob), |
1585 | 1196 | SQL(query)) | 1179 | SQL(query)) |
1586 | 1197 | resultset.order_by( | 1180 | resultset.order_by( |
1588 | 1198 | Desc(BuildFarmJob.date_created), BinaryPackageBuild.id) | 1181 | Desc(BinaryPackageBuild._new_date_created), BinaryPackageBuild.id) |
1589 | 1199 | return DecoratedResultSet(resultset, operator.itemgetter(0)) | 1182 | return DecoratedResultSet(resultset, operator.itemgetter(0)) |
1590 | 1200 | 1183 | ||
1591 | 1201 | def getStatusSummaryForBuilds(self, builds): | 1184 | def getStatusSummaryForBuilds(self, builds): |
1592 | 1202 | 1185 | ||
1593 | === modified file 'lib/lp/soyuz/model/buildpackagejob.py' | |||
1594 | --- lib/lp/soyuz/model/buildpackagejob.py 2013-01-22 06:42:23 +0000 | |||
1595 | +++ lib/lp/soyuz/model/buildpackagejob.py 2013-02-01 03:49:23 +0000 | |||
1596 | @@ -157,14 +157,13 @@ | |||
1597 | 157 | ) | 157 | ) |
1598 | 158 | sub_query = """ | 158 | sub_query = """ |
1599 | 159 | SELECT TRUE FROM Archive, BinaryPackageBuild, BuildPackageJob, | 159 | SELECT TRUE FROM Archive, BinaryPackageBuild, BuildPackageJob, |
1601 | 160 | PackageBuild, BuildFarmJob, DistroArchSeries | 160 | DistroArchSeries |
1602 | 161 | WHERE | 161 | WHERE |
1603 | 162 | BuildPackageJob.job = Job.id AND | 162 | BuildPackageJob.job = Job.id AND |
1604 | 163 | BuildPackageJob.build = BinaryPackageBuild.id AND | 163 | BuildPackageJob.build = BinaryPackageBuild.id AND |
1605 | 164 | BinaryPackageBuild.distro_arch_series = | 164 | BinaryPackageBuild.distro_arch_series = |
1606 | 165 | DistroArchSeries.id AND | 165 | DistroArchSeries.id AND |
1609 | 166 | BinaryPackageBuild.package_build = PackageBuild.id AND | 166 | BinaryPackageBuild.archive = Archive.id AND |
1608 | 167 | PackageBuild.archive = Archive.id AND | ||
1610 | 168 | ((Archive.private IS TRUE AND | 167 | ((Archive.private IS TRUE AND |
1611 | 169 | EXISTS ( | 168 | EXISTS ( |
1612 | 170 | SELECT SourcePackagePublishingHistory.id | 169 | SELECT SourcePackagePublishingHistory.id |
1613 | @@ -178,8 +177,7 @@ | |||
1614 | 178 | SourcePackagePublishingHistory.status IN %s)) | 177 | SourcePackagePublishingHistory.status IN %s)) |
1615 | 179 | OR | 178 | OR |
1616 | 180 | archive.private IS FALSE) AND | 179 | archive.private IS FALSE) AND |
1619 | 181 | PackageBuild.build_farm_job = BuildFarmJob.id AND | 180 | BinaryPackageBuild.status = %s |
1618 | 182 | BuildFarmJob.status = %s | ||
1620 | 183 | """ % sqlvalues(private_statuses, BuildStatus.NEEDSBUILD) | 181 | """ % sqlvalues(private_statuses, BuildStatus.NEEDSBUILD) |
1621 | 184 | 182 | ||
1622 | 185 | # Ensure that if BUILDING builds exist for the same | 183 | # Ensure that if BUILDING builds exist for the same |
1623 | @@ -201,16 +199,12 @@ | |||
1624 | 201 | sub_query += """ | 199 | sub_query += """ |
1625 | 202 | AND Archive.id NOT IN ( | 200 | AND Archive.id NOT IN ( |
1626 | 203 | SELECT Archive.id | 201 | SELECT Archive.id |
1629 | 204 | FROM PackageBuild, BuildFarmJob, Archive, | 202 | FROM Archive, BinaryPackageBuild, DistroArchSeries |
1628 | 205 | BinaryPackageBuild, DistroArchSeries | ||
1630 | 206 | WHERE | 203 | WHERE |
1635 | 207 | PackageBuild.build_farm_job = BuildFarmJob.id | 204 | BinaryPackageBuild.distro_arch_series = DistroArchSeries.id |
1632 | 208 | AND BinaryPackageBuild.package_build = PackageBuild.id | ||
1633 | 209 | AND BinaryPackageBuild.distro_arch_series | ||
1634 | 210 | = DistroArchSeries.id | ||
1636 | 211 | AND DistroArchSeries.processorfamily = %s | 205 | AND DistroArchSeries.processorfamily = %s |
1639 | 212 | AND BuildFarmJob.status = %s | 206 | AND BinaryPackageBuild.status = %s |
1640 | 213 | AND PackageBuild.archive = Archive.id | 207 | AND BinaryPackageBuild.archive = Archive.id |
1641 | 214 | AND Archive.purpose = %s | 208 | AND Archive.purpose = %s |
1642 | 215 | AND Archive.private IS FALSE | 209 | AND Archive.private IS FALSE |
1643 | 216 | GROUP BY Archive.id | 210 | GROUP BY Archive.id |
1644 | 217 | 211 | ||
1645 | === modified file 'lib/lp/soyuz/model/distributionsourcepackagerelease.py' | |||
1646 | --- lib/lp/soyuz/model/distributionsourcepackagerelease.py 2013-01-07 02:40:55 +0000 | |||
1647 | +++ lib/lp/soyuz/model/distributionsourcepackagerelease.py 2013-02-01 03:49:23 +0000 | |||
1648 | @@ -20,8 +20,6 @@ | |||
1649 | 20 | from storm.store import Store | 20 | from storm.store import Store |
1650 | 21 | from zope.interface import implements | 21 | from zope.interface import implements |
1651 | 22 | 22 | ||
1652 | 23 | from lp.buildmaster.model.buildfarmjob import BuildFarmJob | ||
1653 | 24 | from lp.buildmaster.model.packagebuild import PackageBuild | ||
1654 | 25 | from lp.services.database.decoratedresultset import DecoratedResultSet | 23 | from lp.services.database.decoratedresultset import DecoratedResultSet |
1655 | 26 | from lp.services.database.sqlbase import sqlvalues | 24 | from lp.services.database.sqlbase import sqlvalues |
1656 | 27 | from lp.soyuz.interfaces.archive import MAIN_ARCHIVE_PURPOSES | 25 | from lp.soyuz.interfaces.archive import MAIN_ARCHIVE_PURPOSES |
1657 | @@ -105,8 +103,6 @@ | |||
1658 | 105 | BinaryPackageBuild.distro_arch_series == DistroArchSeries.id, | 103 | BinaryPackageBuild.distro_arch_series == DistroArchSeries.id, |
1659 | 106 | DistroArchSeries.distroseries == DistroSeries.id, | 104 | DistroArchSeries.distroseries == DistroSeries.id, |
1660 | 107 | DistroSeries.distribution == self.distribution, | 105 | DistroSeries.distribution == self.distribution, |
1661 | 108 | BinaryPackageBuild.package_build == PackageBuild.id, | ||
1662 | 109 | PackageBuild.build_farm_job == BuildFarmJob.id | ||
1663 | 110 | ) | 106 | ) |
1664 | 111 | 107 | ||
1665 | 112 | # First, get all the builds built in a main archive (this will | 108 | # First, get all the builds built in a main archive (this will |
1666 | @@ -114,7 +110,7 @@ | |||
1667 | 114 | builds_built_in_main_archives = Store.of(self.distribution).find( | 110 | builds_built_in_main_archives = Store.of(self.distribution).find( |
1668 | 115 | BinaryPackageBuild, | 111 | BinaryPackageBuild, |
1669 | 116 | builds_for_distro_exprs, | 112 | builds_for_distro_exprs, |
1671 | 117 | PackageBuild.archive == Archive.id, | 113 | BinaryPackageBuild._new_archive == Archive.id, |
1672 | 118 | Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES)) | 114 | Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES)) |
1673 | 119 | 115 | ||
1674 | 120 | # Next get all the builds that have a binary published in the | 116 | # Next get all the builds that have a binary published in the |
1675 | 121 | 117 | ||
1676 | === modified file 'lib/lp/soyuz/model/publishing.py' | |||
1677 | --- lib/lp/soyuz/model/publishing.py 2013-01-03 00:16:08 +0000 | |||
1678 | +++ lib/lp/soyuz/model/publishing.py 2013-02-01 03:49:23 +0000 | |||
1679 | @@ -1579,17 +1579,14 @@ | |||
1680 | 1579 | # If an optional list of build states was passed in as a parameter, | 1579 | # If an optional list of build states was passed in as a parameter, |
1681 | 1580 | # ensure that the result is limited to builds in those states. | 1580 | # ensure that the result is limited to builds in those states. |
1682 | 1581 | if build_states is not None: | 1581 | if build_states is not None: |
1687 | 1582 | extra_exprs.extend(( | 1582 | extra_exprs.append( |
1688 | 1583 | BinaryPackageBuild.package_build == PackageBuild.id, | 1583 | BinaryPackageBuild._new_status.is_in(build_states)) |
1685 | 1584 | PackageBuild.build_farm_job == BuildFarmJob.id, | ||
1686 | 1585 | BuildFarmJob.status.is_in(build_states))) | ||
1689 | 1586 | 1584 | ||
1690 | 1587 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) | 1585 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
1691 | 1588 | 1586 | ||
1692 | 1589 | # We'll be looking for builds in the same distroseries as the | 1587 | # We'll be looking for builds in the same distroseries as the |
1693 | 1590 | # SPPH for the same release. | 1588 | # SPPH for the same release. |
1694 | 1591 | builds_for_distroseries_expr = ( | 1589 | builds_for_distroseries_expr = ( |
1695 | 1592 | BinaryPackageBuild.package_build == PackageBuild.id, | ||
1696 | 1593 | BinaryPackageBuild.distro_arch_series_id == DistroArchSeries.id, | 1590 | BinaryPackageBuild.distro_arch_series_id == DistroArchSeries.id, |
1697 | 1594 | SourcePackagePublishingHistory.distroseriesID == | 1591 | SourcePackagePublishingHistory.distroseriesID == |
1698 | 1595 | DistroArchSeries.distroseriesID, | 1592 | DistroArchSeries.distroseriesID, |
1699 | @@ -1603,7 +1600,7 @@ | |||
1700 | 1603 | BinaryPackageBuild, | 1600 | BinaryPackageBuild, |
1701 | 1604 | builds_for_distroseries_expr, | 1601 | builds_for_distroseries_expr, |
1702 | 1605 | (SourcePackagePublishingHistory.archiveID == | 1602 | (SourcePackagePublishingHistory.archiveID == |
1704 | 1606 | PackageBuild.archive_id), | 1603 | BinaryPackageBuild._new_archive_id), |
1705 | 1607 | *extra_exprs) | 1604 | *extra_exprs) |
1706 | 1608 | 1605 | ||
1707 | 1609 | # Next get all the builds that have a binary published in the | 1606 | # Next get all the builds that have a binary published in the |
1708 | @@ -1613,7 +1610,7 @@ | |||
1709 | 1613 | BinaryPackageBuild, | 1610 | BinaryPackageBuild, |
1710 | 1614 | builds_for_distroseries_expr, | 1611 | builds_for_distroseries_expr, |
1711 | 1615 | (SourcePackagePublishingHistory.archiveID != | 1612 | (SourcePackagePublishingHistory.archiveID != |
1713 | 1616 | PackageBuild.archive_id), | 1613 | BinaryPackageBuild._new_archive_id), |
1714 | 1617 | BinaryPackagePublishingHistory.archive == | 1614 | BinaryPackagePublishingHistory.archive == |
1715 | 1618 | SourcePackagePublishingHistory.archiveID, | 1615 | SourcePackagePublishingHistory.archiveID, |
1716 | 1619 | BinaryPackagePublishingHistory.binarypackagerelease == | 1616 | BinaryPackagePublishingHistory.binarypackagerelease == |
1717 | @@ -1737,9 +1734,7 @@ | |||
1718 | 1737 | self._getSourceBinaryJoinForSources( | 1734 | self._getSourceBinaryJoinForSources( |
1719 | 1738 | source_publication_ids, active_binaries_only=False), | 1735 | source_publication_ids, active_binaries_only=False), |
1720 | 1739 | BinaryPackagePublishingHistory.datepublished != None, | 1736 | BinaryPackagePublishingHistory.datepublished != None, |
1724 | 1740 | BinaryPackageBuild.package_build == PackageBuild.id, | 1737 | BinaryPackageBuild._new_status.is_in(build_states)) |
1722 | 1741 | PackageBuild.build_farm_job == BuildFarmJob.id, | ||
1723 | 1742 | BuildFarmJob.status.is_in(build_states)) | ||
1725 | 1743 | 1738 | ||
1726 | 1744 | published_builds.order_by( | 1739 | published_builds.order_by( |
1727 | 1745 | SourcePackagePublishingHistory.id, | 1740 | SourcePackagePublishingHistory.id, |
1728 | 1746 | 1741 | ||
1729 | === modified file 'lib/lp/soyuz/model/sourcepackagerelease.py' | |||
1730 | --- lib/lp/soyuz/model/sourcepackagerelease.py 2013-01-07 04:53:37 +0000 | |||
1731 | +++ lib/lp/soyuz/model/sourcepackagerelease.py 2013-02-01 03:49:23 +0000 | |||
1732 | @@ -226,13 +226,11 @@ | |||
1733 | 226 | # sourcepackagerelease. | 226 | # sourcepackagerelease. |
1734 | 227 | return BinaryPackageBuild.select(""" | 227 | return BinaryPackageBuild.select(""" |
1735 | 228 | source_package_release = %s AND | 228 | source_package_release = %s AND |
1739 | 229 | package_build = packagebuild.id AND | 229 | archive.id = binarypackagebuild.archive AND |
1737 | 230 | archive.id = packagebuild.archive AND | ||
1738 | 231 | packagebuild.build_farm_job = buildfarmjob.id AND | ||
1740 | 232 | archive.purpose IN %s | 230 | archive.purpose IN %s |
1741 | 233 | """ % sqlvalues(self.id, MAIN_ARCHIVE_PURPOSES), | 231 | """ % sqlvalues(self.id, MAIN_ARCHIVE_PURPOSES), |
1744 | 234 | orderBy=['-buildfarmjob.date_created', 'id'], | 232 | orderBy=['-date_created', 'id'], |
1745 | 235 | clauseTables=['Archive', 'PackageBuild', 'BuildFarmJob']) | 233 | clauseTables=['Archive']) |
1746 | 236 | 234 | ||
1747 | 237 | @property | 235 | @property |
1748 | 238 | def age(self): | 236 | def age(self): |
1749 | @@ -447,16 +445,10 @@ | |||
1750 | 447 | # If there was no published binary we have to try to find a | 445 | # If there was no published binary we have to try to find a |
1751 | 448 | # suitable build in all possible location across the distroseries | 446 | # suitable build in all possible location across the distroseries |
1752 | 449 | # inheritance tree. See below. | 447 | # inheritance tree. See below. |
1758 | 450 | clause_tables = [ | 448 | clause_tables = ['DistroArchSeries'] |
1754 | 451 | 'BuildFarmJob', | ||
1755 | 452 | 'PackageBuild', | ||
1756 | 453 | 'DistroArchSeries', | ||
1757 | 454 | ] | ||
1759 | 455 | queries = [ | 449 | queries = [ |
1760 | 456 | "BinaryPackageBuild.package_build = PackageBuild.id AND " | ||
1761 | 457 | "PackageBuild.build_farm_job = BuildFarmJob.id AND " | ||
1762 | 458 | "DistroArchSeries.id = BinaryPackageBuild.distro_arch_series AND " | 450 | "DistroArchSeries.id = BinaryPackageBuild.distro_arch_series AND " |
1764 | 459 | "PackageBuild.archive = %s AND " | 451 | "BinaryPackageBuild.archive = %s AND " |
1765 | 460 | "DistroArchSeries.architecturetag = %s AND " | 452 | "DistroArchSeries.architecturetag = %s AND " |
1766 | 461 | "BinaryPackageBuild.source_package_release = %s" % ( | 453 | "BinaryPackageBuild.source_package_release = %s" % ( |
1767 | 462 | sqlvalues(archive.id, distroarchseries.architecturetag, self))] | 454 | sqlvalues(archive.id, distroarchseries.architecturetag, self))] |
1768 | @@ -467,7 +459,7 @@ | |||
1769 | 467 | 459 | ||
1770 | 468 | return BinaryPackageBuild.selectFirst( | 460 | return BinaryPackageBuild.selectFirst( |
1771 | 469 | query, clauseTables=clause_tables, | 461 | query, clauseTables=clause_tables, |
1773 | 470 | orderBy=['-BuildFarmJob.date_created']) | 462 | orderBy=['-date_created']) |
1774 | 471 | 463 | ||
1775 | 472 | def override(self, component=None, section=None, urgency=None): | 464 | def override(self, component=None, section=None, urgency=None): |
1776 | 473 | """See ISourcePackageRelease.""" | 465 | """See ISourcePackageRelease.""" |
1777 | 474 | 466 | ||
1778 | === modified file 'lib/lp/soyuz/tests/test_archive.py' | |||
1779 | --- lib/lp/soyuz/tests/test_archive.py 2012-10-25 11:02:37 +0000 | |||
1780 | +++ lib/lp/soyuz/tests/test_archive.py 2013-02-01 03:49:23 +0000 | |||
1781 | @@ -359,16 +359,13 @@ | |||
1782 | 359 | # Return the count for archive build jobs with the given status. | 359 | # Return the count for archive build jobs with the given status. |
1783 | 360 | query = """ | 360 | query = """ |
1784 | 361 | SELECT COUNT(Job.id) | 361 | SELECT COUNT(Job.id) |
1787 | 362 | FROM BinaryPackageBuild, BuildPackageJob, BuildQueue, Job, | 362 | FROM BinaryPackageBuild, BuildPackageJob, BuildQueue, Job |
1786 | 363 | PackageBuild, BuildFarmJob | ||
1788 | 364 | WHERE | 363 | WHERE |
1789 | 365 | BuildPackageJob.build = BinaryPackageBuild.id | 364 | BuildPackageJob.build = BinaryPackageBuild.id |
1790 | 366 | AND BuildPackageJob.job = BuildQueue.job | 365 | AND BuildPackageJob.job = BuildQueue.job |
1791 | 367 | AND Job.id = BuildQueue.job | 366 | AND Job.id = BuildQueue.job |
1796 | 368 | AND BinaryPackageBuild.package_build = PackageBuild.id | 367 | AND BinaryPackageBuild.archive = %s |
1797 | 369 | AND PackageBuild.archive = %s | 368 | AND BinaryPackageBuild.status = %s |
1794 | 370 | AND PackageBuild.build_farm_job = BuildFarmJob.id | ||
1795 | 371 | AND BuildFarmJob.status = %s | ||
1798 | 372 | AND Job.status = %s; | 369 | AND Job.status = %s; |
1799 | 373 | """ % sqlvalues(archive, BuildStatus.NEEDSBUILD, status) | 370 | """ % sqlvalues(archive, BuildStatus.NEEDSBUILD, status) |
1800 | 374 | 371 | ||
1801 | 375 | 372 | ||
1802 | === modified file 'lib/lp/translations/interfaces/translationtemplatesbuild.py' | |||
1803 | --- lib/lp/translations/interfaces/translationtemplatesbuild.py 2011-12-24 16:54:44 +0000 | |||
1804 | +++ lib/lp/translations/interfaces/translationtemplatesbuild.py 2013-02-01 03:49:23 +0000 | |||
1805 | @@ -22,10 +22,6 @@ | |||
1806 | 22 | class ITranslationTemplatesBuild(IBuildFarmJob): | 22 | class ITranslationTemplatesBuild(IBuildFarmJob): |
1807 | 23 | """The build information for translation templates builds.""" | 23 | """The build information for translation templates builds.""" |
1808 | 24 | 24 | ||
1809 | 25 | build_farm_job = Reference( | ||
1810 | 26 | title=_("The build farm job that this extends."), | ||
1811 | 27 | required=True, readonly=True, schema=IBuildFarmJob) | ||
1812 | 28 | |||
1813 | 29 | branch = Reference( | 25 | branch = Reference( |
1814 | 30 | title=_("The branch that this build operates on."), | 26 | title=_("The branch that this build operates on."), |
1815 | 31 | required=True, readonly=True, schema=IBranch) | 27 | required=True, readonly=True, schema=IBranch) |
1816 | @@ -34,7 +30,7 @@ | |||
1817 | 34 | class ITranslationTemplatesBuildSource(ISpecificBuildFarmJobSource): | 30 | class ITranslationTemplatesBuildSource(ISpecificBuildFarmJobSource): |
1818 | 35 | """Utility for `ITranslationTemplatesBuild`.""" | 31 | """Utility for `ITranslationTemplatesBuild`.""" |
1819 | 36 | 32 | ||
1821 | 37 | def create(build_farm_job, branch): | 33 | def create(branch): |
1822 | 38 | """Create a new `ITranslationTemplatesBuild`.""" | 34 | """Create a new `ITranslationTemplatesBuild`.""" |
1823 | 39 | 35 | ||
1824 | 40 | def findByBranch(branch, store=None): | 36 | def findByBranch(branch, store=None): |
1825 | 41 | 37 | ||
1826 | === modified file 'lib/lp/translations/model/translationtemplatesbuild.py' | |||
1827 | --- lib/lp/translations/model/translationtemplatesbuild.py 2013-02-01 03:49:23 +0000 | |||
1828 | +++ lib/lp/translations/model/translationtemplatesbuild.py 2013-02-01 03:49:23 +0000 | |||
1829 | @@ -57,6 +57,8 @@ | |||
1830 | 57 | 57 | ||
1831 | 58 | __storm_table__ = 'TranslationTemplatesBuild' | 58 | __storm_table__ = 'TranslationTemplatesBuild' |
1832 | 59 | 59 | ||
1833 | 60 | job_type = BuildFarmJobType.TRANSLATIONTEMPLATESBUILD | ||
1834 | 61 | |||
1835 | 60 | id = Int(name='id', primary=True) | 62 | id = Int(name='id', primary=True) |
1836 | 61 | build_farm_job_id = Int(name='build_farm_job', allow_none=False) | 63 | build_farm_job_id = Int(name='build_farm_job', allow_none=False) |
1837 | 62 | build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id') | 64 | build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id') |
799 + ).group_by(
800 + BinaryPackageBuild._new_status
801 + ).order_by(BinaryPackageBuild._new_status)
Perhaps this could be re-flowed?
1011 + find_spec = (BinaryPackageBuild,)
Why? Surely this can just be inlined into the .find() call.