Merge lp:~wgrant/launchpad/flatten-bfj-3-query into lp:launchpad
- flatten-bfj-3-query
- Merge into devel
Status: Superseded
Proposed branch: lp:~wgrant/launchpad/flatten-bfj-3-query
Merge into: lp:launchpad
Prerequisite: lp:~wgrant/launchpad/flatten-bfj-2-garbo
Diff against target:
1837 lines (+264/-721) 23 files modified
database/sampledata/current-dev.sql (+48/-48) database/sampledata/current.sql (+48/-48) database/schema/security.cfg (+0/-4) lib/lp/buildmaster/interfaces/buildfarmjob.py (+2/-0) lib/lp/buildmaster/interfaces/packagebuild.py (+0/-4) lib/lp/buildmaster/model/buildfarmjob.py (+11/-17) lib/lp/buildmaster/model/packagebuild.py (+4/-8) lib/lp/code/model/sourcepackagerecipe.py (+44/-47) lib/lp/code/model/sourcepackagerecipebuild.py (+5/-7) lib/lp/registry/model/sourcepackage.py (+7/-7) lib/lp/scripts/garbo.py (+0/-227) lib/lp/scripts/tests/test_garbo.py (+1/-136) lib/lp/soyuz/doc/sourcepackagerelease.txt (+1/-3) lib/lp/soyuz/interfaces/binarypackagebuild.py (+0/-7) lib/lp/soyuz/model/archive.py (+27/-47) lib/lp/soyuz/model/binarypackagebuild.py (+41/-58) lib/lp/soyuz/model/buildpackagejob.py (+7/-13) lib/lp/soyuz/model/distributionsourcepackagerelease.py (+1/-5) lib/lp/soyuz/model/publishing.py (+5/-10) lib/lp/soyuz/model/sourcepackagerelease.py (+6/-14) lib/lp/soyuz/tests/test_archive.py (+3/-6) lib/lp/translations/interfaces/translationtemplatesbuild.py (+1/-5) lib/lp/translations/model/translationtemplatesbuild.py (+2/-0) |
To merge this branch: bzr merge lp:~wgrant/launchpad/flatten-bfj-3-query
Related bugs:
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Steve Kowalik (community) | code | | Approve |

Review via email: mp+145543@code.launchpad.net
This proposal has been superseded by a proposal from 2013-02-01.
Commit message
Description of the change
The build farm job schema is being reworked to improve performance. Columns from PackageBuild and BuildFarmJob are being merged into tables that previously delegated to them. The PackageBuild table will end up dying entirely, but BuildFarmJob will remain, a shadow of its former self, to answer questions about Archive:+builds and Builder:+history. Additionally, BinaryPackageBuild is growing new distribution, distroseries, sourcepackagename and is_distro_archive columns to make searches even faster.
This branch (which can only land once the garbo jobs have completed) changes the app to read values from the new denormed columns, while still writing to the old ones on PB/BFJ during the transition. Most queries involving BuildFarmJob or PackageBuild are now just over BPB/SPRB/TTB.
Preview Diff
1 | === modified file 'database/sampledata/current-dev.sql' |
2 | --- database/sampledata/current-dev.sql 2013-01-17 11:57:53 +0000 |
3 | +++ database/sampledata/current-dev.sql 2013-02-01 03:49:23 +0000 |
4 | @@ -2464,30 +2464,30 @@ |
5 | |
6 | ALTER TABLE buildfarmjob DISABLE TRIGGER ALL; |
7 | |
8 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (2, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, 1, 0); |
9 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (6, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, 1, 0); |
10 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (7, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, 1, 0); |
11 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (8, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, 1, 0); |
12 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (9, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, 1, 0); |
13 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (10, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); |
14 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (11, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, 1, 0); |
15 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (12, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, 1, 0); |
16 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (13, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, 1, 0); |
17 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (14, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, 1, 0); |
18 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (15, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, 1, 0); |
19 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (16, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, 1, 0); |
20 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (18, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, 1, 0); |
21 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (19, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, 1, 0); |
22 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (21, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, 1, 0); |
23 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (22, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 1, 0); |
24 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (23, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); |
25 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (24, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, 1, 0); |
26 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (25, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); |
27 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (26, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, 1, 0); |
28 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (27, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, 1, 0); |
29 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (28, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, 1, 0); |
30 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (29, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, 1, 0); |
31 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (30, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, 1, 0); |
32 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (2, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, 1, 0, 1); |
33 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (6, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, 1, 0, 1); |
34 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (7, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, 1, 0, 1); |
35 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (8, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, 1, 0, 1); |
36 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (9, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, 1, 0, 1); |
37 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (10, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 1); |
38 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (11, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, 1, 0, 1); |
39 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (12, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, 1, 0, 1); |
40 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (13, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, 1, 0, 1); |
41 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (14, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, 1, 0, 1); |
42 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (15, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, 1, 0, 1); |
43 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (16, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, 1, 0, 1); |
44 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (18, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, 1, 0, 1); |
45 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (19, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, 1, 0, 1); |
46 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (21, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, 1, 0, 1); |
47 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (22, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 1, 0, 1); |
48 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (23, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 1); |
49 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (24, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, 1, 0, 11); |
50 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (25, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 9); |
51 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (26, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, 1, 0, 9); |
52 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (27, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, 1, 0, 9); |
53 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (28, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, 1, 0, 9); |
54 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (29, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, 1, 0, 12); |
55 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (30, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, 1, 0, 1); |
56 | |
57 | |
58 | ALTER TABLE buildfarmjob ENABLE TRIGGER ALL; |
59 | @@ -2719,30 +2719,30 @@ |
60 | |
61 | ALTER TABLE binarypackagebuild DISABLE TRIGGER ALL; |
62 | |
63 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (2, 1, 1, 14); |
64 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (6, 2, 1, 32); |
65 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (7, 3, 6, 20); |
66 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (8, 4, 6, 14); |
67 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (9, 5, 1, 20); |
68 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (10, 6, 1, 26); |
69 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (11, 7, 6, 25); |
70 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (12, 8, 6, 27); |
71 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (13, 9, 1, 17); |
72 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (14, 10, 1, 28); |
73 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (15, 11, 1, 29); |
74 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (16, 12, 11, 20); |
75 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (18, 13, 8, 14); |
76 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (19, 14, 8, 20); |
77 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (21, 15, 1, 33); |
78 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (22, 16, 8, 33); |
79 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (23, 17, 1, 35); |
80 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (24, 18, 1, 33); |
81 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (25, 19, 1, 35); |
82 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (26, 20, 8, 33); |
83 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (27, 21, 1, 20); |
84 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (28, 22, 12, 14); |
85 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (29, 23, 8, 36); |
86 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (30, 24, 12, 14); |
87 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (2, 1, 1, 14, 1, 0, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, NULL, NULL, 0, 2, 1, 1, true, 1); |
88 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (6, 2, 1, 32, 1, 0, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 6, 1, 1, true, 23); |
89 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (7, 3, 6, 20, 1, 0, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, NULL, NULL, 0, 7, 1, 3, true, 14); |
90 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (8, 4, 6, 14, 1, 0, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, NULL, NULL, 0, 8, 1, 3, true, 1); |
91 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (9, 5, 1, 20, 1, 0, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 9, 1, 1, true, 14); |
92 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (10, 6, 1, 26, 1, 0, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 10, 1, 1, true, 20); |
93 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (11, 7, 6, 25, 1, 0, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, NULL, NULL, 0, 11, 1, 3, true, 19); |
94 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (12, 8, 6, 27, 1, 0, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, NULL, 'cpp (>= 4:4.0.1-3), gcc-4.0 (>= 4.0.1-2)', 0, 12, 1, 3, true, 21); |
95 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (13, 9, 1, 17, 1, 0, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, NULL, NULL, 0, 13, 1, 1, true, 10); |
96 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (14, 10, 1, 28, 1, 0, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, NULL, NULL, 0, 14, 1, 1, true, 22); |
97 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (15, 11, 1, 29, 1, 0, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, NULL, NULL, 0, 15, 1, 1, true, 17); |
98 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (16, 12, 11, 20, 1, 0, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, NULL, NULL, 0, 16, 1, 3, true, 14); |
99 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (18, 13, 8, 14, 1, 0, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, NULL, NULL, 0, 18, 1, 10, true, 1); |
100 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (19, 14, 8, 20, 1, 0, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, NULL, NULL, 0, 19, 1, 10, true, 14); |
101 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (21, 15, 1, 33, 1, 0, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, NULL, NULL, 0, 21, 1, 1, true, 24); |
102 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (22, 16, 8, 33, 1, 0, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 91, NULL, 0, 22, 1, 10, true, 24); |
103 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (23, 17, 1, 35, 1, 0, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 23, 1, 1, true, 26); |
104 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (24, 18, 1, 33, 11, 0, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 24, 1, 1, false, 24); |
105 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (25, 19, 1, 35, 9, 0, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 25, 1, 1, false, 26); |
106 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (26, 20, 8, 33, 9, 0, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 26, 1, 10, false, 24); |
107 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (27, 21, 1, 20, 9, 0, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, NULL, NULL, 0, 27, 1, 1, false, 14); |
108 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (28, 22, 12, 14, 9, 0, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, NULL, NULL, 0, 28, 1, 1, false, 1); |
109 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (29, 23, 8, 36, 12, 0, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, NULL, NULL, 0, 29, 1, 10, true, 27); |
110 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (30, 24, 12, 14, 1, 0, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, NULL, NULL, 0, 30, 1, 1, true, 1); |
111 | |
112 | |
113 | ALTER TABLE binarypackagebuild ENABLE TRIGGER ALL; |
114 | |
115 | === modified file 'database/sampledata/current.sql' |
116 | --- database/sampledata/current.sql 2013-01-17 11:57:53 +0000 |
117 | +++ database/sampledata/current.sql 2013-02-01 03:49:23 +0000 |
118 | @@ -2459,30 +2459,30 @@ |
119 | |
120 | ALTER TABLE buildfarmjob DISABLE TRIGGER ALL; |
121 | |
122 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (2, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, 1, 0); |
123 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (6, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, 1, 0); |
124 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (7, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, 1, 0); |
125 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (8, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, 1, 0); |
126 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (9, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, 1, 0); |
127 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (10, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); |
128 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (11, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, 1, 0); |
129 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (12, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, 1, 0); |
130 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (13, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, 1, 0); |
131 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (14, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, 1, 0); |
132 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (15, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, 1, 0); |
133 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (16, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, 1, 0); |
134 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (18, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, 1, 0); |
135 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (19, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, 1, 0); |
136 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (21, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, 1, 0); |
137 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (22, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 1, 0); |
138 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (23, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); |
139 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (24, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, 1, 0); |
140 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (25, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0); |
141 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (26, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, 1, 0); |
142 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (27, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, 1, 0); |
143 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (28, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, 1, 0); |
144 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (29, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, 1, 0); |
145 | -INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count) VALUES (30, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, 1, 0); |
146 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (2, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, 1, 0, 1); |
147 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (6, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, 1, 0, 1); |
148 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (7, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, 1, 0, 1); |
149 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (8, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, 1, 0, 1); |
150 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (9, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, 1, 0, 1); |
151 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (10, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 1); |
152 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (11, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, 1, 0, 1); |
153 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (12, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, 1, 0, 1); |
154 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (13, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, 1, 0, 1); |
155 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (14, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, 1, 0, 1); |
156 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (15, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, 1, 0, 1); |
157 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (16, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, 1, 0, 1); |
158 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (18, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, 1, 0, 1); |
159 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (19, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, 1, 0, 1); |
160 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (21, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, 1, 0, 1); |
161 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (22, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 1, 0, 1); |
162 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (23, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 1); |
163 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (24, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, 1, 0, 11); |
164 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (25, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, 1, 0, 9); |
165 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (26, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, 1, 0, 9); |
166 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (27, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, 1, 0, 9); |
167 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (28, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, 1, 0, 9); |
168 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (29, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, 1, 0, 12); |
169 | +INSERT INTO buildfarmjob (id, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, job_type, failure_count, archive) VALUES (30, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, 1, 0, 1); |
170 | |
171 | |
172 | ALTER TABLE buildfarmjob ENABLE TRIGGER ALL; |
173 | @@ -2656,30 +2656,30 @@ |
174 | |
175 | ALTER TABLE binarypackagebuild DISABLE TRIGGER ALL; |
176 | |
177 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (2, 1, 1, 14); |
178 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (6, 2, 1, 32); |
179 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (7, 3, 6, 20); |
180 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (8, 4, 6, 14); |
181 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (9, 5, 1, 20); |
182 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (10, 6, 1, 26); |
183 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (11, 7, 6, 25); |
184 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (12, 8, 6, 27); |
185 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (13, 9, 1, 17); |
186 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (14, 10, 1, 28); |
187 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (15, 11, 1, 29); |
188 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (16, 12, 11, 20); |
189 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (18, 13, 8, 14); |
190 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (19, 14, 8, 20); |
191 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (21, 15, 1, 33); |
192 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (22, 16, 8, 33); |
193 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (23, 17, 1, 35); |
194 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (24, 18, 1, 33); |
195 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (25, 19, 1, 35); |
196 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (26, 20, 8, 33); |
197 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (27, 21, 1, 20); |
198 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (28, 22, 12, 14); |
199 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (29, 23, 8, 36); |
200 | -INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release) VALUES (30, 24, 12, 14); |
201 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (2, 1, 1, 14, 1, 0, 1, false, '2004-09-27 11:57:13', '2004-09-27 11:55:13', '2004-09-27 11:57:14', NULL, 1, 1, 1, NULL, NULL, 0, 2, 1, 1, true, 1); |
202 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (6, 2, 1, 32, 1, 0, 1, false, '2006-12-01 00:00:00', '2006-12-01 00:00:00', '2006-12-01 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 6, 1, 1, true, 23); |
203 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (7, 3, 6, 20, 1, 0, 1, false, '2005-03-24 00:00:00', '2005-03-24 23:58:43', '2005-03-25 00:00:03', NULL, 1, 1, 1, NULL, NULL, 0, 7, 1, 3, true, 14); |
204 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (8, 4, 6, 14, 1, 0, 1, false, '2005-09-30 00:00:00', NULL, NULL, NULL, NULL, 6, NULL, NULL, NULL, 0, 8, 1, 3, true, 1); |
205 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (9, 5, 1, 20, 1, 0, 1, false, '2005-10-01 00:00:00', '2005-10-01 23:56:41', '2005-10-02 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 9, 1, 1, true, 14); |
206 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (10, 6, 1, 26, 1, 0, 1, false, '2006-01-27 00:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 10, 1, 1, true, 20); |
207 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (11, 7, 6, 25, 1, 0, 1, false, '2006-02-14 00:00:00', NULL, NULL, NULL, NULL, 0, NULL, NULL, NULL, 0, 11, 1, 3, true, 19); |
208 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (12, 8, 6, 27, 1, 0, 1, false, '2006-02-28 00:00:00', '2006-02-27 23:53:59', '2006-02-28 00:00:01', NULL, 1, 3, 1, NULL, 'cpp (>= 4:4.0.1-3), gcc-4.0 (>= 4.0.1-2)', 0, 12, 1, 3, true, 21); |
209 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (13, 9, 1, 17, 1, 0, 1, false, '2006-03-21 00:00:00', '2006-03-21 00:58:33', '2006-03-21 01:00:03', NULL, 1, 5, 1, NULL, NULL, 0, 13, 1, 1, true, 10); |
210 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (14, 10, 1, 28, 1, 0, 1, false, '2006-03-22 00:00:00', '2006-03-21 00:58:32', '2006-03-21 01:00:02', NULL, 1, 5, 1, NULL, NULL, 0, 14, 1, 1, true, 22); |
211 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (15, 11, 1, 29, 1, 0, 1, false, '2006-03-22 00:00:01', '2006-03-21 00:58:30', '2006-03-21 01:00:00', NULL, 1, 5, 1, NULL, NULL, 0, 15, 1, 1, true, 17); |
212 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (16, 12, 11, 20, 1, 0, 1, false, '2005-03-24 00:00:01', '2005-03-24 23:58:42', '2005-03-25 00:00:02', NULL, 1, 1, 1, NULL, NULL, 0, 16, 1, 3, true, 14); |
213 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (18, 13, 8, 14, 1, 0, 1, false, '2004-09-27 11:57:14', '2004-09-27 11:55:12', '2004-09-27 11:57:13', NULL, 1, 1, 1, NULL, NULL, 0, 18, 1, 10, true, 1); |
214 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (19, 14, 8, 20, 1, 0, 1, false, '2005-03-24 00:00:02', '2005-03-24 23:58:41', '2005-03-25 00:00:01', NULL, 1, 1, 1, NULL, NULL, 0, 19, 1, 10, true, 14); |
215 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (21, 15, 1, 33, 1, 0, 1, false, '2006-12-01 00:00:01', NULL, NULL, NULL, NULL, 2, NULL, NULL, NULL, 0, 21, 1, 1, true, 24); |
216 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (22, 16, 8, 33, 1, 0, 1, false, '2007-04-20 00:00:00', '2007-04-19 23:58:41', '2007-04-20 00:00:01', NULL, 1, 7, 1, 91, NULL, 0, 22, 1, 10, true, 24); |
217 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (23, 17, 1, 35, 1, 0, 1, false, '2006-04-11 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 23, 1, 1, true, 26); |
218 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (24, 18, 1, 33, 11, 0, 1, true, '2007-05-30 00:00:00', '2007-05-29 23:58:41', '2007-05-30 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 24, 1, 1, false, 24); |
219 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (25, 19, 1, 35, 9, 0, 1, true, '2007-07-08 12:00:00', NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL, 0, 25, 1, 1, false, 26); |
220 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (26, 20, 8, 33, 9, 0, 1, true, '2007-07-08 00:00:00', '2007-07-07 23:58:41', '2007-07-08 00:00:01', NULL, 1, 2, 1, NULL, NULL, 0, 26, 1, 10, false, 24); |
221 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (27, 21, 1, 20, 9, 0, 1, true, '2007-07-24 00:00:00', '2007-07-23 23:58:41', '2007-07-24 00:00:01', NULL, 1, 1, 1, NULL, NULL, 0, 27, 1, 1, false, 14); |
222 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (28, 22, 12, 14, 9, 0, 3, true, '2007-08-10 00:00:00', '2007-08-10 00:00:00', '2007-08-10 00:00:13', NULL, 1, 1, 1, NULL, NULL, 0, 28, 1, 1, false, 1); |
223 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (29, 23, 8, 36, 12, 0, 1, false, '2007-08-09 21:54:18.553132', '2007-08-09 23:49:59', '2007-08-09 23:59:59', NULL, NULL, 1, NULL, NULL, NULL, 0, 29, 1, 10, true, 27); |
224 | +INSERT INTO binarypackagebuild (id, package_build, distro_arch_series, source_package_release, archive, pocket, processor, virtualized, date_created, date_started, date_finished, date_first_dispatched, builder, status, log, upload_log, dependencies, failure_count, build_farm_job, distribution, distro_series, is_distro_archive, source_package_name) VALUES (30, 24, 12, 14, 1, 0, 3, false, '2007-08-10 00:00:01', '2007-08-10 00:00:01', '2007-08-10 00:00:14', NULL, 1, 1, 1, NULL, NULL, 0, 30, 1, 1, true, 1); |
225 | |
226 | |
227 | ALTER TABLE binarypackagebuild ENABLE TRIGGER ALL; |
228 | |
229 | === modified file 'database/schema/security.cfg' |
230 | --- database/schema/security.cfg 2013-02-01 03:49:23 +0000 |
231 | +++ database/schema/security.cfg 2013-02-01 03:49:23 +0000 |
232 | @@ -2218,7 +2218,6 @@ |
233 | public.answercontact = SELECT, DELETE |
234 | public.branch = SELECT, UPDATE |
235 | public.branchjob = SELECT, DELETE |
236 | -public.binarypackagebuild = SELECT, UPDATE |
237 | public.binarypackagename = SELECT |
238 | public.binarypackagerelease = SELECT |
239 | public.binarypackagepublishinghistory = SELECT, UPDATE |
240 | @@ -2241,7 +2240,6 @@ |
241 | public.bugtaskflat = SELECT |
242 | public.bugwatch = SELECT, UPDATE |
243 | public.bugwatchactivity = SELECT, DELETE |
244 | -public.buildfarmjob = SELECT, UPDATE |
245 | public.codeimportevent = SELECT, DELETE |
246 | public.codeimporteventdata = SELECT, DELETE |
247 | public.codeimportresult = SELECT, DELETE |
248 | @@ -2265,7 +2263,6 @@ |
249 | public.revisionauthor = SELECT, UPDATE |
250 | public.revisioncache = SELECT, DELETE |
251 | public.sourcepackagename = SELECT |
252 | -public.sourcepackagerecipebuild = SELECT, UPDATE |
253 | public.sourcepackagerelease = SELECT |
254 | public.sourcepackagepublishinghistory = SELECT, UPDATE |
255 | public.suggestivepotemplate = INSERT, DELETE |
256 | @@ -2273,7 +2270,6 @@ |
257 | public.teamparticipation = SELECT, DELETE |
258 | public.translationmessage = SELECT, DELETE |
259 | public.translationtemplateitem = SELECT, DELETE |
260 | -public.translationtemplatesbuild = SELECT, UPDATE |
261 | type=user |
262 | |
263 | [garbo_daily] |
264 | |
265 | === modified file 'lib/lp/buildmaster/interfaces/buildfarmjob.py' |
266 | --- lib/lp/buildmaster/interfaces/buildfarmjob.py 2013-01-22 08:31:09 +0000 |
267 | +++ lib/lp/buildmaster/interfaces/buildfarmjob.py 2013-02-01 03:49:23 +0000 |
268 | @@ -179,6 +179,8 @@ |
269 | |
270 | id = Attribute('The build farm job ID.') |
271 | |
272 | + build_farm_job = Attribute('Generic build farm job record') |
273 | + |
274 | processor = Reference( |
275 | IProcessor, title=_("Processor"), required=False, readonly=True, |
276 | description=_( |
277 | |
278 | === modified file 'lib/lp/buildmaster/interfaces/packagebuild.py' |
279 | --- lib/lp/buildmaster/interfaces/packagebuild.py 2013-02-01 03:49:23 +0000 |
280 | +++ lib/lp/buildmaster/interfaces/packagebuild.py 2013-02-01 03:49:23 +0000 |
281 | @@ -72,10 +72,6 @@ |
282 | description=_("A URL for failed upload logs." |
283 | "Will be None if there was no failure."))) |
284 | |
285 | - build_farm_job = Reference( |
286 | - title=_('Build farm job'), schema=IBuildFarmJob, required=True, |
287 | - readonly=True, description=_('The base build farm job.')) |
288 | - |
289 | current_component = Attribute( |
290 | 'Component where the source related to this build was last ' |
291 | 'published.') |
292 | |
293 | === modified file 'lib/lp/buildmaster/model/buildfarmjob.py' |
294 | --- lib/lp/buildmaster/model/buildfarmjob.py 2013-02-01 03:49:23 +0000 |
295 | +++ lib/lp/buildmaster/model/buildfarmjob.py 2013-02-01 03:49:23 +0000 |
296 | @@ -199,8 +199,6 @@ |
297 | archive_id = Int(name='archive') |
298 | archive = Reference(archive_id, 'Archive.id') |
299 | |
300 | - dependencies = None |
301 | - |
302 | def __init__(self, job_type, status=BuildStatus.NEEDSBUILD, |
303 | processor=None, virtualized=None, date_created=None, |
304 | builder=None, archive=None): |
305 | @@ -227,51 +225,47 @@ |
306 | |
307 | @property |
308 | def processor(self): |
309 | - return self.build_farm_job.processor |
310 | + return self._new_processor |
311 | |
312 | @property |
313 | def virtualized(self): |
314 | - return self.build_farm_job.virtualized |
315 | + return self._new_virtualized |
316 | |
317 | @property |
318 | def date_created(self): |
319 | - return self.build_farm_job.date_created |
320 | + return self._new_date_created |
321 | |
322 | @property |
323 | def date_started(self): |
324 | - return self.build_farm_job.date_started |
325 | + return self._new_date_started |
326 | |
327 | @property |
328 | def date_finished(self): |
329 | - return self.build_farm_job.date_finished |
330 | + return self._new_date_finished |
331 | |
332 | @property |
333 | def date_first_dispatched(self): |
334 | - return self.build_farm_job.date_first_dispatched |
335 | + return self._new_date_first_dispatched |
336 | |
337 | @property |
338 | def builder(self): |
339 | - return self.build_farm_job.builder |
340 | + return self._new_builder |
341 | |
342 | @property |
343 | def status(self): |
344 | - return self.build_farm_job.status |
345 | + return self._new_status |
346 | |
347 | @property |
348 | def log(self): |
349 | - return self.build_farm_job.log |
350 | - |
351 | - @property |
352 | - def job_type(self): |
353 | - return self.build_farm_job.job_type |
354 | + return self._new_log |
355 | |
356 | @property |
357 | def failure_count(self): |
358 | - return self.build_farm_job.failure_count |
359 | + return self._new_failure_count |
360 | |
361 | @property |
362 | def dependencies(self): |
363 | - return self.build_farm_job.dependencies |
364 | + return None |
365 | |
366 | @property |
367 | def title(self): |
368 | |
369 | === modified file 'lib/lp/buildmaster/model/packagebuild.py' |
370 | --- lib/lp/buildmaster/model/packagebuild.py 2013-02-01 03:49:23 +0000 |
371 | +++ lib/lp/buildmaster/model/packagebuild.py 2013-02-01 03:49:23 +0000 |
372 | @@ -110,24 +110,20 @@ |
373 | class PackageBuildMixin(BuildFarmJobMixin): |
374 | |
375 | @property |
376 | - def build_farm_job(self): |
377 | - return self.package_build.build_farm_job |
378 | - |
379 | - @property |
380 | def archive(self): |
381 | - return self.package_build.archive |
382 | + return self._new_archive |
383 | |
384 | @property |
385 | def pocket(self): |
386 | - return self.package_build.pocket |
387 | + return self._new_pocket |
388 | |
389 | @property |
390 | def upload_log(self): |
391 | - return self.package_build.upload_log |
392 | + return self._new_upload_log |
393 | |
394 | @property |
395 | def dependencies(self): |
396 | - return self.package_build.dependencies |
397 | + return self._new_dependencies |
398 | |
399 | @property |
400 | def current_component(self): |
401 | |
402 | === modified file 'lib/lp/code/model/sourcepackagerecipe.py' |
403 | --- lib/lp/code/model/sourcepackagerecipe.py 2012-09-05 05:08:26 +0000 |
404 | +++ lib/lp/code/model/sourcepackagerecipe.py 2013-02-01 03:49:23 +0000 |
405 | @@ -17,8 +17,7 @@ |
406 | from pytz import utc |
407 | from storm.expr import ( |
408 | And, |
409 | - Join, |
410 | - RightJoin, |
411 | + LeftJoin, |
412 | ) |
413 | from storm.locals import ( |
414 | Bool, |
415 | @@ -37,8 +36,6 @@ |
416 | ) |
417 | |
418 | from lp.buildmaster.enums import BuildStatus |
419 | -from lp.buildmaster.model.buildfarmjob import BuildFarmJob |
420 | -from lp.buildmaster.model.packagebuild import PackageBuild |
421 | from lp.code.errors import ( |
422 | BuildAlreadyPending, |
423 | BuildNotAllowedForDistro, |
424 | @@ -215,24 +212,25 @@ |
425 | store.add(sprecipe) |
426 | return sprecipe |
427 | |
428 | - @classmethod |
429 | - def findStaleDailyBuilds(cls): |
430 | + @staticmethod |
431 | + def findStaleDailyBuilds(): |
432 | one_day_ago = datetime.now(utc) - timedelta(hours=23, minutes=50) |
433 | - joins = RightJoin( |
434 | - Join( |
435 | - Join(SourcePackageRecipeBuild, PackageBuild, |
436 | - PackageBuild.id == |
437 | - SourcePackageRecipeBuild.package_build_id), |
438 | - BuildFarmJob, |
439 | - And(BuildFarmJob.id == PackageBuild.build_farm_job_id, |
440 | - BuildFarmJob.date_created > one_day_ago)), |
441 | - SourcePackageRecipe, |
442 | - And(SourcePackageRecipeBuild.recipe == SourcePackageRecipe.id, |
443 | - SourcePackageRecipe.daily_build_archive_id == |
444 | - PackageBuild.archive_id)) |
445 | - return IStore(cls).using(joins).find( |
446 | - cls, cls.is_stale == True, cls.build_daily == True, |
447 | - BuildFarmJob.date_created == None).config(distinct=True) |
448 | + joins = ( |
449 | + SourcePackageRecipe, |
450 | + LeftJoin( |
451 | + SourcePackageRecipeBuild, |
452 | + And(SourcePackageRecipeBuild.recipe_id == |
453 | + SourcePackageRecipe.id, |
454 | + SourcePackageRecipeBuild._new_archive_id == |
455 | + SourcePackageRecipe.daily_build_archive_id, |
456 | + SourcePackageRecipeBuild._new_date_created > one_day_ago)), |
457 | + ) |
458 | + return IStore(SourcePackageRecipe).using(*joins).find( |
459 | + SourcePackageRecipe, |
460 | + SourcePackageRecipe.is_stale == True, |
461 | + SourcePackageRecipe.build_daily == True, |
462 | + SourcePackageRecipeBuild._new_date_created == None, |
463 | + ).config(distinct=True) |
464 | |
465 | @staticmethod |
466 | def exists(owner, name): |
467 | @@ -288,10 +286,8 @@ |
468 | pending = IStore(self).find(SourcePackageRecipeBuild, |
469 | SourcePackageRecipeBuild.recipe_id == self.id, |
470 | SourcePackageRecipeBuild.distroseries_id == distroseries.id, |
471 | - PackageBuild.archive_id == archive.id, |
472 | - PackageBuild.id == SourcePackageRecipeBuild.package_build_id, |
473 | - BuildFarmJob.id == PackageBuild.build_farm_job_id, |
474 | - BuildFarmJob.status == BuildStatus.NEEDSBUILD) |
475 | + SourcePackageRecipeBuild._new_archive_id == archive.id, |
476 | + SourcePackageRecipeBuild._new_status == BuildStatus.NEEDSBUILD) |
477 | if pending.any() is not None: |
478 | raise BuildAlreadyPending(self, distroseries) |
479 | |
480 | @@ -323,39 +319,42 @@ |
481 | @property |
482 | def builds(self): |
483 | """See `ISourcePackageRecipe`.""" |
484 | - order_by = (Desc(Greatest( |
485 | - BuildFarmJob.date_started, |
486 | - BuildFarmJob.date_finished)), |
487 | - Desc(BuildFarmJob.date_created), Desc(BuildFarmJob.id)) |
488 | + order_by = ( |
489 | + Desc(Greatest( |
490 | + SourcePackageRecipeBuild._new_date_started, |
491 | + SourcePackageRecipeBuild._new_date_finished)), |
492 | + Desc(SourcePackageRecipeBuild._new_date_created), |
493 | + Desc(SourcePackageRecipeBuild.id)) |
494 | return self._getBuilds(None, order_by) |
495 | |
496 | @property |
497 | def completed_builds(self): |
498 | """See `ISourcePackageRecipe`.""" |
499 | - filter_term = BuildFarmJob.status != BuildStatus.NEEDSBUILD |
500 | - order_by = (Desc(Greatest( |
501 | - BuildFarmJob.date_started, |
502 | - BuildFarmJob.date_finished)), |
503 | - Desc(BuildFarmJob.id)) |
504 | + filter_term = ( |
505 | + SourcePackageRecipeBuild._new_status != BuildStatus.NEEDSBUILD) |
506 | + order_by = ( |
507 | + Desc(Greatest( |
508 | + SourcePackageRecipeBuild._new_date_started, |
509 | + SourcePackageRecipeBuild._new_date_finished)), |
510 | + Desc(SourcePackageRecipeBuild.id)) |
511 | return self._getBuilds(filter_term, order_by) |
512 | |
513 | @property |
514 | def pending_builds(self): |
515 | """See `ISourcePackageRecipe`.""" |
516 | - filter_term = BuildFarmJob.status == BuildStatus.NEEDSBUILD |
517 | + filter_term = ( |
518 | + SourcePackageRecipeBuild._new_status == BuildStatus.NEEDSBUILD) |
519 | # We want to order by date_created but this is the same as ordering |
520 | # by id (since id increases monotonically) and is less expensive. |
521 | - order_by = Desc(BuildFarmJob.id) |
522 | + order_by = Desc(SourcePackageRecipeBuild.id) |
523 | return self._getBuilds(filter_term, order_by) |
524 | |
525 | def _getBuilds(self, filter_term, order_by): |
526 | """The actual query to get the builds.""" |
527 | query_args = [ |
528 | SourcePackageRecipeBuild.recipe == self, |
529 | - SourcePackageRecipeBuild.package_build_id == PackageBuild.id, |
530 | - PackageBuild.build_farm_job_id == BuildFarmJob.id, |
531 | - And(PackageBuild.archive_id == Archive.id, |
532 | - Archive._enabled == True), |
533 | + SourcePackageRecipeBuild._new_archive_id == Archive.id, |
534 | + Archive._enabled == True, |
535 | ] |
536 | if filter_term is not None: |
537 | query_args.append(filter_term) |
538 | @@ -378,19 +377,17 @@ |
539 | def last_build(self): |
540 | """See `ISourcePackageRecipeBuild`.""" |
541 | return self._getBuilds( |
542 | - True, Desc(BuildFarmJob.date_finished)).first() |
543 | + True, Desc(SourcePackageRecipeBuild._new_date_finished)).first() |
544 | |
545 | def getMedianBuildDuration(self): |
546 | """Return the median duration of builds of this recipe.""" |
547 | store = IStore(self) |
548 | result = store.find( |
549 | - BuildFarmJob, |
550 | + SourcePackageRecipeBuild, |
551 | SourcePackageRecipeBuild.recipe == self.id, |
552 | - BuildFarmJob.date_finished != None, |
553 | - BuildFarmJob.id == PackageBuild.build_farm_job_id, |
554 | - SourcePackageRecipeBuild.package_build_id == PackageBuild.id) |
555 | - durations = [build.date_finished - build.date_started for build in |
556 | - result] |
557 | + SourcePackageRecipeBuild._new_date_finished != None) |
558 | + durations = [ |
559 | + build.date_finished - build.date_started for build in result] |
560 | if len(durations) == 0: |
561 | return None |
562 | durations.sort(reverse=True) |
563 | |
564 | === modified file 'lib/lp/code/model/sourcepackagerecipebuild.py' |
565 | --- lib/lp/code/model/sourcepackagerecipebuild.py 2013-02-01 03:49:23 +0000 |
566 | +++ lib/lp/code/model/sourcepackagerecipebuild.py 2013-02-01 03:49:23 +0000 |
567 | @@ -93,6 +93,7 @@ |
568 | package_build = Reference(package_build_id, 'PackageBuild.id') |
569 | |
570 | build_farm_job_type = BuildFarmJobType.RECIPEBRANCHBUILD |
571 | + job_type = build_farm_job_type |
572 | |
573 | id = Int(primary=True) |
574 | |
575 | @@ -150,8 +151,8 @@ |
576 | requester = Reference(requester_id, 'Person.id') |
577 | |
578 | # Migrating from PackageBuild |
579 | - _new_build_farm_job_id = Int(name='build_farm_job') |
580 | - _new_build_farm_job = Reference(_new_build_farm_job_id, BuildFarmJob.id) |
581 | + build_farm_job_id = Int(name='build_farm_job') |
582 | + build_farm_job = Reference(build_farm_job_id, BuildFarmJob.id) |
583 | |
584 | _new_archive_id = Int(name='archive') |
585 | _new_archive = Reference(_new_archive_id, 'Archive.id') |
586 | @@ -216,7 +217,7 @@ |
587 | requester, archive, pocket, date_created): |
588 | """Construct a SourcePackageRecipeBuild.""" |
589 | super(SourcePackageRecipeBuild, self).__init__() |
590 | - self._new_build_farm_job = build_farm_job |
591 | + self.build_farm_job = build_farm_job |
592 | self.package_build = package_build |
593 | self.distroseries = distroseries |
594 | self.recipe = recipe |
595 | @@ -365,16 +366,13 @@ |
596 | |
597 | @classmethod |
598 | def getRecentBuilds(cls, requester, recipe, distroseries, _now=None): |
599 | - from lp.buildmaster.model.buildfarmjob import BuildFarmJob |
600 | if _now is None: |
601 | _now = datetime.now(pytz.UTC) |
602 | store = IMasterStore(SourcePackageRecipeBuild) |
603 | old_threshold = _now - timedelta(days=1) |
604 | return store.find(cls, cls.distroseries_id == distroseries.id, |
605 | cls.requester_id == requester.id, cls.recipe_id == recipe.id, |
606 | - BuildFarmJob.date_created > old_threshold, |
607 | - BuildFarmJob.id == PackageBuild.build_farm_job_id, |
608 | - PackageBuild.id == cls.package_build_id) |
609 | + cls._new_date_created > old_threshold) |
610 | |
611 | def makeJob(self): |
612 | """See `ISourcePackageRecipeBuildJob`.""" |
613 | |
614 | === modified file 'lib/lp/registry/model/sourcepackage.py' |
615 | --- lib/lp/registry/model/sourcepackage.py 2012-11-26 08:33:03 +0000 |
616 | +++ lib/lp/registry/model/sourcepackage.py 2013-02-01 03:49:23 +0000 |
617 | @@ -601,8 +601,8 @@ |
618 | # binary_only parameter as a source package can only have |
619 | # binary builds. |
620 | |
621 | - clauseTables = ['SourcePackageRelease', 'PackageBuild', |
622 | - 'SourcePackagePublishingHistory'] |
623 | + clauseTables = [ |
624 | + 'SourcePackageRelease', 'SourcePackagePublishingHistory'] |
625 | |
626 | condition_clauses = [""" |
627 | BinaryPackageBuild.source_package_release = |
628 | @@ -612,7 +612,7 @@ |
629 | SourcePackagePublishingHistory.archive IN %s AND |
630 | SourcePackagePublishingHistory.sourcepackagerelease = |
631 | SourcePackageRelease.id AND |
632 | - SourcePackagePublishingHistory.archive = PackageBuild.archive |
633 | + SourcePackagePublishingHistory.archive = BinaryPackageBuild.archive |
634 | """ % sqlvalues(self.sourcepackagename, |
635 | self.distroseries, |
636 | list(self.distribution.all_distro_archive_ids))] |
637 | @@ -627,8 +627,8 @@ |
638 | # exclude gina-generated and security (dak-made) builds |
639 | # buildstate == FULLYBUILT && datebuilt == null |
640 | condition_clauses.append( |
641 | - "NOT (BuildFarmJob.status=%s AND " |
642 | - " BuildFarmJob.date_finished is NULL)" |
643 | + "NOT (BinaryPackageBuild.status=%s AND " |
644 | + " BinaryPackageBuild.date_finished is NULL)" |
645 | % sqlvalues(BuildStatus.FULLYBUILT)) |
646 | |
647 | # Ordering according status |
648 | @@ -648,9 +648,9 @@ |
649 | clauseTables.append('BuildQueue') |
650 | condition_clauses.append('BuildQueue.job = BuildPackageJob.job') |
651 | elif build_state == BuildStatus.SUPERSEDED or build_state is None: |
652 | - orderBy = [Desc("BuildFarmJob.date_created")] |
653 | + orderBy = [Desc("BinaryPackageBuild.date_created")] |
654 | else: |
655 | - orderBy = [Desc("BuildFarmJob.date_finished")] |
656 | + orderBy = [Desc("BinaryPackageBuild.date_finished")] |
657 | |
658 | # Fallback to ordering by -id as a tie-breaker. |
659 | orderBy.append(Desc("id")) |
660 | |
661 | === modified file 'lib/lp/scripts/garbo.py' |
662 | --- lib/lp/scripts/garbo.py 2013-02-01 03:49:23 +0000 |
663 | +++ lib/lp/scripts/garbo.py 2013-01-17 00:25:48 +0000 |
664 | @@ -57,8 +57,6 @@ |
665 | BugWatchScheduler, |
666 | MAX_SAMPLE_SIZE, |
667 | ) |
668 | -from lp.buildmaster.model.buildfarmjob import BuildFarmJob |
669 | -from lp.buildmaster.model.packagebuild import PackageBuild |
670 | from lp.code.interfaces.revision import IRevisionSet |
671 | from lp.code.model.codeimportevent import CodeImportEvent |
672 | from lp.code.model.codeimportresult import CodeImportResult |
673 | @@ -66,10 +64,8 @@ |
674 | RevisionAuthor, |
675 | RevisionCache, |
676 | ) |
677 | -from lp.code.model.sourcepackagerecipebuild import SourcePackageRecipeBuild |
678 | from lp.hardwaredb.model.hwdb import HWSubmission |
679 | from lp.registry.model.commercialsubscription import CommercialSubscription |
680 | -from lp.registry.model.distroseries import DistroSeries |
681 | from lp.registry.model.person import Person |
682 | from lp.registry.model.product import Product |
683 | from lp.registry.model.teammembership import TeamMembership |
684 | @@ -108,7 +104,6 @@ |
685 | from lp.services.librarian.model import TimeLimitedToken |
686 | from lp.services.log.logger import PrefixFilter |
687 | from lp.services.looptuner import TunableLoop |
688 | -from lp.services.memcache.interfaces import IMemcacheClient |
689 | from lp.services.oauth.model import OAuthNonce |
690 | from lp.services.openid.model.openidconsumer import OpenIDConsumerNonce |
691 | from lp.services.propertycache import cachedproperty |
692 | @@ -123,10 +118,7 @@ |
693 | ) |
694 | from lp.services.session.model import SessionData |
695 | from lp.services.verification.model.logintoken import LoginToken |
696 | -from lp.soyuz.interfaces.archive import MAIN_ARCHIVE_PURPOSES |
697 | from lp.soyuz.model.archive import Archive |
698 | -from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild |
699 | -from lp.soyuz.model.distroarchseries import DistroArchSeries |
700 | from lp.soyuz.model.publishing import SourcePackagePublishingHistory |
701 | from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache |
702 | from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease |
703 | @@ -137,9 +129,6 @@ |
704 | from lp.translations.model.translationtemplateitem import ( |
705 | TranslationTemplateItem, |
706 | ) |
707 | -from lp.translations.model.translationtemplatesbuild import ( |
708 | - TranslationTemplatesBuild, |
709 | - ) |
710 | from lp.translations.scripts.scrub_pofiletranslator import ( |
711 | ScrubPOFileTranslator, |
712 | ) |
713 | @@ -1346,219 +1335,6 @@ |
714 | transaction.commit() |
715 | |
716 | |
717 | -class BinaryPackageBuildFlattener(TunableLoop): |
718 | - """Populates the new denormalised columns on BinaryPackageBuild.""" |
719 | - |
720 | - maximum_chunk_size = 5000 |
721 | - |
722 | - def __init__(self, log, abort_time=None): |
723 | - super(BinaryPackageBuildFlattener, self).__init__(log, abort_time) |
724 | - |
725 | - self.memcache_key = '%s:bpb-flattener' % config.instance_name |
726 | - watermark = getUtility(IMemcacheClient).get(self.memcache_key) |
727 | - self.start_at = watermark or 0 |
728 | - self.store = IMasterStore(BinaryPackageBuild) |
729 | - |
730 | - def findIDs(self): |
731 | - return self.store.find( |
732 | - BinaryPackageBuild.id, |
733 | - BinaryPackageBuild.id >= self.start_at, |
734 | - ).order_by(BinaryPackageBuild.id) |
735 | - |
736 | - def isDone(self): |
737 | - return ( |
738 | - not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled') |
739 | - or self.findIDs().is_empty()) |
740 | - |
741 | - def __call__(self, chunk_size): |
742 | - """See `ITunableLoop`.""" |
743 | - ids = list(self.findIDs()[:chunk_size]) |
744 | - updated_columns = { |
745 | - BinaryPackageBuild._new_archive_id: PackageBuild.archive_id, |
746 | - BinaryPackageBuild._new_pocket: PackageBuild.pocket, |
747 | - BinaryPackageBuild._new_processor_id: BuildFarmJob.processor_id, |
748 | - BinaryPackageBuild._new_virtualized: BuildFarmJob.virtualized, |
749 | - BinaryPackageBuild._new_date_created: BuildFarmJob.date_created, |
750 | - BinaryPackageBuild._new_date_started: BuildFarmJob.date_started, |
751 | - BinaryPackageBuild._new_date_finished: BuildFarmJob.date_finished, |
752 | - BinaryPackageBuild._new_date_first_dispatched: |
753 | - BuildFarmJob.date_first_dispatched, |
754 | - BinaryPackageBuild._new_builder_id: BuildFarmJob.builder_id, |
755 | - BinaryPackageBuild._new_status: BuildFarmJob.status, |
756 | - BinaryPackageBuild._new_log_id: BuildFarmJob.log_id, |
757 | - BinaryPackageBuild._new_upload_log_id: PackageBuild.upload_log_id, |
758 | - BinaryPackageBuild._new_dependencies: PackageBuild.dependencies, |
759 | - BinaryPackageBuild._new_failure_count: BuildFarmJob.failure_count, |
760 | - BinaryPackageBuild._new_build_farm_job_id: BuildFarmJob.id, |
761 | - BinaryPackageBuild._new_distribution_id: |
762 | - DistroSeries.distributionID, |
763 | - BinaryPackageBuild._new_distro_series_id: DistroSeries.id, |
764 | - BinaryPackageBuild._new_source_package_name_id: |
765 | - SourcePackageRelease.sourcepackagenameID, |
766 | - BinaryPackageBuild._new_is_distro_archive: |
767 | - Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES), |
768 | - } |
769 | - condition = And( |
770 | - BinaryPackageBuild.id.is_in(ids), |
771 | - PackageBuild.id == BinaryPackageBuild.package_build_id, |
772 | - BuildFarmJob.id == PackageBuild.build_farm_job_id) |
773 | - extra_condition = And( |
774 | - condition, |
775 | - SourcePackageRelease.id == |
776 | - BinaryPackageBuild.source_package_release_id, |
777 | - Archive.id == PackageBuild.archive_id, |
778 | - DistroArchSeries.id == BinaryPackageBuild.distro_arch_series_id, |
779 | - DistroSeries.id == DistroArchSeries.distroseriesID) |
780 | - self.store.execute( |
781 | - BulkUpdate( |
782 | - updated_columns, table=BinaryPackageBuild, |
783 | - values=( |
784 | - PackageBuild, BuildFarmJob, Archive, DistroArchSeries, |
785 | - DistroSeries, SourcePackageRelease), |
786 | - where=And(condition, extra_condition))) |
787 | - self.store.execute( |
788 | - BulkUpdate( |
789 | - {BuildFarmJob.archive_id: PackageBuild.archive_id}, |
790 | - table=BuildFarmJob, values=(PackageBuild, BinaryPackageBuild), |
791 | - where=condition)) |
792 | - transaction.commit() |
793 | - self.start_at = ids[-1] + 1 |
794 | - getUtility(IMemcacheClient).set(self.memcache_key, self.start_at) |
795 | - |
796 | - |
797 | -class SourcePackageRecipeBuildFlattener(TunableLoop): |
798 | - """Populates the new denormalised columns on SourcePackageRecipeBuild.""" |
799 | - |
800 | - maximum_chunk_size = 5000 |
801 | - |
802 | - def __init__(self, log, abort_time=None): |
803 | - super(SourcePackageRecipeBuildFlattener, self).__init__( |
804 | - log, abort_time) |
805 | - |
806 | - self.memcache_key = '%s:sprb-flattener' % config.instance_name |
807 | - watermark = getUtility(IMemcacheClient).get(self.memcache_key) |
808 | - self.start_at = watermark or 0 |
809 | - self.store = IMasterStore(SourcePackageRecipeBuild) |
810 | - |
811 | - def findIDs(self): |
812 | - return self.store.find( |
813 | - SourcePackageRecipeBuild.id, |
814 | - SourcePackageRecipeBuild.id >= self.start_at, |
815 | - ).order_by(SourcePackageRecipeBuild.id) |
816 | - |
817 | - def isDone(self): |
818 | - return ( |
819 | - not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled') |
820 | - or self.findIDs().is_empty()) |
821 | - |
822 | - def __call__(self, chunk_size): |
823 | - """See `ITunableLoop`.""" |
824 | - ids = list(self.findIDs()[:chunk_size]) |
825 | - updated_columns = { |
826 | - SourcePackageRecipeBuild._new_archive_id: PackageBuild.archive_id, |
827 | - SourcePackageRecipeBuild._new_pocket: PackageBuild.pocket, |
828 | - SourcePackageRecipeBuild._new_processor_id: |
829 | - BuildFarmJob.processor_id, |
830 | - SourcePackageRecipeBuild._new_virtualized: |
831 | - BuildFarmJob.virtualized, |
832 | - SourcePackageRecipeBuild._new_date_created: |
833 | - BuildFarmJob.date_created, |
834 | - SourcePackageRecipeBuild._new_date_started: |
835 | - BuildFarmJob.date_started, |
836 | - SourcePackageRecipeBuild._new_date_finished: |
837 | - BuildFarmJob.date_finished, |
838 | - SourcePackageRecipeBuild._new_date_first_dispatched: |
839 | - BuildFarmJob.date_first_dispatched, |
840 | - SourcePackageRecipeBuild._new_builder_id: BuildFarmJob.builder_id, |
841 | - SourcePackageRecipeBuild._new_status: BuildFarmJob.status, |
842 | - SourcePackageRecipeBuild._new_log_id: BuildFarmJob.log_id, |
843 | - SourcePackageRecipeBuild._new_upload_log_id: |
844 | - PackageBuild.upload_log_id, |
845 | - SourcePackageRecipeBuild._new_dependencies: |
846 | - PackageBuild.dependencies, |
847 | - SourcePackageRecipeBuild._new_failure_count: |
848 | - BuildFarmJob.failure_count, |
849 | - SourcePackageRecipeBuild._new_build_farm_job_id: BuildFarmJob.id, |
850 | - } |
851 | - condition = And( |
852 | - SourcePackageRecipeBuild.id.is_in(ids), |
853 | - PackageBuild.id == SourcePackageRecipeBuild.package_build_id, |
854 | - BuildFarmJob.id == PackageBuild.build_farm_job_id) |
855 | - self.store.execute( |
856 | - BulkUpdate( |
857 | - updated_columns, table=SourcePackageRecipeBuild, |
858 | - values=(PackageBuild, BuildFarmJob), where=condition)) |
859 | - self.store.execute( |
860 | - BulkUpdate( |
861 | - {BuildFarmJob.archive_id: PackageBuild.archive_id}, |
862 | - table=BuildFarmJob, |
863 | - values=(PackageBuild, SourcePackageRecipeBuild), |
864 | - where=condition)) |
865 | - transaction.commit() |
866 | - self.start_at = ids[-1] + 1 |
867 | - getUtility(IMemcacheClient).set(self.memcache_key, self.start_at) |
868 | - |
869 | - |
870 | -class TranslationTemplatesBuildFlattener(TunableLoop): |
871 | - """Populates the new denormalised columns on TranslationTemplatesBuild.""" |
872 | - |
873 | - maximum_chunk_size = 5000 |
874 | - |
875 | - def __init__(self, log, abort_time=None): |
876 | - super(TranslationTemplatesBuildFlattener, self).__init__( |
877 | - log, abort_time) |
878 | - |
879 | - self.memcache_key = '%s:ttb-flattener' % config.instance_name |
880 | - watermark = getUtility(IMemcacheClient).get(self.memcache_key) |
881 | - self.start_at = watermark or 0 |
882 | - self.store = IMasterStore(TranslationTemplatesBuild) |
883 | - |
884 | - def findIDs(self): |
885 | - return self.store.find( |
886 | - TranslationTemplatesBuild.id, |
887 | - TranslationTemplatesBuild.id >= self.start_at, |
888 | - ).order_by(TranslationTemplatesBuild.id) |
889 | - |
890 | - def isDone(self): |
891 | - return ( |
892 | - not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled') |
893 | - or self.findIDs().is_empty()) |
894 | - |
895 | - def __call__(self, chunk_size): |
896 | - """See `ITunableLoop`.""" |
897 | - ids = list(self.findIDs()[:chunk_size]) |
898 | - updated_columns = { |
899 | - TranslationTemplatesBuild._new_processor_id: |
900 | - BuildFarmJob.processor_id, |
901 | - TranslationTemplatesBuild._new_virtualized: |
902 | - BuildFarmJob.virtualized, |
903 | - TranslationTemplatesBuild._new_date_created: |
904 | - BuildFarmJob.date_created, |
905 | - TranslationTemplatesBuild._new_date_started: |
906 | - BuildFarmJob.date_started, |
907 | - TranslationTemplatesBuild._new_date_finished: |
908 | - BuildFarmJob.date_finished, |
909 | - TranslationTemplatesBuild._new_date_first_dispatched: |
910 | - BuildFarmJob.date_first_dispatched, |
911 | - TranslationTemplatesBuild._new_builder_id: BuildFarmJob.builder_id, |
912 | - TranslationTemplatesBuild._new_status: BuildFarmJob.status, |
913 | - TranslationTemplatesBuild._new_log_id: BuildFarmJob.log_id, |
914 | - TranslationTemplatesBuild._new_failure_count: |
915 | - BuildFarmJob.failure_count, |
916 | - } |
917 | - self.store.execute( |
918 | - BulkUpdate( |
919 | - updated_columns, table=TranslationTemplatesBuild, |
920 | - values=(PackageBuild, BuildFarmJob), |
921 | - where=And( |
922 | - TranslationTemplatesBuild.id.is_in(ids), |
923 | - BuildFarmJob.id == |
924 | - TranslationTemplatesBuild.build_farm_job_id))) |
925 | - transaction.commit() |
926 | - self.start_at = ids[-1] + 1 |
927 | - getUtility(IMemcacheClient).set(self.memcache_key, self.start_at) |
928 | - |
929 | - |
930 | class BaseDatabaseGarbageCollector(LaunchpadCronScript): |
931 | """Abstract base class to run a collection of TunableLoops.""" |
932 | script_name = None # Script name for locking and database user. Override. |
933 | @@ -1814,9 +1590,6 @@ |
934 | UnusedSessionPruner, |
935 | DuplicateSessionPruner, |
936 | BugHeatUpdater, |
937 | - BinaryPackageBuildFlattener, |
938 | - SourcePackageRecipeBuildFlattener, |
939 | - TranslationTemplatesBuildFlattener, |
940 | ] |
941 | experimental_tunable_loops = [] |
942 | |
943 | |
944 | === modified file 'lib/lp/scripts/tests/test_garbo.py' |
945 | --- lib/lp/scripts/tests/test_garbo.py 2013-02-01 03:49:23 +0000 |
946 | +++ lib/lp/scripts/tests/test_garbo.py 2013-01-17 00:25:48 +0000 |
947 | @@ -31,7 +31,6 @@ |
948 | from testtools.matchers import ( |
949 | Equals, |
950 | GreaterThan, |
951 | - MatchesStructure, |
952 | ) |
953 | import transaction |
954 | from zope.component import getUtility |
955 | @@ -43,7 +42,6 @@ |
956 | BugNotification, |
957 | BugNotificationRecipient, |
958 | ) |
959 | -from lp.buildmaster.enums import BuildStatus |
960 | from lp.code.bzr import ( |
961 | BranchFormat, |
962 | RepositoryFormat, |
963 | @@ -60,7 +58,6 @@ |
964 | BranchSharingPolicy, |
965 | BugSharingPolicy, |
966 | ) |
967 | -from lp.code.model.sourcepackagerecipebuild import SourcePackageRecipeBuild |
968 | from lp.registry.interfaces.accesspolicy import IAccessPolicySource |
969 | from lp.registry.interfaces.person import IPersonSet |
970 | from lp.registry.interfaces.teammembership import TeamMembershipStatus |
971 | @@ -117,7 +114,6 @@ |
972 | from lp.services.verification.model.logintoken import LoginToken |
973 | from lp.services.worlddata.interfaces.language import ILanguageSet |
974 | from lp.soyuz.enums import PackagePublishingStatus |
975 | -from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild |
976 | from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache |
977 | from lp.testing import ( |
978 | FakeAdapterMixin, |
979 | @@ -125,10 +121,7 @@ |
980 | TestCase, |
981 | TestCaseWithFactory, |
982 | ) |
983 | -from lp.testing.dbuser import ( |
984 | - dbuser, |
985 | - switch_dbuser, |
986 | - ) |
987 | +from lp.testing.dbuser import switch_dbuser |
988 | from lp.testing.layers import ( |
989 | DatabaseLayer, |
990 | LaunchpadScriptLayer, |
991 | @@ -140,9 +133,6 @@ |
992 | from lp.translations.model.translationtemplateitem import ( |
993 | TranslationTemplateItem, |
994 | ) |
995 | -from lp.translations.model.translationtemplatesbuild import ( |
996 | - TranslationTemplatesBuild, |
997 | - ) |
998 | |
999 | |
1000 | class TestGarboScript(TestCase): |
1001 | @@ -1283,131 +1273,6 @@ |
1002 | 'PopulateLatestPersonSourcePackageReleaseCache') |
1003 | self.assertEqual(spph_2.id, job_data['last_spph_id']) |
1004 | |
1005 | - def test_BinaryPackageBuildFlattener(self): |
1006 | - store = IMasterStore(BinaryPackageBuild) |
1007 | - # Sampledata builds start off with the new columns set to None, |
1008 | - # and garbo won't run without a feature flag set. |
1009 | - self.runHourly() |
1010 | - self.assertNotEqual( |
1011 | - 0, store.find(BinaryPackageBuild, _new_archive=None).count()) |
1012 | - |
1013 | - # But after a garbo run they're all set properly. |
1014 | - with dbuser('testadmin'): |
1015 | - IMasterStore(FeatureFlag).add(FeatureFlag( |
1016 | - u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true')) |
1017 | - self.runHourly() |
1018 | - self.assertEqual( |
1019 | - 0, store.find(BinaryPackageBuild, _new_archive=None).count()) |
1020 | - |
1021 | - with dbuser('testadmin'): |
1022 | - # Create a build with lots of attributes set. |
1023 | - build = self.factory.makeBinaryPackageBuild() |
1024 | - build.gotFailure() |
1025 | - build.updateStatus( |
1026 | - BuildStatus.BUILDING, builder=self.factory.makeBuilder()) |
1027 | - build.updateStatus(BuildStatus.FULLYBUILT) |
1028 | - build.setLog(self.factory.makeLibraryFileAlias()) |
1029 | - build.storeUploadLog('uploaded') |
1030 | - |
1031 | - # Manually unset the build's denormed columns. |
1032 | - attrs = ( |
1033 | - 'archive', 'pocket', 'processor', 'virtualized', |
1034 | - 'date_created', 'date_started', 'date_finished', |
1035 | - 'date_first_dispatched', 'builder', 'status', 'log', |
1036 | - 'upload_log', 'dependencies', 'failure_count', |
1037 | - 'build_farm_job', 'distribution', 'distro_series', |
1038 | - 'source_package_name', 'is_distro_archive') |
1039 | - for attr in attrs: |
1040 | - setattr(removeSecurityProxy(build), '_new_' + attr, None) |
1041 | - removeSecurityProxy(build.build_farm_job).archive = None |
1042 | - self.assertEqual( |
1043 | - 1, store.find(BinaryPackageBuild, _new_archive=None).count()) |
1044 | - self.runHourly() |
1045 | - self.assertEqual( |
1046 | - 0, store.find(BinaryPackageBuild, _new_archive=None).count()) |
1047 | - |
1048 | - self.assertThat( |
1049 | - removeSecurityProxy(build), |
1050 | - MatchesStructure.byEquality( |
1051 | - **dict( |
1052 | - ('_new_' + attr, getattr(build, attr)) for attr in attrs))) |
1053 | - self.assertEqual( |
1054 | - build.archive, removeSecurityProxy(build.build_farm_job).archive) |
1055 | - |
1056 | - def test_SourcePackageRecipeBuildFlattener(self): |
1057 | - store = IMasterStore(BinaryPackageBuild) |
1058 | - with dbuser('testadmin'): |
1059 | - IMasterStore(FeatureFlag).add(FeatureFlag( |
1060 | - u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true')) |
1061 | - |
1062 | - with dbuser('testadmin'): |
1063 | - # Create a build with lots of attributes set. |
1064 | - build = self.factory.makeSourcePackageRecipeBuild() |
1065 | - build.gotFailure() |
1066 | - build.updateStatus( |
1067 | - BuildStatus.BUILDING, builder=self.factory.makeBuilder()) |
1068 | - build.updateStatus(BuildStatus.FULLYBUILT) |
1069 | - build.setLog(self.factory.makeLibraryFileAlias()) |
1070 | - build.storeUploadLog('uploaded') |
1071 | - |
1072 | - # Manually unset the build's denormed columns. |
1073 | - attrs = ( |
1074 | - 'archive', 'pocket', 'processor', 'virtualized', |
1075 | - 'date_created', 'date_started', 'date_finished', |
1076 | - 'date_first_dispatched', 'builder', 'status', 'log', |
1077 | - 'upload_log', 'dependencies', 'failure_count', |
1078 | - 'build_farm_job') |
1079 | - for attr in attrs: |
1080 | - setattr(removeSecurityProxy(build), '_new_' + attr, None) |
1081 | - removeSecurityProxy(build).build_farm_job.archive = None |
1082 | - self.assertEqual( |
1083 | - 1, store.find(SourcePackageRecipeBuild, _new_archive=None).count()) |
1084 | - self.runHourly() |
1085 | - self.assertEqual( |
1086 | - 0, store.find(SourcePackageRecipeBuild, _new_archive=None).count()) |
1087 | - |
1088 | - self.assertThat( |
1089 | - removeSecurityProxy(build), |
1090 | - MatchesStructure.byEquality( |
1091 | - **dict( |
1092 | - ('_new_' + attr, getattr(build, attr)) for attr in attrs))) |
1093 | - self.assertEqual( |
1094 | - build.archive, removeSecurityProxy(build.build_farm_job).archive) |
1095 | - |
1096 | - def test_TranslationTemplatesBuildFlattener(self): |
1097 | - store = IMasterStore(BinaryPackageBuild) |
1098 | - with dbuser('testadmin'): |
1099 | - IMasterStore(FeatureFlag).add(FeatureFlag( |
1100 | - u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true')) |
1101 | - |
1102 | - with dbuser('testadmin'): |
1103 | - # Create a build with lots of attributes set. |
1104 | - build = self.factory.makeTranslationTemplatesBuildJob().build |
1105 | - build.gotFailure() |
1106 | - build.updateStatus( |
1107 | - BuildStatus.BUILDING, builder=self.factory.makeBuilder()) |
1108 | - build.updateStatus(BuildStatus.FULLYBUILT) |
1109 | - build.setLog(self.factory.makeLibraryFileAlias()) |
1110 | - |
1111 | - # Manually unset the build's denormed columns. |
1112 | - attrs = ( |
1113 | - 'processor', 'virtualized', 'date_created', 'date_started', |
1114 | - 'date_finished', 'date_first_dispatched', 'builder', 'status', |
1115 | - 'log', 'failure_count') |
1116 | - for attr in attrs: |
1117 | - setattr(removeSecurityProxy(build), '_new_' + attr, None) |
1118 | - self.assertEqual( |
1119 | - 1, store.find(TranslationTemplatesBuild, _new_status=None).count()) |
1120 | - self.runHourly() |
1121 | - self.assertEqual( |
1122 | - 0, store.find(TranslationTemplatesBuild, _new_status=None).count()) |
1123 | - |
1124 | - self.assertThat( |
1125 | - removeSecurityProxy(build), |
1126 | - MatchesStructure.byEquality( |
1127 | - **dict( |
1128 | - ('_new_' + attr, getattr(build, attr)) for attr in attrs))) |
1129 | - |
1130 | |
1131 | class TestGarboTasks(TestCaseWithFactory): |
1132 | layer = LaunchpadZopelessLayer |
1133 | |
1134 | === modified file 'lib/lp/soyuz/doc/sourcepackagerelease.txt' |
1135 | --- lib/lp/soyuz/doc/sourcepackagerelease.txt 2012-07-05 09:04:09 +0000 |
1136 | +++ lib/lp/soyuz/doc/sourcepackagerelease.txt 2013-02-01 03:49:23 +0000 |
1137 | @@ -60,7 +60,6 @@ |
1138 | Mozilla-firefox 0.9 has got some builds. including a PPA build. The 'builds' |
1139 | property only returns the non-PPA builds. |
1140 | |
1141 | - >>> from lp.buildmaster.model.packagebuild import PackageBuild |
1142 | >>> from lp.registry.interfaces.person import IPersonSet |
1143 | >>> from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild |
1144 | >>> from storm.store import Store |
1145 | @@ -68,8 +67,7 @@ |
1146 | >>> ff_ppa_build = Store.of(cprov_ppa).find( |
1147 | ... BinaryPackageBuild, |
1148 | ... BinaryPackageBuild.source_package_release == spr, |
1149 | - ... BinaryPackageBuild.package_build == PackageBuild.id, |
1150 | - ... PackageBuild.archive == cprov_ppa) |
1151 | + ... BinaryPackageBuild._new_archive == cprov_ppa) |
1152 | >>> ff_ppa_build.count() |
1153 | 1 |
1154 | >>> ff_ppa_build[0].archive.purpose.name |
1155 | |
1156 | === modified file 'lib/lp/soyuz/interfaces/binarypackagebuild.py' |
1157 | --- lib/lp/soyuz/interfaces/binarypackagebuild.py 2013-02-01 03:49:23 +0000 |
1158 | +++ lib/lp/soyuz/interfaces/binarypackagebuild.py 2013-02-01 03:49:23 +0000 |
1159 | @@ -63,10 +63,6 @@ |
1160 | """A Build interface for items requiring launchpad.View.""" |
1161 | id = Int(title=_('ID'), required=True, readonly=True) |
1162 | |
1163 | - package_build = Reference( |
1164 | - title=_('Package build'), schema=IPackageBuild, required=True, |
1165 | - readonly=True, description=_('The base package build')) |
1166 | - |
1167 | # Overridden from IBuildFarmJob to ensure required is True. |
1168 | processor = Reference( |
1169 | title=_("Processor"), schema=IProcessor, |
1170 | @@ -100,9 +96,6 @@ |
1171 | distro_series = Attribute("Direct parent needed by CanonicalURL") |
1172 | arch_tag = exported( |
1173 | Text(title=_("Architecture tag"), required=False)) |
1174 | - source_package_name = Attribute("Source package name") |
1175 | - is_distro_archive = Attribute( |
1176 | - "Whether the target archive belongs to the distro") |
1177 | distributionsourcepackagerelease = Attribute("The page showing the " |
1178 | "details for this sourcepackagerelease in this distribution.") |
1179 | binarypackages = Attribute( |
1180 | |
1181 | === modified file 'lib/lp/soyuz/model/archive.py' |
1182 | --- lib/lp/soyuz/model/archive.py 2013-01-11 00:18:49 +0000 |
1183 | +++ lib/lp/soyuz/model/archive.py 2013-02-01 03:49:23 +0000 |
1184 | @@ -55,8 +55,6 @@ |
1185 | ) |
1186 | from lp.buildmaster.enums import BuildStatus |
1187 | from lp.buildmaster.interfaces.packagebuild import IPackageBuildSet |
1188 | -from lp.buildmaster.model.buildfarmjob import BuildFarmJob |
1189 | -from lp.buildmaster.model.packagebuild import PackageBuild |
1190 | from lp.registry.enums import ( |
1191 | INCLUSIVE_TEAM_POLICY, |
1192 | PersonVisibility, |
1193 | @@ -86,7 +84,10 @@ |
1194 | IStoreSelector, |
1195 | MAIN_STORE, |
1196 | ) |
1197 | -from lp.services.database.lpstorm import ISlaveStore |
1198 | +from lp.services.database.lpstorm import ( |
1199 | + ISlaveStore, |
1200 | + IStore, |
1201 | + ) |
1202 | from lp.services.database.sqlbase import ( |
1203 | cursor, |
1204 | quote, |
1205 | @@ -1117,20 +1118,17 @@ |
1206 | extra_exprs = [] |
1207 | if not include_needsbuild: |
1208 | extra_exprs.append( |
1209 | - BuildFarmJob.status != BuildStatus.NEEDSBUILD) |
1210 | + BinaryPackageBuild._new_status != BuildStatus.NEEDSBUILD) |
1211 | |
1212 | find_spec = ( |
1213 | - BuildFarmJob.status, |
1214 | + BinaryPackageBuild._new_status, |
1215 | Count(BinaryPackageBuild.id), |
1216 | ) |
1217 | - result = store.using( |
1218 | - BinaryPackageBuild, PackageBuild, BuildFarmJob).find( |
1219 | + result = store.find( |
1220 | find_spec, |
1221 | - BinaryPackageBuild.package_build == PackageBuild.id, |
1222 | - PackageBuild.archive == self, |
1223 | - PackageBuild.build_farm_job == BuildFarmJob.id, |
1224 | - *extra_exprs).group_by(BuildFarmJob.status).order_by( |
1225 | - BuildFarmJob.status) |
1226 | + BinaryPackageBuild._new_archive == self, |
1227 | + *extra_exprs).group_by(BinaryPackageBuild._new_status).order_by( |
1228 | + BinaryPackageBuild._new_status) |
1229 | |
1230 | # Create a map for each count summary to a number of buildstates: |
1231 | count_map = { |
1232 | @@ -1898,18 +1896,14 @@ |
1233 | """See `IArchive`.""" |
1234 | store = Store.of(self) |
1235 | |
1236 | - base_query = ( |
1237 | - BinaryPackageBuild.package_build == PackageBuild.id, |
1238 | - PackageBuild.archive == self, |
1239 | - PackageBuild.build_farm_job == BuildFarmJob.id) |
1240 | sprs_building = store.find( |
1241 | BinaryPackageBuild.source_package_release_id, |
1242 | - BuildFarmJob.status == BuildStatus.BUILDING, |
1243 | - *base_query) |
1244 | + BinaryPackageBuild._new_archive == self, |
1245 | + BinaryPackageBuild._new_status == BuildStatus.BUILDING) |
1246 | sprs_waiting = store.find( |
1247 | BinaryPackageBuild.source_package_release_id, |
1248 | - BuildFarmJob.status == BuildStatus.NEEDSBUILD, |
1249 | - *base_query) |
1250 | + BinaryPackageBuild._new_archive == self, |
1251 | + BinaryPackageBuild._new_status == BuildStatus.NEEDSBUILD) |
1252 | |
1253 | # A package is not counted as waiting if it already has at least |
1254 | # one build building. |
1255 | @@ -1924,17 +1918,13 @@ |
1256 | |
1257 | extra_exprs = [] |
1258 | if build_status is not None: |
1259 | - extra_exprs = [ |
1260 | - PackageBuild.build_farm_job == BuildFarmJob.id, |
1261 | - BuildFarmJob.status == build_status, |
1262 | - ] |
1263 | + extra_exprs = [BinaryPackageBuild._new_status == build_status] |
1264 | |
1265 | result_set = store.find( |
1266 | SourcePackageRelease, |
1267 | (BinaryPackageBuild.source_package_release_id == |
1268 | SourcePackageRelease.id), |
1269 | - BinaryPackageBuild.package_build == PackageBuild.id, |
1270 | - PackageBuild.archive == self, |
1271 | + BinaryPackageBuild._new_archive == self, |
1272 | *extra_exprs) |
1273 | |
1274 | result_set.config(distinct=True).order_by(SourcePackageRelease.id) |
1275 | @@ -1974,18 +1964,15 @@ |
1276 | |
1277 | query = """ |
1278 | UPDATE Job SET status = %s |
1279 | - FROM BinaryPackageBuild, PackageBuild, BuildFarmJob, |
1280 | - BuildPackageJob, BuildQueue |
1281 | + FROM BinaryPackageBuild, BuildPackageJob, BuildQueue |
1282 | WHERE |
1283 | - BinaryPackageBuild.package_build = PackageBuild.id |
1284 | -- insert self.id here |
1285 | - AND PackageBuild.archive = %s |
1286 | + BinaryPackageBuild.archive = %s |
1287 | AND BuildPackageJob.build = BinaryPackageBuild.id |
1288 | AND BuildPackageJob.job = BuildQueue.job |
1289 | AND Job.id = BuildQueue.job |
1290 | -- Build is in state BuildStatus.NEEDSBUILD (0) |
1291 | - AND PackageBuild.build_farm_job = BuildFarmJob.id |
1292 | - AND BuildFarmJob.status = %s; |
1293 | + AND BinaryPackageBuild.status = %s; |
1294 | """ % sqlvalues(status, self, BuildStatus.NEEDSBUILD) |
1295 | |
1296 | store = Store.of(self) |
1297 | @@ -2411,19 +2398,13 @@ |
1298 | |
1299 | def getBuildCountersForArchitecture(self, archive, distroarchseries): |
1300 | """See `IArchiveSet`.""" |
1301 | - cur = cursor() |
1302 | - query = """ |
1303 | - SELECT BuildFarmJob.status, count(BuildFarmJob.id) FROM |
1304 | - BinaryPackageBuild, PackageBuild, BuildFarmJob |
1305 | - WHERE |
1306 | - BinaryPackageBuild.package_build = PackageBuild.id AND |
1307 | - PackageBuild.build_farm_job = BuildFarmJob.id AND |
1308 | - PackageBuild.archive = %s AND |
1309 | - BinaryPackageBuild.distro_arch_series = %s |
1310 | - GROUP BY BuildFarmJob.status ORDER BY BuildFarmJob.status; |
1311 | - """ % sqlvalues(archive, distroarchseries) |
1312 | - cur.execute(query) |
1313 | - result = cur.fetchall() |
1314 | + result = IStore(BinaryPackageBuild).find( |
1315 | + (BinaryPackageBuild._new_status, Count(BinaryPackageBuild.id)), |
1316 | + BinaryPackageBuild._new_archive == archive, |
1317 | + BinaryPackageBuild.distro_arch_series == distroarchseries, |
1318 | + ).group_by( |
1319 | + BinaryPackageBuild._new_status |
1320 | + ).order_by(BinaryPackageBuild._new_status) |
1321 | |
1322 | status_map = { |
1323 | 'failed': ( |
1324 | @@ -2452,8 +2433,7 @@ |
1325 | for key, status in status_map.iteritems(): |
1326 | status_and_counters[key] = 0 |
1327 | for status_value, status_counter in result: |
1328 | - status_values = [item.value for item in status] |
1329 | - if status_value in status_values: |
1330 | + if status_value in status: |
1331 | status_and_counters[key] += status_counter |
1332 | |
1333 | return status_and_counters |
1334 | |
1335 | === modified file 'lib/lp/soyuz/model/binarypackagebuild.py' |
1336 | --- lib/lp/soyuz/model/binarypackagebuild.py 2013-02-01 03:49:23 +0000 |
1337 | +++ lib/lp/soyuz/model/binarypackagebuild.py 2013-02-01 03:49:23 +0000 |
1338 | @@ -105,6 +105,7 @@ |
1339 | _defaultOrder = 'id' |
1340 | |
1341 | build_farm_job_type = BuildFarmJobType.PACKAGEBUILD |
1342 | + job_type = build_farm_job_type |
1343 | |
1344 | package_build_id = Int(name='package_build', allow_none=False) |
1345 | package_build = Reference(package_build_id, 'PackageBuild.id') |
1346 | @@ -118,8 +119,8 @@ |
1347 | source_package_release_id, 'SourcePackageRelease.id') |
1348 | |
1349 | # Migrating from PackageBuild |
1350 | - _new_build_farm_job_id = Int(name='build_farm_job') |
1351 | - _new_build_farm_job = Reference(_new_build_farm_job_id, BuildFarmJob.id) |
1352 | + build_farm_job_id = Int(name='build_farm_job') |
1353 | + build_farm_job = Reference(build_farm_job_id, BuildFarmJob.id) |
1354 | |
1355 | _new_archive_id = Int(name='archive') |
1356 | _new_archive = Reference(_new_archive_id, 'Archive.id') |
1357 | @@ -265,16 +266,6 @@ |
1358 | return self.distro_series.distribution |
1359 | |
1360 | @property |
1361 | - def source_package_name(self): |
1362 | - """See `IBinaryPackageBuild`.""" |
1363 | - return self.source_package_release.sourcepackagename |
1364 | - |
1365 | - @property |
1366 | - def is_distro_archive(self): |
1367 | - """See `IBinaryPackageBuild`.""" |
1368 | - return self.archive.is_main |
1369 | - |
1370 | - @property |
1371 | def is_virtualized(self): |
1372 | """See `IBuild`""" |
1373 | return self.archive.require_virtualized |
1374 | @@ -634,17 +625,14 @@ |
1375 | BinaryPackageBuild.distro_arch_series = %s AND |
1376 | SourcePackageRelease.sourcepackagename = SourcePackageName.id AND |
1377 | SourcePackageName.name = %s AND |
1378 | - BinaryPackageBuild.package_build = PackageBuild.id AND |
1379 | - PackageBuild.archive IN %s AND |
1380 | - PackageBuild.build_farm_job = BuildFarmJob.id AND |
1381 | - BuildFarmJob.date_finished IS NOT NULL AND |
1382 | - BuildFarmJob.status = %s |
1383 | + BinaryPackageBuild.archive IN %s AND |
1384 | + BinaryPackageBuild.date_finished IS NOT NULL AND |
1385 | + BinaryPackageBuild.status = %s |
1386 | """ % sqlvalues(self, self.distro_arch_series, |
1387 | self.source_package_release.name, archives, |
1388 | BuildStatus.FULLYBUILT), |
1389 | - orderBy=['-BuildFarmJob.date_finished', '-id'], |
1390 | - clauseTables=['PackageBuild', 'BuildFarmJob', 'SourcePackageName', |
1391 | - 'SourcePackageRelease']) |
1392 | + orderBy=['-date_finished', '-id'], |
1393 | + clauseTables=['SourcePackageName', 'SourcePackageRelease']) |
1394 | |
1395 | estimated_duration = None |
1396 | if bool(completed_builds): |
1397 | @@ -881,8 +869,8 @@ |
1398 | archive, pocket, status=BuildStatus.NEEDSBUILD, |
1399 | date_created=None, builder=None): |
1400 | """See `IBinaryPackageBuildSet`.""" |
1401 | - # Create the PackageBuild to which the new BinaryPackageBuild |
1402 | - # will delegate. |
1403 | + # Create the BuildFarmJob and PackageBuild to which the new |
1404 | + # BinaryPackageBuild will delegate. |
1405 | build_farm_job = getUtility(IBuildFarmJobSource).new( |
1406 | BinaryPackageBuild.build_farm_job_type, status, processor, |
1407 | archive.require_virtualized, date_created, builder, archive) |
1408 | @@ -890,7 +878,7 @@ |
1409 | build_farm_job, archive, pocket) |
1410 | |
1411 | binary_package_build = BinaryPackageBuild( |
1412 | - _new_build_farm_job=build_farm_job, |
1413 | + build_farm_job=build_farm_job, |
1414 | package_build=package_build, |
1415 | distro_arch_series=distro_arch_series, |
1416 | source_package_release=source_package_release, |
1417 | @@ -995,23 +983,18 @@ |
1418 | from lp.soyuz.model.distroarchseries import DistroArchSeries |
1419 | from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease |
1420 | |
1421 | - # Ensure the underlying buildfarmjob and package build tables |
1422 | - # are included. |
1423 | - clauses.extend([ |
1424 | - BinaryPackageBuild.package_build == PackageBuild.id, |
1425 | - PackageBuild.build_farm_job == BuildFarmJob.id]) |
1426 | - origin.extend([BinaryPackageBuild, BuildFarmJob]) |
1427 | + origin.append(BinaryPackageBuild) |
1428 | |
1429 | # Add query clause that filters on build state if the latter is |
1430 | # provided. |
1431 | if status is not None: |
1432 | - clauses.append(BuildFarmJob.status == status) |
1433 | + clauses.append(BinaryPackageBuild._new_status == status) |
1434 | |
1435 | # Add query clause that filters on pocket if the latter is provided. |
1436 | if pocket: |
1437 | if not isinstance(pocket, (list, tuple)): |
1438 | pocket = (pocket,) |
1439 | - clauses.append(PackageBuild.pocket.is_in(pocket)) |
1440 | + clauses.append(BinaryPackageBuild._new_pocket.is_in(pocket)) |
1441 | |
1442 | # Add query clause that filters on architecture tag if provided. |
1443 | if arch_tag is not None: |
1444 | @@ -1044,23 +1027,24 @@ |
1445 | Archive, get_archive_privacy_filter) |
1446 | |
1447 | clauses = [ |
1448 | - PackageBuild.archive_id == Archive.id, |
1449 | - BuildFarmJob.builder_id == builder_id, |
1450 | + BinaryPackageBuild._new_archive_id == Archive.id, |
1451 | + BinaryPackageBuild._new_builder_id == builder_id, |
1452 | get_archive_privacy_filter(user)] |
1453 | - origin = [PackageBuild, Archive] |
1454 | + origin = [Archive] |
1455 | |
1456 | self.handleOptionalParamsForBuildQueries( |
1457 | clauses, origin, status, name, pocket=None, arch_tag=arch_tag) |
1458 | |
1459 | return IStore(BinaryPackageBuild).using(*origin).find( |
1460 | BinaryPackageBuild, *clauses).order_by( |
1461 | - Desc(BuildFarmJob.date_finished), BinaryPackageBuild.id) |
1462 | + Desc(BinaryPackageBuild._new_date_finished), |
1463 | + BinaryPackageBuild.id) |
1464 | |
1465 | def getBuildsForArchive(self, archive, status=None, name=None, |
1466 | pocket=None, arch_tag=None): |
1467 | """See `IBinaryPackageBuildSet`.""" |
1468 | - clauses = [PackageBuild.archive_id == archive.id] |
1469 | - origin = [PackageBuild] |
1470 | + clauses = [BinaryPackageBuild._new_archive_id == archive.id] |
1471 | + origin = [] |
1472 | |
1473 | self.handleOptionalParamsForBuildQueries( |
1474 | clauses, origin, status, name, pocket, arch_tag) |
1475 | @@ -1070,9 +1054,9 @@ |
1476 | # * FULLYBUILT & FAILURES by -datebuilt |
1477 | # It should present the builds in a more natural order. |
1478 | if status == BuildStatus.SUPERSEDED or status is None: |
1479 | - orderBy = [Desc(BuildFarmJob.date_created)] |
1480 | + orderBy = [Desc(BinaryPackageBuild._new_date_created)] |
1481 | else: |
1482 | - orderBy = [Desc(BuildFarmJob.date_finished)] |
1483 | + orderBy = [Desc(BinaryPackageBuild._new_date_finished)] |
1484 | # All orders fallback to id if the primary order doesn't succeed |
1485 | orderBy.append(BinaryPackageBuild.id) |
1486 | |
1487 | @@ -1087,8 +1071,6 @@ |
1488 | if not arch_ids: |
1489 | return EmptyResultSet() |
1490 | |
1491 | - clauseTables = [PackageBuild] |
1492 | - |
1493 | # format clause according single/multiple architecture(s) form |
1494 | if len(arch_ids) == 1: |
1495 | condition_clauses = [('distro_arch_series=%s' |
1496 | @@ -1097,10 +1079,6 @@ |
1497 | condition_clauses = [('distro_arch_series IN %s' |
1498 | % sqlvalues(arch_ids))] |
1499 | |
1500 | - condition_clauses.extend([ |
1501 | - "BinaryPackageBuild.package_build = PackageBuild.id", |
1502 | - "PackageBuild.build_farm_job = BuildFarmJob.id"]) |
1503 | - |
1504 | # XXX cprov 2006-09-25: It would be nice if we could encapsulate |
1505 | # the chunk of code below (which deals with the optional paramenters) |
1506 | # and share it with ISourcePackage.getBuildRecords() |
1507 | @@ -1108,20 +1086,23 @@ |
1508 | # exclude gina-generated and security (dak-made) builds |
1509 | # status == FULLYBUILT && datebuilt == null |
1510 | if status == BuildStatus.FULLYBUILT: |
1511 | - condition_clauses.append("BuildFarmJob.date_finished IS NOT NULL") |
1512 | + condition_clauses.append( |
1513 | + "BinaryPackageBuild.date_finished IS NOT NULL") |
1514 | else: |
1515 | condition_clauses.append( |
1516 | - "(BuildFarmJob.status <> %s OR " |
1517 | - " BuildFarmJob.date_finished IS NOT NULL)" |
1518 | + "(BinaryPackageBuild.status <> %s OR " |
1519 | + " BinaryPackageBuild.date_finished IS NOT NULL)" |
1520 | % sqlvalues(BuildStatus.FULLYBUILT)) |
1521 | |
1522 | # Ordering according status |
1523 | # * NEEDSBUILD, BUILDING & UPLOADING by -lastscore |
1524 | - # * SUPERSEDED & All by -PackageBuild.build_farm_job |
1525 | + # * SUPERSEDED & All by -BinaryPackageBuild.id |
1526 | # (nearly equivalent to -datecreated, but much more |
1527 | # efficient.) |
1528 | # * FULLYBUILT & FAILURES by -datebuilt |
1529 | # It should present the builds in a more natural order. |
1530 | + clauseTables = [] |
1531 | + order_by_table = None |
1532 | if status in [ |
1533 | BuildStatus.NEEDSBUILD, |
1534 | BuildStatus.BUILDING, |
1535 | @@ -1134,12 +1115,10 @@ |
1536 | 'BuildPackageJob.build = BinaryPackageBuild.id') |
1537 | condition_clauses.append('BuildPackageJob.job = BuildQueue.job') |
1538 | elif status == BuildStatus.SUPERSEDED or status is None: |
1539 | - order_by = [Desc(PackageBuild.build_farm_job_id)] |
1540 | - order_by_table = PackageBuild |
1541 | + order_by = [Desc(BinaryPackageBuild.id)] |
1542 | else: |
1543 | - order_by = [Desc(BuildFarmJob.date_finished), |
1544 | + order_by = [Desc(BinaryPackageBuild._new_date_finished), |
1545 | BinaryPackageBuild.id] |
1546 | - order_by_table = BuildFarmJob |
1547 | |
1548 | # End of duplication (see XXX cprov 2006-09-25 above). |
1549 | |
1550 | @@ -1149,11 +1128,14 @@ |
1551 | # Only pick builds from the distribution's main archive to |
1552 | # exclude PPA builds |
1553 | condition_clauses.append( |
1554 | - "PackageBuild.archive IN %s" % |
1555 | + "BinaryPackageBuild.archive IN %s" % |
1556 | sqlvalues(list(distribution.all_distro_archive_ids))) |
1557 | |
1558 | + find_spec = (BinaryPackageBuild,) |
1559 | + if order_by_table: |
1560 | + find_spec = find_spec + (order_by_table,) |
1561 | result_set = Store.of(distribution).using(*clauseTables).find( |
1562 | - (BinaryPackageBuild, order_by_table), *condition_clauses) |
1563 | + find_spec, *condition_clauses) |
1564 | result_set.order_by(*order_by) |
1565 | |
1566 | def get_bpp(result_row): |
1567 | @@ -1182,20 +1164,21 @@ |
1568 | query = """ |
1569 | source_package_release IN %s AND |
1570 | package_build = packagebuild.id AND |
1571 | - archive.id = packagebuild.archive AND |
1572 | + archive.id = binarypackagebuild.archive AND |
1573 | archive.purpose != %s AND |
1574 | packagebuild.build_farm_job = buildfarmjob.id |
1575 | """ % sqlvalues(sourcepackagerelease_ids, ArchivePurpose.PPA) |
1576 | |
1577 | if buildstate is not None: |
1578 | - query += "AND buildfarmjob.status = %s" % sqlvalues(buildstate) |
1579 | + query += ( |
1580 | + "AND binarypackagebuild.status = %s" % sqlvalues(buildstate)) |
1581 | |
1582 | resultset = IStore(BinaryPackageBuild).using( |
1583 | BinaryPackageBuild, PackageBuild, BuildFarmJob, Archive).find( |
1584 | (BinaryPackageBuild, PackageBuild, BuildFarmJob), |
1585 | SQL(query)) |
1586 | resultset.order_by( |
1587 | - Desc(BuildFarmJob.date_created), BinaryPackageBuild.id) |
1588 | + Desc(BinaryPackageBuild._new_date_created), BinaryPackageBuild.id) |
1589 | return DecoratedResultSet(resultset, operator.itemgetter(0)) |
1590 | |
1591 | def getStatusSummaryForBuilds(self, builds): |
1592 | |
1593 | === modified file 'lib/lp/soyuz/model/buildpackagejob.py' |
1594 | --- lib/lp/soyuz/model/buildpackagejob.py 2013-01-22 06:42:23 +0000 |
1595 | +++ lib/lp/soyuz/model/buildpackagejob.py 2013-02-01 03:49:23 +0000 |
1596 | @@ -157,14 +157,13 @@ |
1597 | ) |
1598 | sub_query = """ |
1599 | SELECT TRUE FROM Archive, BinaryPackageBuild, BuildPackageJob, |
1600 | - PackageBuild, BuildFarmJob, DistroArchSeries |
1601 | + DistroArchSeries |
1602 | WHERE |
1603 | BuildPackageJob.job = Job.id AND |
1604 | BuildPackageJob.build = BinaryPackageBuild.id AND |
1605 | BinaryPackageBuild.distro_arch_series = |
1606 | DistroArchSeries.id AND |
1607 | - BinaryPackageBuild.package_build = PackageBuild.id AND |
1608 | - PackageBuild.archive = Archive.id AND |
1609 | + BinaryPackageBuild.archive = Archive.id AND |
1610 | ((Archive.private IS TRUE AND |
1611 | EXISTS ( |
1612 | SELECT SourcePackagePublishingHistory.id |
1613 | @@ -178,8 +177,7 @@ |
1614 | SourcePackagePublishingHistory.status IN %s)) |
1615 | OR |
1616 | archive.private IS FALSE) AND |
1617 | - PackageBuild.build_farm_job = BuildFarmJob.id AND |
1618 | - BuildFarmJob.status = %s |
1619 | + BinaryPackageBuild.status = %s |
1620 | """ % sqlvalues(private_statuses, BuildStatus.NEEDSBUILD) |
1621 | |
1622 | # Ensure that if BUILDING builds exist for the same |
1623 | @@ -201,16 +199,12 @@ |
1624 | sub_query += """ |
1625 | AND Archive.id NOT IN ( |
1626 | SELECT Archive.id |
1627 | - FROM PackageBuild, BuildFarmJob, Archive, |
1628 | - BinaryPackageBuild, DistroArchSeries |
1629 | + FROM Archive, BinaryPackageBuild, DistroArchSeries |
1630 | WHERE |
1631 | - PackageBuild.build_farm_job = BuildFarmJob.id |
1632 | - AND BinaryPackageBuild.package_build = PackageBuild.id |
1633 | - AND BinaryPackageBuild.distro_arch_series |
1634 | - = DistroArchSeries.id |
1635 | + BinaryPackageBuild.distro_arch_series = DistroArchSeries.id |
1636 | AND DistroArchSeries.processorfamily = %s |
1637 | - AND BuildFarmJob.status = %s |
1638 | - AND PackageBuild.archive = Archive.id |
1639 | + AND BinaryPackageBuild.status = %s |
1640 | + AND BinaryPackageBuild.archive = Archive.id |
1641 | AND Archive.purpose = %s |
1642 | AND Archive.private IS FALSE |
1643 | GROUP BY Archive.id |
1644 | |
1645 | === modified file 'lib/lp/soyuz/model/distributionsourcepackagerelease.py' |
1646 | --- lib/lp/soyuz/model/distributionsourcepackagerelease.py 2013-01-07 02:40:55 +0000 |
1647 | +++ lib/lp/soyuz/model/distributionsourcepackagerelease.py 2013-02-01 03:49:23 +0000 |
1648 | @@ -20,8 +20,6 @@ |
1649 | from storm.store import Store |
1650 | from zope.interface import implements |
1651 | |
1652 | -from lp.buildmaster.model.buildfarmjob import BuildFarmJob |
1653 | -from lp.buildmaster.model.packagebuild import PackageBuild |
1654 | from lp.services.database.decoratedresultset import DecoratedResultSet |
1655 | from lp.services.database.sqlbase import sqlvalues |
1656 | from lp.soyuz.interfaces.archive import MAIN_ARCHIVE_PURPOSES |
1657 | @@ -105,8 +103,6 @@ |
1658 | BinaryPackageBuild.distro_arch_series == DistroArchSeries.id, |
1659 | DistroArchSeries.distroseries == DistroSeries.id, |
1660 | DistroSeries.distribution == self.distribution, |
1661 | - BinaryPackageBuild.package_build == PackageBuild.id, |
1662 | - PackageBuild.build_farm_job == BuildFarmJob.id |
1663 | ) |
1664 | |
1665 | # First, get all the builds built in a main archive (this will |
1666 | @@ -114,7 +110,7 @@ |
1667 | builds_built_in_main_archives = Store.of(self.distribution).find( |
1668 | BinaryPackageBuild, |
1669 | builds_for_distro_exprs, |
1670 | - PackageBuild.archive == Archive.id, |
1671 | + BinaryPackageBuild._new_archive == Archive.id, |
1672 | Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES)) |
1673 | |
1674 | # Next get all the builds that have a binary published in the |
1675 | |
1676 | === modified file 'lib/lp/soyuz/model/publishing.py' |
1677 | --- lib/lp/soyuz/model/publishing.py 2013-01-03 00:16:08 +0000 |
1678 | +++ lib/lp/soyuz/model/publishing.py 2013-02-01 03:49:23 +0000 |
1679 | @@ -1579,17 +1579,14 @@ |
1680 | # If an optional list of build states was passed in as a parameter, |
1681 | # ensure that the result is limited to builds in those states. |
1682 | if build_states is not None: |
1683 | - extra_exprs.extend(( |
1684 | - BinaryPackageBuild.package_build == PackageBuild.id, |
1685 | - PackageBuild.build_farm_job == BuildFarmJob.id, |
1686 | - BuildFarmJob.status.is_in(build_states))) |
1687 | + extra_exprs.append( |
1688 | + BinaryPackageBuild._new_status.is_in(build_states)) |
1689 | |
1690 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
1691 | |
1692 | # We'll be looking for builds in the same distroseries as the |
1693 | # SPPH for the same release. |
1694 | builds_for_distroseries_expr = ( |
1695 | - BinaryPackageBuild.package_build == PackageBuild.id, |
1696 | BinaryPackageBuild.distro_arch_series_id == DistroArchSeries.id, |
1697 | SourcePackagePublishingHistory.distroseriesID == |
1698 | DistroArchSeries.distroseriesID, |
1699 | @@ -1603,7 +1600,7 @@ |
1700 | BinaryPackageBuild, |
1701 | builds_for_distroseries_expr, |
1702 | (SourcePackagePublishingHistory.archiveID == |
1703 | - PackageBuild.archive_id), |
1704 | + BinaryPackageBuild._new_archive_id), |
1705 | *extra_exprs) |
1706 | |
1707 | # Next get all the builds that have a binary published in the |
1708 | @@ -1613,7 +1610,7 @@ |
1709 | BinaryPackageBuild, |
1710 | builds_for_distroseries_expr, |
1711 | (SourcePackagePublishingHistory.archiveID != |
1712 | - PackageBuild.archive_id), |
1713 | + BinaryPackageBuild._new_archive_id), |
1714 | BinaryPackagePublishingHistory.archive == |
1715 | SourcePackagePublishingHistory.archiveID, |
1716 | BinaryPackagePublishingHistory.binarypackagerelease == |
1717 | @@ -1737,9 +1734,7 @@ |
1718 | self._getSourceBinaryJoinForSources( |
1719 | source_publication_ids, active_binaries_only=False), |
1720 | BinaryPackagePublishingHistory.datepublished != None, |
1721 | - BinaryPackageBuild.package_build == PackageBuild.id, |
1722 | - PackageBuild.build_farm_job == BuildFarmJob.id, |
1723 | - BuildFarmJob.status.is_in(build_states)) |
1724 | + BinaryPackageBuild._new_status.is_in(build_states)) |
1725 | |
1726 | published_builds.order_by( |
1727 | SourcePackagePublishingHistory.id, |
1728 | |
1729 | === modified file 'lib/lp/soyuz/model/sourcepackagerelease.py' |
1730 | --- lib/lp/soyuz/model/sourcepackagerelease.py 2013-01-07 04:53:37 +0000 |
1731 | +++ lib/lp/soyuz/model/sourcepackagerelease.py 2013-02-01 03:49:23 +0000 |
1732 | @@ -226,13 +226,11 @@ |
1733 | # sourcepackagerelease. |
1734 | return BinaryPackageBuild.select(""" |
1735 | source_package_release = %s AND |
1736 | - package_build = packagebuild.id AND |
1737 | - archive.id = packagebuild.archive AND |
1738 | - packagebuild.build_farm_job = buildfarmjob.id AND |
1739 | + archive.id = binarypackagebuild.archive AND |
1740 | archive.purpose IN %s |
1741 | """ % sqlvalues(self.id, MAIN_ARCHIVE_PURPOSES), |
1742 | - orderBy=['-buildfarmjob.date_created', 'id'], |
1743 | - clauseTables=['Archive', 'PackageBuild', 'BuildFarmJob']) |
1744 | + orderBy=['-date_created', 'id'], |
1745 | + clauseTables=['Archive']) |
1746 | |
1747 | @property |
1748 | def age(self): |
1749 | @@ -447,16 +445,10 @@ |
1750 | # If there was no published binary we have to try to find a |
1751 | # suitable build in all possible location across the distroseries |
1752 | # inheritance tree. See below. |
1753 | - clause_tables = [ |
1754 | - 'BuildFarmJob', |
1755 | - 'PackageBuild', |
1756 | - 'DistroArchSeries', |
1757 | - ] |
1758 | + clause_tables = ['DistroArchSeries'] |
1759 | queries = [ |
1760 | - "BinaryPackageBuild.package_build = PackageBuild.id AND " |
1761 | - "PackageBuild.build_farm_job = BuildFarmJob.id AND " |
1762 | "DistroArchSeries.id = BinaryPackageBuild.distro_arch_series AND " |
1763 | - "PackageBuild.archive = %s AND " |
1764 | + "BinaryPackageBuild.archive = %s AND " |
1765 | "DistroArchSeries.architecturetag = %s AND " |
1766 | "BinaryPackageBuild.source_package_release = %s" % ( |
1767 | sqlvalues(archive.id, distroarchseries.architecturetag, self))] |
1768 | @@ -467,7 +459,7 @@ |
1769 | |
1770 | return BinaryPackageBuild.selectFirst( |
1771 | query, clauseTables=clause_tables, |
1772 | - orderBy=['-BuildFarmJob.date_created']) |
1773 | + orderBy=['-date_created']) |
1774 | |
1775 | def override(self, component=None, section=None, urgency=None): |
1776 | """See ISourcePackageRelease.""" |
1777 | |
1778 | === modified file 'lib/lp/soyuz/tests/test_archive.py' |
1779 | --- lib/lp/soyuz/tests/test_archive.py 2012-10-25 11:02:37 +0000 |
1780 | +++ lib/lp/soyuz/tests/test_archive.py 2013-02-01 03:49:23 +0000 |
1781 | @@ -359,16 +359,13 @@ |
1782 | # Return the count for archive build jobs with the given status. |
1783 | query = """ |
1784 | SELECT COUNT(Job.id) |
1785 | - FROM BinaryPackageBuild, BuildPackageJob, BuildQueue, Job, |
1786 | - PackageBuild, BuildFarmJob |
1787 | + FROM BinaryPackageBuild, BuildPackageJob, BuildQueue, Job |
1788 | WHERE |
1789 | BuildPackageJob.build = BinaryPackageBuild.id |
1790 | AND BuildPackageJob.job = BuildQueue.job |
1791 | AND Job.id = BuildQueue.job |
1792 | - AND BinaryPackageBuild.package_build = PackageBuild.id |
1793 | - AND PackageBuild.archive = %s |
1794 | - AND PackageBuild.build_farm_job = BuildFarmJob.id |
1795 | - AND BuildFarmJob.status = %s |
1796 | + AND BinaryPackageBuild.archive = %s |
1797 | + AND BinaryPackageBuild.status = %s |
1798 | AND Job.status = %s; |
1799 | """ % sqlvalues(archive, BuildStatus.NEEDSBUILD, status) |
1800 | |
1801 | |
1802 | === modified file 'lib/lp/translations/interfaces/translationtemplatesbuild.py' |
1803 | --- lib/lp/translations/interfaces/translationtemplatesbuild.py 2011-12-24 16:54:44 +0000 |
1804 | +++ lib/lp/translations/interfaces/translationtemplatesbuild.py 2013-02-01 03:49:23 +0000 |
1805 | @@ -22,10 +22,6 @@ |
1806 | class ITranslationTemplatesBuild(IBuildFarmJob): |
1807 | """The build information for translation templates builds.""" |
1808 | |
1809 | - build_farm_job = Reference( |
1810 | - title=_("The build farm job that this extends."), |
1811 | - required=True, readonly=True, schema=IBuildFarmJob) |
1812 | - |
1813 | branch = Reference( |
1814 | title=_("The branch that this build operates on."), |
1815 | required=True, readonly=True, schema=IBranch) |
1816 | @@ -34,7 +30,7 @@ |
1817 | class ITranslationTemplatesBuildSource(ISpecificBuildFarmJobSource): |
1818 | """Utility for `ITranslationTemplatesBuild`.""" |
1819 | |
1820 | - def create(build_farm_job, branch): |
1821 | + def create(branch): |
1822 | """Create a new `ITranslationTemplatesBuild`.""" |
1823 | |
1824 | def findByBranch(branch, store=None): |
1825 | |
1826 | === modified file 'lib/lp/translations/model/translationtemplatesbuild.py' |
1827 | --- lib/lp/translations/model/translationtemplatesbuild.py 2013-02-01 03:49:23 +0000 |
1828 | +++ lib/lp/translations/model/translationtemplatesbuild.py 2013-02-01 03:49:23 +0000 |
1829 | @@ -57,6 +57,8 @@ |
1830 | |
1831 | __storm_table__ = 'TranslationTemplatesBuild' |
1832 | |
1833 | + job_type = BuildFarmJobType.TRANSLATIONTEMPLATESBUILD |
1834 | + |
1835 | id = Int(name='id', primary=True) |
1836 | build_farm_job_id = Int(name='build_farm_job', allow_none=False) |
1837 | build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id') |
799 + ).group_by(
800 + BinaryPackageBuild._new_status
801 + ).order_by(BinaryPackageBuild._new_status)
Perhaps this could be re-flowed?
1011 + find_spec = (BinaryPackageBuild,)
Why? Surely this can just be inlined into the .find() call.