Merge lp:~wgrant/launchpad/flatten-bfj-2.5-destroy-garbo into lp:launchpad

Proposed by William Grant
Status: Merged
Approved by: William Grant
Approved revision: no longer in the source branch.
Merged at revision: 16471
Proposed branch: lp:~wgrant/launchpad/flatten-bfj-2.5-destroy-garbo
Merge into: lp:launchpad
Prerequisite: lp:~wgrant/launchpad/flatten-bfj-2-garbo
Diff against target: 543 lines (+1/-380)
5 files modified
database/schema/security.cfg (+0/-4)
lib/lp/scripts/garbo.py (+0/-227)
lib/lp/scripts/tests/test_garbo.py (+1/-136)
lib/lp/soyuz/interfaces/binarypackagebuild.py (+0/-3)
lib/lp/soyuz/model/binarypackagebuild.py (+0/-10)
To merge this branch: bzr merge lp:~wgrant/launchpad/flatten-bfj-2.5-destroy-garbo
Reviewer: William Grant
Review type: code
Status: Approve
Review via email: mp+146337@code.launchpad.net

Commit message

Drop the temporary BPB/SPRB/TTB flattener garbo jobs.

Description of the change

Drop the temporary BPB/SPRB/TTB flattener garbo jobs: the tunable loops that populated the new denormalised columns on BinaryPackageBuild, SourcePackageRecipeBuild and TranslationTemplatesBuild, along with their tests, interface attributes and database grants.
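
For context, each removed loop followed the same shape: a feature-flag-gated TunableLoop that walked its build table in ID order, bulk-copied values from BuildFarmJob/PackageBuild into the new denormalised columns, and tracked progress with a memcache watermark. Below is a condensed sketch of that pattern, simplified from the BinaryPackageBuildFlattener code visible in the diff; the class name is illustrative, the imports are the ones being removed from garbo.py, and the BulkUpdate statements are elided.

class ExampleFlattener(TunableLoop):
    """Illustrative only: the shape shared by the three removed flatteners."""

    maximum_chunk_size = 5000

    def __init__(self, log, abort_time=None):
        super(ExampleFlattener, self).__init__(log, abort_time)
        # Resume from a per-instance memcache watermark so successive
        # garbo runs keep making progress through the table.
        self.memcache_key = '%s:bpb-flattener' % config.instance_name
        watermark = getUtility(IMemcacheClient).get(self.memcache_key)
        self.start_at = watermark or 0
        self.store = IMasterStore(BinaryPackageBuild)

    def findIDs(self):
        return self.store.find(
            BinaryPackageBuild.id,
            BinaryPackageBuild.id >= self.start_at,
            ).order_by(BinaryPackageBuild.id)

    def isDone(self):
        # Gated on soyuz.flatten_bfj.garbo.enabled, so the loop was a
        # no-op unless the flag was explicitly set.
        return (
            not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled')
            or self.findIDs().is_empty())

    def __call__(self, chunk_size):
        ids = list(self.findIDs()[:chunk_size])
        # ... BulkUpdate of the denormalised columns for this chunk,
        # copying from PackageBuild/BuildFarmJob (see the diff below) ...
        transaction.commit()
        self.start_at = ids[-1] + 1
        getUtility(IMemcacheClient).set(self.memcache_key, self.start_at)

With the columns now populated, the loops, their entries in the hourly tunable-loop list, the matching tests and the database grants can all be deleted, which is what this diff does.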

Revision history for this message
William Grant (wgrant):
review: Approve (code)

Preview Diff

1=== modified file 'database/schema/security.cfg'
2--- database/schema/security.cfg 2013-01-31 07:25:36 +0000
3+++ database/schema/security.cfg 2013-02-04 03:57:20 +0000
4@@ -2218,7 +2218,6 @@
5 public.answercontact = SELECT, DELETE
6 public.branch = SELECT, UPDATE
7 public.branchjob = SELECT, DELETE
8-public.binarypackagebuild = SELECT, UPDATE
9 public.binarypackagename = SELECT
10 public.binarypackagerelease = SELECT
11 public.binarypackagepublishinghistory = SELECT, UPDATE
12@@ -2241,7 +2240,6 @@
13 public.bugtaskflat = SELECT
14 public.bugwatch = SELECT, UPDATE
15 public.bugwatchactivity = SELECT, DELETE
16-public.buildfarmjob = SELECT, UPDATE
17 public.codeimportevent = SELECT, DELETE
18 public.codeimporteventdata = SELECT, DELETE
19 public.codeimportresult = SELECT, DELETE
20@@ -2265,7 +2263,6 @@
21 public.revisionauthor = SELECT, UPDATE
22 public.revisioncache = SELECT, DELETE
23 public.sourcepackagename = SELECT
24-public.sourcepackagerecipebuild = SELECT, UPDATE
25 public.sourcepackagerelease = SELECT
26 public.sourcepackagepublishinghistory = SELECT, UPDATE
27 public.suggestivepotemplate = INSERT, DELETE
28@@ -2273,7 +2270,6 @@
29 public.teamparticipation = SELECT, DELETE
30 public.translationmessage = SELECT, DELETE
31 public.translationtemplateitem = SELECT, DELETE
32-public.translationtemplatesbuild = SELECT, UPDATE
33 type=user
34
35 [garbo_daily]
36
37=== modified file 'lib/lp/scripts/garbo.py'
38--- lib/lp/scripts/garbo.py 2013-02-04 01:59:37 +0000
39+++ lib/lp/scripts/garbo.py 2013-02-04 03:57:20 +0000
40@@ -57,8 +57,6 @@
41 BugWatchScheduler,
42 MAX_SAMPLE_SIZE,
43 )
44-from lp.buildmaster.model.buildfarmjob import BuildFarmJob
45-from lp.buildmaster.model.packagebuild import PackageBuild
46 from lp.code.interfaces.revision import IRevisionSet
47 from lp.code.model.codeimportevent import CodeImportEvent
48 from lp.code.model.codeimportresult import CodeImportResult
49@@ -66,10 +64,8 @@
50 RevisionAuthor,
51 RevisionCache,
52 )
53-from lp.code.model.sourcepackagerecipebuild import SourcePackageRecipeBuild
54 from lp.hardwaredb.model.hwdb import HWSubmission
55 from lp.registry.model.commercialsubscription import CommercialSubscription
56-from lp.registry.model.distroseries import DistroSeries
57 from lp.registry.model.person import Person
58 from lp.registry.model.product import Product
59 from lp.registry.model.teammembership import TeamMembership
60@@ -108,7 +104,6 @@
61 from lp.services.librarian.model import TimeLimitedToken
62 from lp.services.log.logger import PrefixFilter
63 from lp.services.looptuner import TunableLoop
64-from lp.services.memcache.interfaces import IMemcacheClient
65 from lp.services.oauth.model import OAuthNonce
66 from lp.services.openid.model.openidconsumer import OpenIDConsumerNonce
67 from lp.services.propertycache import cachedproperty
68@@ -123,10 +118,7 @@
69 )
70 from lp.services.session.model import SessionData
71 from lp.services.verification.model.logintoken import LoginToken
72-from lp.soyuz.interfaces.archive import MAIN_ARCHIVE_PURPOSES
73 from lp.soyuz.model.archive import Archive
74-from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
75-from lp.soyuz.model.distroarchseries import DistroArchSeries
76 from lp.soyuz.model.publishing import SourcePackagePublishingHistory
77 from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache
78 from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
79@@ -137,9 +129,6 @@
80 from lp.translations.model.translationtemplateitem import (
81 TranslationTemplateItem,
82 )
83-from lp.translations.model.translationtemplatesbuild import (
84- TranslationTemplatesBuild,
85- )
86 from lp.translations.scripts.scrub_pofiletranslator import (
87 ScrubPOFileTranslator,
88 )
89@@ -1346,219 +1335,6 @@
90 transaction.commit()
91
92
93-class BinaryPackageBuildFlattener(TunableLoop):
94- """Populates the new denormalised columns on BinaryPackageBuild."""
95-
96- maximum_chunk_size = 5000
97-
98- def __init__(self, log, abort_time=None):
99- super(BinaryPackageBuildFlattener, self).__init__(log, abort_time)
100-
101- self.memcache_key = '%s:bpb-flattener' % config.instance_name
102- watermark = getUtility(IMemcacheClient).get(self.memcache_key)
103- self.start_at = watermark or 0
104- self.store = IMasterStore(BinaryPackageBuild)
105-
106- def findIDs(self):
107- return self.store.find(
108- BinaryPackageBuild.id,
109- BinaryPackageBuild.id >= self.start_at,
110- ).order_by(BinaryPackageBuild.id)
111-
112- def isDone(self):
113- return (
114- not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled')
115- or self.findIDs().is_empty())
116-
117- def __call__(self, chunk_size):
118- """See `ITunableLoop`."""
119- ids = list(self.findIDs()[:chunk_size])
120- updated_columns = {
121- BinaryPackageBuild._new_archive_id: PackageBuild.archive_id,
122- BinaryPackageBuild._new_pocket: PackageBuild.pocket,
123- BinaryPackageBuild._new_processor_id: BuildFarmJob.processor_id,
124- BinaryPackageBuild._new_virtualized: BuildFarmJob.virtualized,
125- BinaryPackageBuild._new_date_created: BuildFarmJob.date_created,
126- BinaryPackageBuild._new_date_started: BuildFarmJob.date_started,
127- BinaryPackageBuild._new_date_finished: BuildFarmJob.date_finished,
128- BinaryPackageBuild._new_date_first_dispatched:
129- BuildFarmJob.date_first_dispatched,
130- BinaryPackageBuild._new_builder_id: BuildFarmJob.builder_id,
131- BinaryPackageBuild._new_status: BuildFarmJob.status,
132- BinaryPackageBuild._new_log_id: BuildFarmJob.log_id,
133- BinaryPackageBuild._new_upload_log_id: PackageBuild.upload_log_id,
134- BinaryPackageBuild._new_dependencies: PackageBuild.dependencies,
135- BinaryPackageBuild._new_failure_count: BuildFarmJob.failure_count,
136- BinaryPackageBuild._new_build_farm_job_id: BuildFarmJob.id,
137- BinaryPackageBuild._new_distribution_id:
138- DistroSeries.distributionID,
139- BinaryPackageBuild._new_distro_series_id: DistroSeries.id,
140- BinaryPackageBuild._new_source_package_name_id:
141- SourcePackageRelease.sourcepackagenameID,
142- BinaryPackageBuild._new_is_distro_archive:
143- Archive.purpose.is_in(MAIN_ARCHIVE_PURPOSES),
144- }
145- condition = And(
146- BinaryPackageBuild.id.is_in(ids),
147- PackageBuild.id == BinaryPackageBuild.package_build_id,
148- BuildFarmJob.id == PackageBuild.build_farm_job_id)
149- extra_condition = And(
150- condition,
151- SourcePackageRelease.id ==
152- BinaryPackageBuild.source_package_release_id,
153- Archive.id == PackageBuild.archive_id,
154- DistroArchSeries.id == BinaryPackageBuild.distro_arch_series_id,
155- DistroSeries.id == DistroArchSeries.distroseriesID)
156- self.store.execute(
157- BulkUpdate(
158- updated_columns, table=BinaryPackageBuild,
159- values=(
160- PackageBuild, BuildFarmJob, Archive, DistroArchSeries,
161- DistroSeries, SourcePackageRelease),
162- where=And(condition, extra_condition)))
163- self.store.execute(
164- BulkUpdate(
165- {BuildFarmJob.archive_id: PackageBuild.archive_id},
166- table=BuildFarmJob, values=(PackageBuild, BinaryPackageBuild),
167- where=condition))
168- transaction.commit()
169- self.start_at = ids[-1] + 1
170- getUtility(IMemcacheClient).set(self.memcache_key, self.start_at)
171-
172-
173-class SourcePackageRecipeBuildFlattener(TunableLoop):
174- """Populates the new denormalised columns on SourcePackageRecipeBuild."""
175-
176- maximum_chunk_size = 5000
177-
178- def __init__(self, log, abort_time=None):
179- super(SourcePackageRecipeBuildFlattener, self).__init__(
180- log, abort_time)
181-
182- self.memcache_key = '%s:sprb-flattener' % config.instance_name
183- watermark = getUtility(IMemcacheClient).get(self.memcache_key)
184- self.start_at = watermark or 0
185- self.store = IMasterStore(SourcePackageRecipeBuild)
186-
187- def findIDs(self):
188- return self.store.find(
189- SourcePackageRecipeBuild.id,
190- SourcePackageRecipeBuild.id >= self.start_at,
191- ).order_by(SourcePackageRecipeBuild.id)
192-
193- def isDone(self):
194- return (
195- not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled')
196- or self.findIDs().is_empty())
197-
198- def __call__(self, chunk_size):
199- """See `ITunableLoop`."""
200- ids = list(self.findIDs()[:chunk_size])
201- updated_columns = {
202- SourcePackageRecipeBuild._new_archive_id: PackageBuild.archive_id,
203- SourcePackageRecipeBuild._new_pocket: PackageBuild.pocket,
204- SourcePackageRecipeBuild._new_processor_id:
205- BuildFarmJob.processor_id,
206- SourcePackageRecipeBuild._new_virtualized:
207- BuildFarmJob.virtualized,
208- SourcePackageRecipeBuild._new_date_created:
209- BuildFarmJob.date_created,
210- SourcePackageRecipeBuild._new_date_started:
211- BuildFarmJob.date_started,
212- SourcePackageRecipeBuild._new_date_finished:
213- BuildFarmJob.date_finished,
214- SourcePackageRecipeBuild._new_date_first_dispatched:
215- BuildFarmJob.date_first_dispatched,
216- SourcePackageRecipeBuild._new_builder_id: BuildFarmJob.builder_id,
217- SourcePackageRecipeBuild._new_status: BuildFarmJob.status,
218- SourcePackageRecipeBuild._new_log_id: BuildFarmJob.log_id,
219- SourcePackageRecipeBuild._new_upload_log_id:
220- PackageBuild.upload_log_id,
221- SourcePackageRecipeBuild._new_dependencies:
222- PackageBuild.dependencies,
223- SourcePackageRecipeBuild._new_failure_count:
224- BuildFarmJob.failure_count,
225- SourcePackageRecipeBuild._new_build_farm_job_id: BuildFarmJob.id,
226- }
227- condition = And(
228- SourcePackageRecipeBuild.id.is_in(ids),
229- PackageBuild.id == SourcePackageRecipeBuild.package_build_id,
230- BuildFarmJob.id == PackageBuild.build_farm_job_id)
231- self.store.execute(
232- BulkUpdate(
233- updated_columns, table=SourcePackageRecipeBuild,
234- values=(PackageBuild, BuildFarmJob), where=condition))
235- self.store.execute(
236- BulkUpdate(
237- {BuildFarmJob.archive_id: PackageBuild.archive_id},
238- table=BuildFarmJob,
239- values=(PackageBuild, SourcePackageRecipeBuild),
240- where=condition))
241- transaction.commit()
242- self.start_at = ids[-1] + 1
243- getUtility(IMemcacheClient).set(self.memcache_key, self.start_at)
244-
245-
246-class TranslationTemplatesBuildFlattener(TunableLoop):
247- """Populates the new denormalised columns on TranslationTemplatesBuild."""
248-
249- maximum_chunk_size = 5000
250-
251- def __init__(self, log, abort_time=None):
252- super(TranslationTemplatesBuildFlattener, self).__init__(
253- log, abort_time)
254-
255- self.memcache_key = '%s:ttb-flattener' % config.instance_name
256- watermark = getUtility(IMemcacheClient).get(self.memcache_key)
257- self.start_at = watermark or 0
258- self.store = IMasterStore(TranslationTemplatesBuild)
259-
260- def findIDs(self):
261- return self.store.find(
262- TranslationTemplatesBuild.id,
263- TranslationTemplatesBuild.id >= self.start_at,
264- ).order_by(TranslationTemplatesBuild.id)
265-
266- def isDone(self):
267- return (
268- not getFeatureFlag('soyuz.flatten_bfj.garbo.enabled')
269- or self.findIDs().is_empty())
270-
271- def __call__(self, chunk_size):
272- """See `ITunableLoop`."""
273- ids = list(self.findIDs()[:chunk_size])
274- updated_columns = {
275- TranslationTemplatesBuild._new_processor_id:
276- BuildFarmJob.processor_id,
277- TranslationTemplatesBuild._new_virtualized:
278- BuildFarmJob.virtualized,
279- TranslationTemplatesBuild._new_date_created:
280- BuildFarmJob.date_created,
281- TranslationTemplatesBuild._new_date_started:
282- BuildFarmJob.date_started,
283- TranslationTemplatesBuild._new_date_finished:
284- BuildFarmJob.date_finished,
285- TranslationTemplatesBuild._new_date_first_dispatched:
286- BuildFarmJob.date_first_dispatched,
287- TranslationTemplatesBuild._new_builder_id: BuildFarmJob.builder_id,
288- TranslationTemplatesBuild._new_status: BuildFarmJob.status,
289- TranslationTemplatesBuild._new_log_id: BuildFarmJob.log_id,
290- TranslationTemplatesBuild._new_failure_count:
291- BuildFarmJob.failure_count,
292- }
293- self.store.execute(
294- BulkUpdate(
295- updated_columns, table=TranslationTemplatesBuild,
296- values=BuildFarmJob,
297- where=And(
298- TranslationTemplatesBuild.id.is_in(ids),
299- BuildFarmJob.id ==
300- TranslationTemplatesBuild.build_farm_job_id)))
301- transaction.commit()
302- self.start_at = ids[-1] + 1
303- getUtility(IMemcacheClient).set(self.memcache_key, self.start_at)
304-
305-
306 class BaseDatabaseGarbageCollector(LaunchpadCronScript):
307 """Abstract base class to run a collection of TunableLoops."""
308 script_name = None # Script name for locking and database user. Override.
309@@ -1814,9 +1590,6 @@
310 UnusedSessionPruner,
311 DuplicateSessionPruner,
312 BugHeatUpdater,
313- BinaryPackageBuildFlattener,
314- SourcePackageRecipeBuildFlattener,
315- TranslationTemplatesBuildFlattener,
316 ]
317 experimental_tunable_loops = []
318
319
320=== modified file 'lib/lp/scripts/tests/test_garbo.py'
321--- lib/lp/scripts/tests/test_garbo.py 2013-02-01 02:16:55 +0000
322+++ lib/lp/scripts/tests/test_garbo.py 2013-02-04 03:57:20 +0000
323@@ -31,7 +31,6 @@
324 from testtools.matchers import (
325 Equals,
326 GreaterThan,
327- MatchesStructure,
328 )
329 import transaction
330 from zope.component import getUtility
331@@ -43,7 +42,6 @@
332 BugNotification,
333 BugNotificationRecipient,
334 )
335-from lp.buildmaster.enums import BuildStatus
336 from lp.code.bzr import (
337 BranchFormat,
338 RepositoryFormat,
339@@ -60,7 +58,6 @@
340 BranchSharingPolicy,
341 BugSharingPolicy,
342 )
343-from lp.code.model.sourcepackagerecipebuild import SourcePackageRecipeBuild
344 from lp.registry.interfaces.accesspolicy import IAccessPolicySource
345 from lp.registry.interfaces.person import IPersonSet
346 from lp.registry.interfaces.teammembership import TeamMembershipStatus
347@@ -117,7 +114,6 @@
348 from lp.services.verification.model.logintoken import LoginToken
349 from lp.services.worlddata.interfaces.language import ILanguageSet
350 from lp.soyuz.enums import PackagePublishingStatus
351-from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
352 from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache
353 from lp.testing import (
354 FakeAdapterMixin,
355@@ -125,10 +121,7 @@
356 TestCase,
357 TestCaseWithFactory,
358 )
359-from lp.testing.dbuser import (
360- dbuser,
361- switch_dbuser,
362- )
363+from lp.testing.dbuser import switch_dbuser
364 from lp.testing.layers import (
365 DatabaseLayer,
366 LaunchpadScriptLayer,
367@@ -140,9 +133,6 @@
368 from lp.translations.model.translationtemplateitem import (
369 TranslationTemplateItem,
370 )
371-from lp.translations.model.translationtemplatesbuild import (
372- TranslationTemplatesBuild,
373- )
374
375
376 class TestGarboScript(TestCase):
377@@ -1283,131 +1273,6 @@
378 'PopulateLatestPersonSourcePackageReleaseCache')
379 self.assertEqual(spph_2.id, job_data['last_spph_id'])
380
381- def test_BinaryPackageBuildFlattener(self):
382- store = IMasterStore(BinaryPackageBuild)
383- # Sampledata builds start off with the new columns set to None,
384- # and garbo won't run without a feature flag set.
385- self.runHourly()
386- self.assertNotEqual(
387- 0, store.find(BinaryPackageBuild, _new_archive=None).count())
388-
389- # But after a garbo run they're all set properly.
390- with dbuser('testadmin'):
391- IMasterStore(FeatureFlag).add(FeatureFlag(
392- u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true'))
393- self.runHourly()
394- self.assertEqual(
395- 0, store.find(BinaryPackageBuild, _new_archive=None).count())
396-
397- with dbuser('testadmin'):
398- # Create a build with lots of attributes set.
399- build = self.factory.makeBinaryPackageBuild()
400- build.gotFailure()
401- build.updateStatus(
402- BuildStatus.BUILDING, builder=self.factory.makeBuilder())
403- build.updateStatus(BuildStatus.FULLYBUILT)
404- build.setLog(self.factory.makeLibraryFileAlias())
405- build.storeUploadLog('uploaded')
406-
407- # Manually unset the build's denormed columns.
408- attrs = (
409- 'archive', 'pocket', 'processor', 'virtualized',
410- 'date_created', 'date_started', 'date_finished',
411- 'date_first_dispatched', 'builder', 'status', 'log',
412- 'upload_log', 'dependencies', 'failure_count',
413- 'build_farm_job', 'distribution', 'distro_series',
414- 'source_package_name', 'is_distro_archive')
415- for attr in attrs:
416- setattr(removeSecurityProxy(build), '_new_' + attr, None)
417- removeSecurityProxy(build.build_farm_job).archive = None
418- self.assertEqual(
419- 1, store.find(BinaryPackageBuild, _new_archive=None).count())
420- self.runHourly()
421- self.assertEqual(
422- 0, store.find(BinaryPackageBuild, _new_archive=None).count())
423-
424- self.assertThat(
425- removeSecurityProxy(build),
426- MatchesStructure.byEquality(
427- **dict(
428- ('_new_' + attr, getattr(build, attr)) for attr in attrs)))
429- self.assertEqual(
430- build.archive, removeSecurityProxy(build.build_farm_job).archive)
431-
432- def test_SourcePackageRecipeBuildFlattener(self):
433- store = IMasterStore(BinaryPackageBuild)
434- with dbuser('testadmin'):
435- IMasterStore(FeatureFlag).add(FeatureFlag(
436- u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true'))
437-
438- with dbuser('testadmin'):
439- # Create a build with lots of attributes set.
440- build = self.factory.makeSourcePackageRecipeBuild()
441- build.gotFailure()
442- build.updateStatus(
443- BuildStatus.BUILDING, builder=self.factory.makeBuilder())
444- build.updateStatus(BuildStatus.FULLYBUILT)
445- build.setLog(self.factory.makeLibraryFileAlias())
446- build.storeUploadLog('uploaded')
447-
448- # Manually unset the build's denormed columns.
449- attrs = (
450- 'archive', 'pocket', 'processor', 'virtualized',
451- 'date_created', 'date_started', 'date_finished',
452- 'date_first_dispatched', 'builder', 'status', 'log',
453- 'upload_log', 'dependencies', 'failure_count',
454- 'build_farm_job')
455- for attr in attrs:
456- setattr(removeSecurityProxy(build), '_new_' + attr, None)
457- removeSecurityProxy(build).build_farm_job.archive = None
458- self.assertEqual(
459- 1, store.find(SourcePackageRecipeBuild, _new_archive=None).count())
460- self.runHourly()
461- self.assertEqual(
462- 0, store.find(SourcePackageRecipeBuild, _new_archive=None).count())
463-
464- self.assertThat(
465- removeSecurityProxy(build),
466- MatchesStructure.byEquality(
467- **dict(
468- ('_new_' + attr, getattr(build, attr)) for attr in attrs)))
469- self.assertEqual(
470- build.archive, removeSecurityProxy(build.build_farm_job).archive)
471-
472- def test_TranslationTemplatesBuildFlattener(self):
473- store = IMasterStore(BinaryPackageBuild)
474- with dbuser('testadmin'):
475- IMasterStore(FeatureFlag).add(FeatureFlag(
476- u'default', 0, u'soyuz.flatten_bfj.garbo.enabled', u'true'))
477-
478- with dbuser('testadmin'):
479- # Create a build with lots of attributes set.
480- build = self.factory.makeTranslationTemplatesBuildJob().build
481- build.gotFailure()
482- build.updateStatus(
483- BuildStatus.BUILDING, builder=self.factory.makeBuilder())
484- build.updateStatus(BuildStatus.FULLYBUILT)
485- build.setLog(self.factory.makeLibraryFileAlias())
486-
487- # Manually unset the build's denormed columns.
488- attrs = (
489- 'processor', 'virtualized', 'date_created', 'date_started',
490- 'date_finished', 'date_first_dispatched', 'builder', 'status',
491- 'log', 'failure_count')
492- for attr in attrs:
493- setattr(removeSecurityProxy(build), '_new_' + attr, None)
494- self.assertEqual(
495- 1, store.find(TranslationTemplatesBuild, _new_status=None).count())
496- self.runHourly()
497- self.assertEqual(
498- 0, store.find(TranslationTemplatesBuild, _new_status=None).count())
499-
500- self.assertThat(
501- removeSecurityProxy(build),
502- MatchesStructure.byEquality(
503- **dict(
504- ('_new_' + attr, getattr(build, attr)) for attr in attrs)))
505-
506
507 class TestGarboTasks(TestCaseWithFactory):
508 layer = LaunchpadZopelessLayer
509
510=== modified file 'lib/lp/soyuz/interfaces/binarypackagebuild.py'
511--- lib/lp/soyuz/interfaces/binarypackagebuild.py 2013-02-01 02:16:55 +0000
512+++ lib/lp/soyuz/interfaces/binarypackagebuild.py 2013-02-04 03:57:20 +0000
513@@ -100,9 +100,6 @@
514 distro_series = Attribute("Direct parent needed by CanonicalURL")
515 arch_tag = exported(
516 Text(title=_("Architecture tag"), required=False))
517- source_package_name = Attribute("Source package name")
518- is_distro_archive = Attribute(
519- "Whether the target archive belongs to the distro")
520 distributionsourcepackagerelease = Attribute("The page showing the "
521 "details for this sourcepackagerelease in this distribution.")
522 binarypackages = Attribute(
523
524=== modified file 'lib/lp/soyuz/model/binarypackagebuild.py'
525--- lib/lp/soyuz/model/binarypackagebuild.py 2013-02-04 03:48:35 +0000
526+++ lib/lp/soyuz/model/binarypackagebuild.py 2013-02-04 03:57:20 +0000
527@@ -265,16 +265,6 @@
528 return self.distro_series.distribution
529
530 @property
531- def source_package_name(self):
532- """See `IBinaryPackageBuild`."""
533- return self.source_package_release.sourcepackagename
534-
535- @property
536- def is_distro_archive(self):
537- """See `IBinaryPackageBuild`."""
538- return self.archive.is_main
539-
540- @property
541 def is_virtualized(self):
542 """See `IBuild`"""
543 return self.archive.require_virtualized