Merge lp:~abentley/launchpad/archivejob into lp:launchpad

Proposed by Aaron Bentley on 2012-04-24
Status: Merged
Approved by: Aaron Bentley on 2012-04-24
Approved revision: no longer in the source branch.
Merged at revision: 15148
Proposed branch: lp:~abentley/launchpad/archivejob
Merge into: lp:launchpad
Diff against target: 843 lines (+0/-801)
6 files modified
lib/lp/soyuz/enums.py (+0/-12)
lib/lp/soyuz/interfaces/archivejob.py (+0/-54)
lib/lp/soyuz/model/archivejob.py (+0/-142)
lib/lp/soyuz/model/copyarchivejob.py (+0/-153)
lib/lp/soyuz/tests/test_archivejob.py (+0/-47)
lib/lp/soyuz/tests/test_copyarchivejob.py (+0/-393)
To merge this branch: bzr merge lp:~abentley/launchpad/archivejob
Reviewer Review Type Date Requested Status
Deryck Hodge (community) 2012-04-24 Approve on 2012-04-24
Review via email: mp+103331@code.launchpad.net

Commit Message

Remove unused code.

Description of the Change

= Summary =
Remove unused CopyArchiveJob

== Pre-implementation notes ==
Discussed with Julian

== LOC Rationale ==
Reduces LOC count.

== Implementation details ==
CopyArchiveJob turns out to be dead code. It is the only kind of ArchiveJob, so ArchiveJob is also dead code, as is ArchiveJobType, the enum used to specify ArchiveJob. This branch removes them and their associated tests.

== Tests ==
None

== Demo and Q/A ==
None

= Launchpad lint =

Checking for conflicts and issues in changed files.

Linting changed files:
  lib/lp/soyuz/enums.py

To post a comment you must log in.
Deryck Hodge (deryck) wrote :

Yay, for LOC credit!

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'lib/lp/soyuz/enums.py'
2--- lib/lp/soyuz/enums.py 2011-12-19 23:38:16 +0000
3+++ lib/lp/soyuz/enums.py 2012-04-24 17:38:01 +0000
4@@ -5,7 +5,6 @@
5
6 __metaclass__ = type
7 __all__ = [
8- 'ArchiveJobType',
9 'ArchivePermissionType',
10 'ArchivePurpose',
11 'ArchiveStatus',
12@@ -40,17 +39,6 @@
13 re_bug_numbers = re.compile(r"\#?\s?(\d+)")
14
15
16-class ArchiveJobType(DBEnumeratedType):
17- """Values that IArchiveJob.job_type can take."""
18-
19- COPY_ARCHIVE = DBItem(0, """
20- Create a copy archive.
21-
22- This job creates a copy archive from the current state of
23- the archive.
24- """)
25-
26-
27 class ArchivePermissionType(DBEnumeratedType):
28 """Archive Permission Type.
29
30
31=== removed file 'lib/lp/soyuz/interfaces/archivejob.py'
32--- lib/lp/soyuz/interfaces/archivejob.py 2011-12-24 16:54:44 +0000
33+++ lib/lp/soyuz/interfaces/archivejob.py 1970-01-01 00:00:00 +0000
34@@ -1,54 +0,0 @@
35-# Copyright 2010 Canonical Ltd. This software is licensed under the
36-# GNU Affero General Public License version 3 (see the file LICENSE).
37-
38-from zope.interface import (
39- Attribute,
40- Interface,
41- )
42-from zope.schema import (
43- Int,
44- Object,
45- )
46-
47-from lp import _
48-from lp.services.job.interfaces.job import (
49- IJob,
50- IJobSource,
51- IRunnableJob,
52- )
53-from lp.soyuz.interfaces.archive import IArchive
54-
55-
56-class IArchiveJob(Interface):
57- """A Job related to an Archive."""
58-
59- id = Int(
60- title=_('DB ID'), required=True, readonly=True,
61- description=_("The tracking number for this job."))
62-
63- archive = Object(
64- title=_('The archive this job is about.'), schema=IArchive,
65- required=True)
66-
67- job = Object(
68- title=_('The common Job attributes'), schema=IJob, required=True)
69-
70- metadata = Attribute('A dict of data about the job.')
71-
72- def destroySelf():
73- """Destroy this object."""
74-
75-
76-class IArchiveJobSource(IJobSource):
77- """An interface for acquiring IArchiveJobs."""
78-
79- def create(archive):
80- """Create a new IArchiveJobs for an archive."""
81-
82-
83-class ICopyArchiveJob(IRunnableJob):
84- """A Job to copy archives."""
85-
86-
87-class ICopyArchiveJobSource(IArchiveJobSource):
88- """Interface for acquiring CopyArchiveJobs."""
89
90=== removed file 'lib/lp/soyuz/model/archivejob.py'
91--- lib/lp/soyuz/model/archivejob.py 2012-03-04 09:53:04 +0000
92+++ lib/lp/soyuz/model/archivejob.py 1970-01-01 00:00:00 +0000
93@@ -1,142 +0,0 @@
94-# Copyright 2010 Canonical Ltd. This software is licensed under the
95-# GNU Affero General Public License version 3 (see the file LICENSE).
96-
97-__metaclass__ = object
98-
99-from lazr.delegates import delegates
100-import simplejson
101-from sqlobject import SQLObjectNotFound
102-from storm.expr import And
103-from storm.locals import (
104- Int,
105- Reference,
106- Unicode,
107- )
108-from zope.component import getUtility
109-from zope.interface import (
110- classProvides,
111- implements,
112- )
113-
114-from lp.services.database.enumcol import EnumCol
115-from lp.services.database.stormbase import StormBase
116-from lp.services.job.model.job import Job
117-from lp.services.job.runner import BaseRunnableJob
118-from lp.services.webapp.interfaces import (
119- DEFAULT_FLAVOR,
120- IStoreSelector,
121- MAIN_STORE,
122- MASTER_FLAVOR,
123- )
124-from lp.soyuz.enums import ArchiveJobType
125-from lp.soyuz.interfaces.archivejob import (
126- IArchiveJob,
127- IArchiveJobSource,
128- )
129-from lp.soyuz.model.archive import Archive
130-
131-
132-class ArchiveJob(StormBase):
133- """Base class for jobs related to Archives."""
134-
135- implements(IArchiveJob)
136-
137- __storm_table__ = 'archivejob'
138-
139- id = Int(primary=True)
140-
141- job_id = Int(name='job')
142- job = Reference(job_id, Job.id)
143-
144- archive_id = Int(name='archive')
145- archive = Reference(archive_id, Archive.id)
146-
147- job_type = EnumCol(enum=ArchiveJobType, notNull=True)
148-
149- _json_data = Unicode('json_data')
150-
151- @property
152- def metadata(self):
153- return simplejson.loads(self._json_data)
154-
155- def __init__(self, archive, job_type, metadata):
156- """Create an ArchiveJob.
157-
158- :param archive: the archive this job relates to.
159- :param job_type: the bugjobtype of this job.
160- :param metadata: the type-specific variables, as a json-compatible
161- dict.
162- """
163- super(ArchiveJob, self).__init__()
164- json_data = simplejson.dumps(metadata)
165- self.job = Job()
166- self.archive = archive
167- self.job_type = job_type
168- # XXX AaronBentley 2009-01-29 bug=322819: This should be a bytestring,
169- # but the db representation is unicode.
170- self._json_data = json_data.decode('utf-8')
171-
172- @classmethod
173- def get(cls, key):
174- """Return the instance of this class whose key is supplied."""
175- store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
176- instance = store.get(cls, key)
177- if instance is None:
178- raise SQLObjectNotFound(
179- 'No occurence of %s has key %s' % (cls.__name__, key))
180- return instance
181-
182-
183-class ArchiveJobDerived(BaseRunnableJob):
184- """Intermediate class for deriving from ArchiveJob."""
185- delegates(IArchiveJob)
186- classProvides(IArchiveJobSource)
187-
188- def __init__(self, job):
189- self.context = job
190-
191- @classmethod
192- def create(cls, archive, metadata=None):
193- """See `IArchiveJob`."""
194- if metadata is None:
195- metadata = {}
196- job = ArchiveJob(archive, cls.class_job_type, metadata)
197- return cls(job)
198-
199- @classmethod
200- def get(cls, job_id):
201- """Get a job by id.
202-
203- :return: the ArchiveJob with the specified id, as the current
204- ArchiveJobDerived subclass.
205- :raises: SQLObjectNotFound if there is no job with the specified id,
206- or its job_type does not match the desired subclass.
207- """
208- job = ArchiveJob.get(job_id)
209- if job.job_type != cls.class_job_type:
210- raise SQLObjectNotFound(
211- 'No object found with id %d and type %s' % (job_id,
212- cls.class_job_type.title))
213- return cls(job)
214-
215- @classmethod
216- def iterReady(cls):
217- """Iterate through all ready ArchiveJobs."""
218- store = getUtility(IStoreSelector).get(MAIN_STORE, MASTER_FLAVOR)
219- jobs = store.find(
220- ArchiveJob,
221- And(ArchiveJob.job_type == cls.class_job_type,
222- ArchiveJob.job == Job.id,
223- Job.id.is_in(Job.ready_jobs),
224- ArchiveJob.archive == Archive.id))
225- return (cls(job) for job in jobs)
226-
227- def getOopsVars(self):
228- """See `IRunnableJob`."""
229- vars = BaseRunnableJob.getOopsVars(self)
230- vars.extend([
231- ('archive_id', self.context.archive.id),
232- ('archive_job_id', self.context.id),
233- ('archive_job_type', self.context.job_type.title),
234- ])
235- return vars
236
237=== removed file 'lib/lp/soyuz/model/copyarchivejob.py'
238--- lib/lp/soyuz/model/copyarchivejob.py 2012-01-01 02:58:52 +0000
239+++ lib/lp/soyuz/model/copyarchivejob.py 1970-01-01 00:00:00 +0000
240@@ -1,153 +0,0 @@
241-# Copyright 2010 Canonical Ltd. This software is licensed under the
242-# GNU Affero General Public License version 3 (see the file LICENSE).
243-
244-__metaclass__ = object
245-
246-from zope.component import getUtility
247-from zope.interface import (
248- classProvides,
249- implements,
250- )
251-
252-from lp.registry.interfaces.distroseries import IDistroSeriesSet
253-from lp.registry.interfaces.pocket import PackagePublishingPocket
254-from lp.services.job.model.job import Job
255-from lp.services.webapp.interfaces import (
256- DEFAULT_FLAVOR,
257- IStoreSelector,
258- MAIN_STORE,
259- )
260-from lp.soyuz.adapters.packagelocation import PackageLocation
261-from lp.soyuz.enums import ArchiveJobType
262-from lp.soyuz.interfaces.archive import IArchiveSet
263-from lp.soyuz.interfaces.archivejob import (
264- ICopyArchiveJob,
265- ICopyArchiveJobSource,
266- )
267-from lp.soyuz.interfaces.component import IComponentSet
268-from lp.soyuz.interfaces.packagecloner import IPackageCloner
269-from lp.soyuz.interfaces.packageset import IPackagesetSet
270-from lp.soyuz.interfaces.processor import IProcessorFamilySet
271-from lp.soyuz.model.archivejob import (
272- ArchiveJob,
273- ArchiveJobDerived,
274- )
275-
276-
277-class CopyArchiveJob(ArchiveJobDerived):
278-
279- implements(ICopyArchiveJob)
280-
281- class_job_type = ArchiveJobType.COPY_ARCHIVE
282- classProvides(ICopyArchiveJobSource)
283-
284- @classmethod
285- def create(cls, target_archive, source_archive,
286- source_series, source_pocket, target_series, target_pocket,
287- target_component=None, proc_families=None, packagesets=None,
288- merge=False):
289- """See `ICopyArchiveJobSource`."""
290- store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
291- job_for_archive = store.find(
292- ArchiveJob,
293- ArchiveJob.archive == target_archive,
294- ArchiveJob.job_type == cls.class_job_type,
295- ArchiveJob.job == Job.id,
296- Job.id.is_in(Job.ready_jobs)
297- ).any()
298-
299- if job_for_archive is not None:
300- raise ValueError(
301- "CopyArchiveJob already in progress for %s" % target_archive)
302- else:
303- if proc_families is None:
304- proc_families = []
305- if len(proc_families) > 0 and merge:
306- raise ValueError("Can't specify the architectures for merge.")
307- proc_family_names = [p.name for p in proc_families]
308- if packagesets is None:
309- packagesets = []
310- packageset_names = [p.name for p in packagesets]
311- target_component_id = None
312- if target_component is not None:
313- target_component_id = target_component.id
314- metadata = {
315- 'source_archive_id': source_archive.id,
316- 'source_distroseries_id': source_series.id,
317- 'source_pocket_value': source_pocket.value,
318- 'target_distroseries_id': target_series.id,
319- 'target_pocket_value': target_pocket.value,
320- 'target_component_id': target_component_id,
321- 'proc_family_names': proc_family_names,
322- 'packageset_names': packageset_names,
323- 'merge': merge,
324- }
325- return super(CopyArchiveJob, cls).create(target_archive, metadata)
326-
327- def getOopsVars(self):
328- """See `ArchiveJobDerived`."""
329- vars = ArchiveJobDerived.getOopsVars(self)
330- vars.extend([
331- ('source_archive_id', self.metadata['source_archive_id']),
332- ('source_distroseries_id',
333- self.metadata['source_distroseries_id']),
334- ('target_distroseries_id',
335- self.metadata['target_distroseries_id']),
336- ('source_pocket_value', self.metadata['source_pocket_value']),
337- ('target_pocket_value', self.metadata['target_pocket_value']),
338- ('target_component_id', self.metadata['target_component_id']),
339- ('merge', self.metadata['merge']),
340- ])
341- return vars
342-
343- def getSourceLocation(self):
344- """Get the PackageLocation for the source."""
345- # TODO: handle things going bye-bye before we get here.
346- source_archive_id = self.metadata['source_archive_id']
347- source_archive = getUtility(IArchiveSet).get(source_archive_id)
348- source_distroseries_id = self.metadata['source_distroseries_id']
349- source_distroseries = getUtility(IDistroSeriesSet).get(
350- source_distroseries_id)
351- source_distribution = source_distroseries.distribution
352- source_pocket_value = self.metadata['source_pocket_value']
353- source_pocket = PackagePublishingPocket.items[source_pocket_value]
354- packageset_names = self.metadata['packageset_names']
355- packagesets = [getUtility(IPackagesetSet).getByName(name)
356- for name in packageset_names]
357- source_location = PackageLocation(
358- source_archive, source_distribution, source_distroseries,
359- source_pocket, packagesets=packagesets)
360- return source_location
361-
362- def getTargetLocation(self):
363- """Get the PackageLocation for the target."""
364- # TODO: handle things going bye-bye before we get here.
365- target_distroseries_id = self.metadata['target_distroseries_id']
366- target_distroseries = getUtility(IDistroSeriesSet).get(
367- target_distroseries_id)
368- target_distribution = target_distroseries.distribution
369- target_pocket_value = self.metadata['target_pocket_value']
370- target_pocket = PackagePublishingPocket.items[target_pocket_value]
371- target_location = PackageLocation(
372- self.archive, target_distribution, target_distroseries,
373- target_pocket)
374- target_component_id = self.metadata['target_component_id']
375- if target_component_id is not None:
376- target_location.component = getUtility(IComponentSet).get(
377- target_component_id)
378- return target_location
379-
380- def run(self):
381- """See `IRunnableJob`."""
382- source_location = self.getSourceLocation()
383- target_location = self.getTargetLocation()
384- proc_family_names = self.metadata['proc_family_names']
385- proc_family_set = getUtility(IProcessorFamilySet)
386- proc_families = [proc_family_set.getByName(p)
387- for p in proc_family_names]
388- package_cloner = getUtility(IPackageCloner)
389- if self.metadata['merge']:
390- package_cloner.mergeCopy(source_location, target_location)
391- else:
392- package_cloner.clonePackages(
393- source_location, target_location, proc_families=proc_families)
394
395=== removed file 'lib/lp/soyuz/tests/test_archivejob.py'
396--- lib/lp/soyuz/tests/test_archivejob.py 2012-01-01 02:58:52 +0000
397+++ lib/lp/soyuz/tests/test_archivejob.py 1970-01-01 00:00:00 +0000
398@@ -1,47 +0,0 @@
399-# Copyright 2010 Canonical Ltd. This software is licensed under the
400-# GNU Affero General Public License version 3 (see the file LICENSE).
401-
402-from lp.soyuz.enums import ArchiveJobType
403-from lp.soyuz.model.archivejob import (
404- ArchiveJob,
405- ArchiveJobDerived,
406- )
407-from lp.testing import TestCaseWithFactory
408-from lp.testing.layers import DatabaseFunctionalLayer
409-
410-
411-class ArchiveJobTestCase(TestCaseWithFactory):
412- """Test case for basic ArchiveJob gubbins."""
413-
414- layer = DatabaseFunctionalLayer
415-
416- def test_instantiate(self):
417- # ArchiveJob.__init__() instantiates a ArchiveJob instance.
418- archive = self.factory.makeArchive()
419-
420- metadata = ('some', 'arbitrary', 'metadata')
421- archive_job = ArchiveJob(
422- archive, ArchiveJobType.COPY_ARCHIVE, metadata)
423-
424- self.assertEqual(archive, archive_job.archive)
425- self.assertEqual(ArchiveJobType.COPY_ARCHIVE, archive_job.job_type)
426-
427- # When we actually access the ArchiveJob's metadata it gets
428- # deserialized from JSON, so the representation returned by
429- # archive_job.metadata will be different from what we originally
430- # passed in.
431- metadata_expected = [u'some', u'arbitrary', u'metadata']
432- self.assertEqual(metadata_expected, archive_job.metadata)
433-
434-
435-class ArchiveJobDerivedTestCase(TestCaseWithFactory):
436- """Test case for the ArchiveJobDerived class."""
437-
438- layer = DatabaseFunctionalLayer
439-
440- def test_create_explodes(self):
441- # ArchiveJobDerived.create() will blow up because it needs to be
442- # subclassed to work properly.
443- archive = self.factory.makeArchive()
444- self.assertRaises(
445- AttributeError, ArchiveJobDerived.create, archive)
446
447=== removed file 'lib/lp/soyuz/tests/test_copyarchivejob.py'
448--- lib/lp/soyuz/tests/test_copyarchivejob.py 2012-01-01 02:58:52 +0000
449+++ lib/lp/soyuz/tests/test_copyarchivejob.py 1970-01-01 00:00:00 +0000
450@@ -1,393 +0,0 @@
451-# Copyright 2009 Canonical Ltd. This software is licensed under the
452-# GNU Affero General Public License version 3 (see the file LICENSE).
453-
454-__metaclass__ = type
455-
456-from zope.component import getUtility
457-from zope.security.proxy import removeSecurityProxy
458-
459-from lp.buildmaster.enums import BuildStatus
460-from lp.registry.interfaces.pocket import PackagePublishingPocket
461-from lp.soyuz.adapters.packagelocation import PackageLocation
462-from lp.soyuz.enums import (
463- ArchivePurpose,
464- PackagePublishingStatus,
465- )
466-from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
467-from lp.soyuz.model.copyarchivejob import CopyArchiveJob
468-from lp.soyuz.model.processor import ProcessorFamilySet
469-from lp.testing import (
470- celebrity_logged_in,
471- TestCaseWithFactory,
472- )
473-from lp.testing.layers import DatabaseFunctionalLayer
474-
475-
476-class CopyArchiveJobTests(TestCaseWithFactory):
477- """Tests for CopyArchiveJob."""
478-
479- layer = DatabaseFunctionalLayer
480-
481- def test_getOopsVars(self):
482- archive = self.factory.makeArchive()
483- args = self.makeDummyArgs()
484- target_distroseries = self.factory.makeDistroSeries()
485- source_pocket = PackagePublishingPocket.RELEASE
486- target_pocket = PackagePublishingPocket.BACKPORTS
487- target_component = self.factory.makeComponent()
488- job = CopyArchiveJob.create(
489- archive, args['source_archive'], args['distroseries'],
490- source_pocket, target_distroseries, target_pocket,
491- target_component=target_component)
492- vars = job.getOopsVars()
493- self.assertIn(('archive_id', archive.id), vars)
494- self.assertIn(('archive_job_id', job.context.id), vars)
495- self.assertIn(('archive_job_type', job.context.job_type.title), vars)
496- self.assertIn(('source_archive_id', args['source_archive'].id), vars)
497- self.assertIn(
498- ('source_distroseries_id', args['distroseries'].id), vars)
499- self.assertIn(
500- ('target_distroseries_id', target_distroseries.id), vars)
501- self.assertIn(('source_pocket_value', source_pocket.value), vars)
502- self.assertIn(('target_pocket_value', target_pocket.value), vars)
503- self.assertIn(
504- ('target_component_id', target_component.id), vars)
505- self.assertIn(('merge', False), vars)
506-
507- def makeDummyArgs(self):
508- args = {}
509- distro = self.factory.makeDistribution()
510- args['distroseries'] = self.factory.makeDistroSeries(
511- distribution=distro)
512- args['pocket'] = self.factory.getAnyPocket()
513- args['source_archive'] = self.factory.makeArchive(
514- distribution=distro)
515- return args
516-
517- def test_error_if_already_exists(self):
518- target_archive = self.factory.makeArchive()
519- args = self.makeDummyArgs()
520- CopyArchiveJob.create(
521- target_archive, args['source_archive'], args['distroseries'],
522- args['pocket'], args['distroseries'], args['pocket'])
523- self.assertEqual(1, self._getJobCount())
524- args = self.makeDummyArgs()
525- self.assertRaises(
526- ValueError, CopyArchiveJob.create, target_archive,
527- args['source_archive'], args['distroseries'], args['pocket'],
528- args['distroseries'], args['pocket'])
529-
530- def test_create_sets_source_archive_id(self):
531- target_archive = self.factory.makeArchive()
532- args = self.makeDummyArgs()
533- source_archive = self.factory.makeArchive()
534- job = CopyArchiveJob.create(
535- target_archive, source_archive, args['distroseries'],
536- args['pocket'], args['distroseries'], args['pocket'])
537- self.assertEqual(
538- source_archive.id, job.metadata['source_archive_id'])
539-
540- def test_create_sets_source_series_id(self):
541- target_archive = self.factory.makeArchive()
542- args = self.makeDummyArgs()
543- source_distroseries = self.factory.makeDistroSeries()
544- job = CopyArchiveJob.create(
545- target_archive, args['source_archive'], source_distroseries,
546- args['pocket'], args['distroseries'], args['pocket'])
547- self.assertEqual(
548- source_distroseries.id, job.metadata['source_distroseries_id'])
549-
550- def test_create_sets_source_pocket_value(self):
551- target_archive = self.factory.makeArchive()
552- args = self.makeDummyArgs()
553- source_pocket = PackagePublishingPocket.RELEASE
554- target_pocket = PackagePublishingPocket.BACKPORTS
555- job = CopyArchiveJob.create(
556- target_archive, args['source_archive'], args['distroseries'],
557- source_pocket, args['distroseries'], target_pocket)
558- self.assertEqual(
559- source_pocket.value, job.metadata['source_pocket_value'])
560-
561- def test_create_sets_target_pocket_value(self):
562- target_archive = self.factory.makeArchive()
563- args = self.makeDummyArgs()
564- source_pocket = PackagePublishingPocket.RELEASE
565- target_pocket = PackagePublishingPocket.BACKPORTS
566- job = CopyArchiveJob.create(
567- target_archive, args['source_archive'], args['distroseries'],
568- source_pocket, args['distroseries'], target_pocket)
569- self.assertEqual(
570- target_pocket.value, job.metadata['target_pocket_value'])
571-
572- def test_create_sets_target_distroseries_id(self):
573- target_archive = self.factory.makeArchive()
574- args = self.makeDummyArgs()
575- target_distroseries = self.factory.makeDistroSeries()
576- job = CopyArchiveJob.create(
577- target_archive, args['source_archive'], args['distroseries'],
578- args['pocket'], target_distroseries, args['pocket'])
579- self.assertEqual(
580- target_distroseries.id, job.metadata['target_distroseries_id'])
581-
582- def test_create_sets_target_component_id(self):
583- target_archive = self.factory.makeArchive()
584- args = self.makeDummyArgs()
585- target_component = self.factory.makeComponent()
586- job = CopyArchiveJob.create(
587- target_archive, args['source_archive'], args['distroseries'],
588- args['pocket'], args['distroseries'], args['pocket'],
589- target_component=target_component)
590- self.assertEqual(
591- target_component.id, job.metadata['target_component_id'])
592-
593- def test_create_sets_target_component_id_to_None_if_unspecified(self):
594- target_archive = self.factory.makeArchive()
595- args = self.makeDummyArgs()
596- job = CopyArchiveJob.create(
597- target_archive, args['source_archive'], args['distroseries'],
598- args['pocket'], args['distroseries'], args['pocket'])
599- self.assertEqual(None, job.metadata['target_component_id'])
600-
601- def test_create_sets_proc_family_ids(self):
602- target_archive = self.factory.makeArchive()
603- args = self.makeDummyArgs()
604- family1 = self.factory.makeProcessorFamily(name="armel")
605- family2 = self.factory.makeProcessorFamily(name="ia64")
606- job = CopyArchiveJob.create(
607- target_archive, args['source_archive'], args['distroseries'],
608- args['pocket'], args['distroseries'], args['pocket'],
609- proc_families=[family1, family2])
610- self.assertEqual(
611- [f.name for f in [family1, family2]],
612- job.metadata['proc_family_names'])
613-
614- def test_error_on_merge_with_proc_families(self):
615- target_archive = self.factory.makeArchive()
616- args = self.makeDummyArgs()
617- family1 = self.factory.makeProcessorFamily(name="armel")
618- family2 = self.factory.makeProcessorFamily(name="ia64")
619- self.assertRaises(
620- ValueError, CopyArchiveJob.create, target_archive,
621- args['source_archive'], args['distroseries'], args['pocket'],
622- args['distroseries'], args['pocket'],
623- proc_families=[family1, family2], merge=True)
624-
625- def test_create_sets_source_package_set_ids(self):
626- target_archive = self.factory.makeArchive()
627- args = self.makeDummyArgs()
628- packagesets = [
629- self.factory.makePackageset(),
630- self.factory.makePackageset(),
631- ]
632- job = CopyArchiveJob.create(
633- target_archive, args['source_archive'], args['distroseries'],
634- args['pocket'], args['distroseries'], args['pocket'],
635- packagesets=packagesets)
636- self.assertEqual(
637- [p.name for p in packagesets], job.metadata['packageset_names'])
638-
639- def test_create_sets_merge_False_by_default(self):
640- target_archive = self.factory.makeArchive()
641- args = self.makeDummyArgs()
642- job = CopyArchiveJob.create(
643- target_archive, args['source_archive'], args['distroseries'],
644- args['pocket'], args['distroseries'], args['pocket'])
645- self.assertEqual(False, job.metadata['merge'])
646-
647- def test_create_sets_merge_True_on_request(self):
648- target_archive = self.factory.makeArchive()
649- args = self.makeDummyArgs()
650- job = CopyArchiveJob.create(
651- target_archive, args['source_archive'], args['distroseries'],
652- args['pocket'], args['distroseries'], args['pocket'], merge=True)
653- self.assertEqual(True, job.metadata['merge'])
654-
655- def test_get_source_location(self):
656- target_archive = self.factory.makeArchive()
657- args = self.makeDummyArgs()
658- source_distroseries = self.factory.makeDistroSeries()
659- source_pocket = PackagePublishingPocket.RELEASE
660- target_pocket = PackagePublishingPocket.BACKPORTS
661- job = CopyArchiveJob.create(
662- target_archive, args['source_archive'], source_distroseries,
663- source_pocket, args['distroseries'], target_pocket)
664- location = job.getSourceLocation()
665- expected_location = PackageLocation(
666- args['source_archive'], source_distroseries.distribution,
667- source_distroseries, source_pocket)
668- self.assertEqual(expected_location, location)
669-
670- def test_get_source_location_with_packagesets(self):
671- target_archive = self.factory.makeArchive()
672- args = self.makeDummyArgs()
673- source_distroseries = self.factory.makeDistroSeries()
674- source_pocket = PackagePublishingPocket.RELEASE
675- target_pocket = PackagePublishingPocket.BACKPORTS
676- packagesets = [
677- self.factory.makePackageset(),
678- self.factory.makePackageset(),
679- ]
680- job = CopyArchiveJob.create(
681- target_archive, args['source_archive'], source_distroseries,
682- source_pocket, args['distroseries'], target_pocket,
683- packagesets=packagesets)
684- location = job.getSourceLocation()
685- expected_location = PackageLocation(
686- args['source_archive'], source_distroseries.distribution,
687- source_distroseries, source_pocket, packagesets=packagesets)
688- self.assertEqual(expected_location, location)
689-
690- def test_get_target_location(self):
691- target_archive = self.factory.makeArchive()
692- args = self.makeDummyArgs()
693- target_distroseries = self.factory.makeDistroSeries()
694- source_pocket = PackagePublishingPocket.RELEASE
695- target_pocket = PackagePublishingPocket.BACKPORTS
696- job = CopyArchiveJob.create(
697- target_archive, args['source_archive'], args['distroseries'],
698- source_pocket, target_distroseries, target_pocket)
699- location = job.getTargetLocation()
700- expected_location = PackageLocation(
701- target_archive, target_distroseries.distribution,
702- target_distroseries, target_pocket)
703- self.assertEqual(expected_location, location)
704-
705- def test_get_target_location_with_component(self):
706- target_archive = self.factory.makeArchive()
707- args = self.makeDummyArgs()
708- target_distroseries = self.factory.makeDistroSeries()
709- source_pocket = PackagePublishingPocket.RELEASE
710- target_pocket = PackagePublishingPocket.BACKPORTS
711- target_component = self.factory.makeComponent()
712- job = CopyArchiveJob.create(
713- target_archive, args['source_archive'], args['distroseries'],
714- source_pocket, target_distroseries, target_pocket,
715- target_component=target_component)
716- location = job.getTargetLocation()
717- expected_location = PackageLocation(
718- target_archive, target_distroseries.distribution,
719- target_distroseries, target_pocket)
720- expected_location.component = target_component
721- self.assertEqual(expected_location, location)
722-
723- def _getJobs(self):
724- """Return the pending CopyArchiveJobs as a list."""
725- return list(CopyArchiveJob.iterReady())
726-
727- def _getJobCount(self):
728- """Return the number of CopyArchiveJobs in the queue."""
729- return len(self._getJobs())
730-
731- def makeSourceAndTarget(self):
732- distribution = self.factory.makeDistribution(name="foobuntu")
733- distroseries = self.factory.makeDistroSeries(
734- distribution=distribution, name="maudlin")
735- source_archive_owner = self.factory.makePerson(name="source-owner")
736- source_archive = self.factory.makeArchive(
737- name="source", owner=source_archive_owner,
738- purpose=ArchivePurpose.PPA, distribution=distribution)
739- self.factory.makeSourcePackagePublishingHistory(
740- sourcepackagename=self.factory.getOrMakeSourcePackageName(
741- name='bzr'),
742- distroseries=distroseries, component=self.factory.makeComponent(),
743- version="2.1", architecturehintlist='any',
744- archive=source_archive, status=PackagePublishingStatus.PUBLISHED,
745- pocket=PackagePublishingPocket.RELEASE)
746- das = self.factory.makeDistroArchSeries(
747- distroseries=distroseries, architecturetag="i386",
748- processorfamily=ProcessorFamilySet().getByName("x86"),
749- supports_virtualized=True)
750- with celebrity_logged_in('admin'):
751- distroseries.nominatedarchindep = das
752- target_archive_owner = self.factory.makePerson()
753- target_archive = self.factory.makeArchive(
754- purpose=ArchivePurpose.COPY, owner=target_archive_owner,
755- name="test-copy-archive", distribution=distribution,
756- description="Test copy archive", enabled=False)
757- return source_archive, target_archive, distroseries
758-
759- def checkPublishedSources(self, expected, archive, series):
760- # We need to be admin as the archive is disabled at this point.
761- with celebrity_logged_in('admin'):
762- sources = archive.getPublishedSources(
763- distroseries=series,
764- status=(
765- PackagePublishingStatus.PENDING,
766- PackagePublishingStatus.PUBLISHED))
767- actual = []
768- for source in sources:
769- actual.append(
770- (source.source_package_name,
771- source.source_package_version))
772- self.assertEqual(sorted(expected), sorted(actual))
773-
774- def test_run(self):
775- """Test that CopyArchiveJob.run() actually copies the archive.
776-
777- We just make a simple test here, and rely on PackageCloner tests
778- to cover the functionality.
779- """
780- source_archive, target_archive, series = self.makeSourceAndTarget()
781- job = CopyArchiveJob.create(
782- target_archive, source_archive, series,
783- PackagePublishingPocket.RELEASE, series,
784- PackagePublishingPocket.RELEASE)
785- job.run()
786- self.checkPublishedSources([("bzr", "2.1")], target_archive, series)
787-
788- def test_run_mergeCopy(self):
789- """Test that CopyArchiveJob.run() when merge=True does a mergeCopy."""
790- source_archive, target_archive, series = self.makeSourceAndTarget()
791- # Create the copy archive
792- job = CopyArchiveJob.create(
793- target_archive, source_archive, series,
794- PackagePublishingPocket.RELEASE, series,
795- PackagePublishingPocket.RELEASE)
796- job.start()
797- job.run()
798- job.complete()
799- # Now the two archives are in the same state, so we change the
800- # source archive and request a merge to check that it works.
801- # Create a new version of the apt package in the source
802- self.factory.makeSourcePackagePublishingHistory(
803- sourcepackagename=self.factory.getOrMakeSourcePackageName(
804- name='apt'),
805- distroseries=series, component=self.factory.makeComponent(),
806- version="1.2", architecturehintlist='any',
807- archive=source_archive, status=PackagePublishingStatus.PUBLISHED,
808- pocket=PackagePublishingPocket.RELEASE)
809- # Create a job to merge
810- job = CopyArchiveJob.create(
811- target_archive, source_archive, series,
812- PackagePublishingPocket.RELEASE, series,
813- PackagePublishingPocket.RELEASE, merge=True)
814- job.run()
815- # Check that the new apt package is in the target
816- self.checkPublishedSources(
817- [("bzr", "2.1"), ("apt", "1.2")], target_archive, series)
818-
819- def test_run_with_proc_families(self):
820- """Test that a CopyArchiveJob job with proc_families uses them.
821-
822- If we create a CopyArchiveJob with proc_families != None then
823- they should be used when cloning packages.
824- """
825- source_archive, target_archive, series = self.makeSourceAndTarget()
826- proc_families = [ProcessorFamilySet().getByName("x86")]
827- job = CopyArchiveJob.create(
828- target_archive, source_archive, series,
829- PackagePublishingPocket.RELEASE, series,
830- PackagePublishingPocket.RELEASE, proc_families=proc_families)
831- job.run()
832- builds = list(
833- getUtility(IBinaryPackageBuildSet).getBuildsForArchive(
834- target_archive, status=BuildStatus.NEEDSBUILD))
835- actual_builds = list()
836- for build in builds:
837- naked_build = removeSecurityProxy(build)
838- spr = naked_build.source_package_release
839- actual_builds.append(
840- (spr.name, spr.version, naked_build.processor.family.name))
841- # One build for the one package, as we specified one processor
842- # family.
843- self.assertEqual([("bzr", "2.1", "x86")], actual_builds)