Merge launchpad:master into launchpad:db-devel
- Git
- lp:launchpad
- master
- Merge into db-devel
Proposed by
Colin Watson
Status: | Merged |
---|---|
Approved by: | Colin Watson |
Approved revision: | 8d28b7c56f3ac7f0d39ec1c263268ffb0b42c676 |
Merge reported by: | Otto Co-Pilot |
Merged at revision: | not available |
Proposed branch: | launchpad:master |
Merge into: | launchpad:db-devel |
Diff against target: |
1621 lines (+273/-801) 18 files modified
dev/null (+0/-139) lib/lp/archivepublisher/publishing.py (+3/-28) lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py (+11/-206) lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py (+1/-351) lib/lp/archivepublisher/tests/test_publisher.py (+0/-39) lib/lp/oci/model/ocirecipebuildjob.py (+8/-4) lib/lp/oci/tests/test_ocirecipebuildjob.py (+5/-10) lib/lp/registry/browser/distribution.py (+0/-3) lib/lp/registry/interfaces/distribution.py (+42/-1) lib/lp/registry/model/distribution.py (+28/-0) lib/lp/registry/scripts/closeaccount.py (+3/-6) lib/lp/registry/tests/test_distribution.py (+91/-1) lib/lp/registry/tests/test_personmerge.py (+2/-2) lib/lp/services/scripts/base.py (+0/-4) lib/lp/snappy/model/snap.py (+0/-7) lib/lp/soyuz/scripts/expire_archive_files.py (+3/-0) lib/lp/testing/layers.py (+18/-0) utilities/manage-celery-workers.sh (+58/-0) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Colin Watson (community) | Approve | ||
Review via email: mp+402439@code.launchpad.net |
Commit message
Manually merge from master to fix test failure on Python 2
Description of the change
The bug fixed in https:/
To post a comment you must log in.
Revision history for this message
Colin Watson (cjwatson) : | # |
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/lib/lp/archivepublisher/htaccess.py b/lib/lp/archivepublisher/htaccess.py |
2 | deleted file mode 100644 |
3 | index 613cfde..0000000 |
4 | --- a/lib/lp/archivepublisher/htaccess.py |
5 | +++ /dev/null |
6 | @@ -1,124 +0,0 @@ |
7 | -#!/usr/bin/python2 |
8 | -# |
9 | -# Copyright 2010-2017 Canonical Ltd. This software is licensed under the |
10 | -# GNU Affero General Public License version 3 (see the file LICENSE). |
11 | - |
12 | -"""Writing of htaccess and htpasswd files.""" |
13 | - |
14 | -__metaclass__ = type |
15 | - |
16 | -__all__ = [ |
17 | - 'htpasswd_credentials_for_archive', |
18 | - 'write_htaccess', |
19 | - 'write_htpasswd', |
20 | - ] |
21 | - |
22 | -import base64 |
23 | -import crypt |
24 | -import os |
25 | - |
26 | -from lp.registry.model.person import Person |
27 | -from lp.services.database.interfaces import IStore |
28 | -from lp.soyuz.model.archiveauthtoken import ArchiveAuthToken |
29 | - |
30 | - |
31 | -HTACCESS_TEMPLATE = """ |
32 | -AuthType Basic |
33 | -AuthName "Token Required" |
34 | -AuthUserFile %(path)s/.htpasswd |
35 | -Require valid-user |
36 | -""" |
37 | - |
38 | -BUILDD_USER_NAME = "buildd" |
39 | - |
40 | - |
41 | -def write_htaccess(htaccess_filename, distroot): |
42 | - """Write a htaccess file for a private archive. |
43 | - |
44 | - :param htaccess_filename: Filename of the htaccess file. |
45 | - :param distroot: Archive root path |
46 | - """ |
47 | - interpolations = {"path": distroot} |
48 | - file = open(htaccess_filename, "w") |
49 | - try: |
50 | - file.write(HTACCESS_TEMPLATE % interpolations) |
51 | - finally: |
52 | - file.close() |
53 | - |
54 | - |
55 | -def write_htpasswd(filename, users): |
56 | - """Write out a new htpasswd file. |
57 | - |
58 | - :param filename: The file to create. |
59 | - :param users: Iterable over (user, password, salt) tuples. |
60 | - """ |
61 | - if os.path.isfile(filename): |
62 | - os.remove(filename) |
63 | - |
64 | - file = open(filename, "a") |
65 | - try: |
66 | - for user, password, salt in users: |
67 | - encrypted = crypt.crypt(password, salt) |
68 | - file.write("%s:%s\n" % (user, encrypted)) |
69 | - finally: |
70 | - file.close() |
71 | - |
72 | - |
73 | -# XXX cjwatson 2017-10-09: This whole mechanism of writing password files to |
74 | -# disk (as opposed to e.g. using a WSGI authentication provider that checks |
75 | -# passwords against the database) is terrible, but as long as we're using it |
76 | -# we should use something like bcrypt rather than DES-based crypt. |
77 | -def make_salt(s): |
78 | - """Produce a salt from an input string. |
79 | - |
80 | - This ensures that salts are drawn from the correct alphabet |
81 | - ([./a-zA-Z0-9]). |
82 | - """ |
83 | - # As long as the input string is at least one character long, there will |
84 | - # be no padding within the first two characters. |
85 | - return base64.b64encode( |
86 | - (s or " ").encode("UTF-8"), altchars=b"./")[:2].decode("ASCII") |
87 | - |
88 | - |
89 | -def htpasswd_credentials_for_archive(archive): |
90 | - """Return credentials for an archive for use with write_htpasswd. |
91 | - |
92 | - :param archive: An `IArchive` (must be private) |
93 | - :return: Iterable of tuples with (user, password, salt) for use with |
94 | - write_htpasswd. |
95 | - """ |
96 | - assert archive.private, "Archive %r must be private" % archive |
97 | - |
98 | - tokens = IStore(ArchiveAuthToken).find( |
99 | - (ArchiveAuthToken.person_id, ArchiveAuthToken.name, |
100 | - ArchiveAuthToken.token), |
101 | - ArchiveAuthToken.archive == archive, |
102 | - ArchiveAuthToken.date_deactivated == None) |
103 | - # We iterate tokens more than once - materialise it. |
104 | - tokens = list(tokens) |
105 | - |
106 | - # Preload map with person ID to person name. |
107 | - person_ids = {token[0] for token in tokens} |
108 | - names = dict( |
109 | - IStore(Person).find( |
110 | - (Person.id, Person.name), Person.id.is_in(person_ids))) |
111 | - |
112 | - # Format the user field by combining the token list with the person list |
113 | - # (when token has person_id) or prepending a '+' (for named tokens). |
114 | - output = [] |
115 | - for person_id, token_name, token in tokens: |
116 | - if token_name: |
117 | - # A named auth token. |
118 | - output.append(('+' + token_name, token, make_salt(token_name))) |
119 | - else: |
120 | - # A subscription auth token. |
121 | - output.append( |
122 | - (names[person_id], token, make_salt(names[person_id]))) |
123 | - |
124 | - # The first .htpasswd entry is the buildd_secret. |
125 | - yield (BUILDD_USER_NAME, archive.buildd_secret, BUILDD_USER_NAME[:2]) |
126 | - |
127 | - # Iterate over tokens and write the appropriate htpasswd entries for them. |
128 | - # Sort by name/person ID so the file can be compared later. |
129 | - for user, password, salt in sorted(output): |
130 | - yield (user, password, salt) |
131 | diff --git a/lib/lp/archivepublisher/publishing.py b/lib/lp/archivepublisher/publishing.py |
132 | index 55614f9..b87c7ce 100644 |
133 | --- a/lib/lp/archivepublisher/publishing.py |
134 | +++ b/lib/lp/archivepublisher/publishing.py |
135 | @@ -50,17 +50,14 @@ from lp.archivepublisher import HARDCODED_COMPONENT_ORDER |
136 | from lp.archivepublisher.config import getPubConfig |
137 | from lp.archivepublisher.diskpool import DiskPool |
138 | from lp.archivepublisher.domination import Dominator |
139 | -from lp.archivepublisher.htaccess import ( |
140 | - htpasswd_credentials_for_archive, |
141 | - write_htaccess, |
142 | - write_htpasswd, |
143 | - ) |
144 | from lp.archivepublisher.indices import ( |
145 | build_binary_stanza_fields, |
146 | build_source_stanza_fields, |
147 | build_translations_stanza_fields, |
148 | ) |
149 | -from lp.archivepublisher.interfaces.archivegpgsigningkey import ISignableArchive |
150 | +from lp.archivepublisher.interfaces.archivegpgsigningkey import ( |
151 | + ISignableArchive, |
152 | + ) |
153 | from lp.archivepublisher.model.ftparchive import FTPArchiveHandler |
154 | from lp.archivepublisher.utils import ( |
155 | get_ppa_reference, |
156 | @@ -166,27 +163,6 @@ def _getDiskPool(pubconf, log): |
157 | return dp |
158 | |
159 | |
160 | -def _setupHtaccess(archive, pubconf, log): |
161 | - """Setup .htaccess/.htpasswd files for an archive. |
162 | - """ |
163 | - if not archive.private: |
164 | - # FIXME: JRV 20101108 leftover .htaccess and .htpasswd files |
165 | - # should be removed when support for making existing 3PA's public |
166 | - # is added; bug=376072 |
167 | - return |
168 | - |
169 | - htaccess_path = os.path.join(pubconf.archiveroot, ".htaccess") |
170 | - htpasswd_path = os.path.join(pubconf.archiveroot, ".htpasswd") |
171 | - # After the initial htaccess/htpasswd files |
172 | - # are created generate_ppa_htaccess is responsible for |
173 | - # updating the tokens. |
174 | - if not os.path.exists(htaccess_path): |
175 | - log.debug("Writing htaccess file.") |
176 | - write_htaccess(htaccess_path, pubconf.archiveroot) |
177 | - passwords = htpasswd_credentials_for_archive(archive) |
178 | - write_htpasswd(htpasswd_path, passwords) |
179 | - |
180 | - |
181 | def getPublisher(archive, allowed_suites, log, distsroot=None): |
182 | """Return an initialized Publisher instance for the given context. |
183 | |
184 | @@ -472,7 +448,6 @@ class Publisher(object): |
185 | def setupArchiveDirs(self): |
186 | self.log.debug("Setting up archive directories.") |
187 | self._config.setupArchiveDirs() |
188 | - _setupHtaccess(self.archive, self._config, self.log) |
189 | |
190 | def isDirty(self, distroseries, pocket): |
191 | """True if a publication has happened in this release and pocket.""" |
192 | diff --git a/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py b/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py |
193 | index a272540..26e8db8 100644 |
194 | --- a/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py |
195 | +++ b/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py |
196 | @@ -3,22 +3,10 @@ |
197 | # Copyright 2009-2011 Canonical Ltd. This software is licensed under the |
198 | # GNU Affero General Public License version 3 (see the file LICENSE). |
199 | |
200 | -from datetime import ( |
201 | - datetime, |
202 | - timedelta, |
203 | - ) |
204 | -import filecmp |
205 | -import os |
206 | -import tempfile |
207 | +from datetime import datetime |
208 | |
209 | import pytz |
210 | |
211 | -from lp.archivepublisher.config import getPubConfig |
212 | -from lp.archivepublisher.htaccess import ( |
213 | - htpasswd_credentials_for_archive, |
214 | - write_htaccess, |
215 | - write_htpasswd, |
216 | - ) |
217 | from lp.registry.model.teammembership import TeamParticipation |
218 | from lp.services.config import config |
219 | from lp.services.database.interfaces import IStore |
220 | @@ -30,23 +18,19 @@ from lp.services.mail.sendmail import ( |
221 | ) |
222 | from lp.services.scripts.base import LaunchpadCronScript |
223 | from lp.services.webapp import canonical_url |
224 | -from lp.soyuz.enums import ( |
225 | - ArchiveStatus, |
226 | - ArchiveSubscriberStatus, |
227 | - ) |
228 | -from lp.soyuz.model.archive import Archive |
229 | +from lp.soyuz.enums import ArchiveSubscriberStatus |
230 | from lp.soyuz.model.archiveauthtoken import ArchiveAuthToken |
231 | from lp.soyuz.model.archivesubscriber import ArchiveSubscriber |
232 | |
233 | -# These PPAs should never have their htaccess/pwd files touched. |
234 | -BLACKLISTED_PPAS = { |
235 | - 'ubuntuone': ['ppa'], |
236 | - } |
237 | - |
238 | |
239 | class HtaccessTokenGenerator(LaunchpadCronScript): |
240 | - """Helper class for generating .htaccess files for private PPAs.""" |
241 | - blacklist = BLACKLISTED_PPAS |
242 | + """Expire archive subscriptions and deactivate invalid tokens.""" |
243 | + |
244 | + # XXX cjwatson 2021-04-21: This script and class are now misnamed, as we |
245 | + # no longer generate .htaccess or .htpasswd files, but instead check |
246 | + # archive authentication dynamically. We can remove this script once we |
247 | + # stop running it on production and move its remaining functions |
248 | + # elsewhere (probably garbo). |
249 | |
250 | def add_my_options(self): |
251 | """Add script command line options.""" |
252 | @@ -60,68 +44,6 @@ class HtaccessTokenGenerator(LaunchpadCronScript): |
253 | dest="no_deactivation", default=False, |
254 | help="If set, tokens are not deactivated.") |
255 | |
256 | - def ensureHtaccess(self, ppa): |
257 | - """Generate a .htaccess for `ppa`.""" |
258 | - if self.options.dryrun: |
259 | - return |
260 | - |
261 | - # The publisher Config object does not have an |
262 | - # interface, so we need to remove the security wrapper. |
263 | - pub_config = getPubConfig(ppa) |
264 | - htaccess_filename = os.path.join(pub_config.archiveroot, ".htaccess") |
265 | - if not os.path.exists(htaccess_filename): |
266 | - # It's not there, so create it. |
267 | - if not os.path.exists(pub_config.archiveroot): |
268 | - os.makedirs(pub_config.archiveroot) |
269 | - write_htaccess(htaccess_filename, pub_config.archiveroot) |
270 | - self.logger.debug("Created .htaccess for %s" % ppa.displayname) |
271 | - |
272 | - def generateHtpasswd(self, ppa): |
273 | - """Generate a htpasswd file for `ppa`s `tokens`. |
274 | - |
275 | - :param ppa: The context PPA (an `IArchive`). |
276 | - :return: The filename of the htpasswd file that was generated. |
277 | - """ |
278 | - # Create a temporary file that will be a new .htpasswd. |
279 | - pub_config = getPubConfig(ppa) |
280 | - if not os.path.exists(pub_config.temproot): |
281 | - os.makedirs(pub_config.temproot) |
282 | - fd, temp_filename = tempfile.mkstemp(dir=pub_config.temproot) |
283 | - os.close(fd) |
284 | - |
285 | - write_htpasswd(temp_filename, htpasswd_credentials_for_archive(ppa)) |
286 | - |
287 | - return temp_filename |
288 | - |
289 | - def replaceUpdatedHtpasswd(self, ppa, temp_htpasswd_file): |
290 | - """Compare the new and the old htpasswd and replace if changed. |
291 | - |
292 | - :return: True if the file was replaced. |
293 | - """ |
294 | - try: |
295 | - if self.options.dryrun: |
296 | - return False |
297 | - |
298 | - # The publisher Config object does not have an |
299 | - # interface, so we need to remove the security wrapper. |
300 | - pub_config = getPubConfig(ppa) |
301 | - if not os.path.exists(pub_config.archiveroot): |
302 | - os.makedirs(pub_config.archiveroot) |
303 | - htpasswd_filename = os.path.join( |
304 | - pub_config.archiveroot, ".htpasswd") |
305 | - |
306 | - if (not os.path.isfile(htpasswd_filename) or |
307 | - not filecmp.cmp(htpasswd_filename, temp_htpasswd_file)): |
308 | - # Atomically replace the old file or create a new file. |
309 | - os.rename(temp_htpasswd_file, htpasswd_filename) |
310 | - self.logger.debug("Replaced htpasswd for %s" % ppa.displayname) |
311 | - return True |
312 | - |
313 | - return False |
314 | - finally: |
315 | - if os.path.exists(temp_htpasswd_file): |
316 | - os.unlink(temp_htpasswd_file) |
317 | - |
318 | def sendCancellationEmail(self, token): |
319 | """Send an email to the person whose subscription was cancelled.""" |
320 | if token.archive.suppress_subscription_notifications: |
321 | @@ -220,8 +142,7 @@ class HtaccessTokenGenerator(LaunchpadCronScript): |
322 | :param send_email: Whether to send a cancellation email to the owner |
323 | of the token. This defaults to False to speed up the test |
324 | suite. |
325 | - :return: the set of ppas affected by token deactivations so that we |
326 | - can later update their htpasswd files. |
327 | + :return: the set of ppas affected by token deactivations. |
328 | """ |
329 | invalid_tokens = self._getInvalidTokens() |
330 | return self.deactivateTokens(invalid_tokens, send_email=send_email) |
331 | @@ -249,129 +170,13 @@ class HtaccessTokenGenerator(LaunchpadCronScript): |
332 | self.logger.info( |
333 | "Expired subscriptions: %s" % ", ".join(subscription_names)) |
334 | |
335 | - def getTimeToSyncFrom(self): |
336 | - """Return the time we'll synchronize from. |
337 | - |
338 | - Any new PPAs or tokens created since this time will be used to |
339 | - generate passwords. |
340 | - """ |
341 | - # NTP is running on our servers and therefore we can assume |
342 | - # only minimal skew, we include a fudge-factor of 1s so that |
343 | - # even the minimal skew cannot demonstrate bug 627608. |
344 | - last_activity = self.get_last_activity() |
345 | - if not last_activity: |
346 | - return |
347 | - return last_activity.date_started - timedelta(seconds=1) |
348 | - |
349 | - def getNewTokens(self, since=None): |
350 | - """Return result set of new tokens created since the given time.""" |
351 | - store = IStore(ArchiveAuthToken) |
352 | - extra_expr = [] |
353 | - if since: |
354 | - extra_expr = [ArchiveAuthToken.date_created >= since] |
355 | - new_ppa_tokens = store.find( |
356 | - ArchiveAuthToken, |
357 | - ArchiveAuthToken.date_deactivated == None, |
358 | - *extra_expr) |
359 | - return new_ppa_tokens |
360 | - |
361 | - def getDeactivatedNamedTokens(self, since=None): |
362 | - """Return result set of named tokens deactivated since given time.""" |
363 | - now = datetime.now(pytz.UTC) |
364 | - |
365 | - store = IStore(ArchiveAuthToken) |
366 | - extra_expr = [] |
367 | - if since: |
368 | - extra_expr = [ArchiveAuthToken.date_deactivated >= since] |
369 | - tokens = store.find( |
370 | - ArchiveAuthToken, |
371 | - ArchiveAuthToken.name != None, |
372 | - ArchiveAuthToken.date_deactivated != None, |
373 | - ArchiveAuthToken.date_deactivated <= now, |
374 | - *extra_expr) |
375 | - return tokens |
376 | - |
377 | - def getNewPrivatePPAs(self, since=None): |
378 | - """Return the recently created private PPAs.""" |
379 | - store = IStore(Archive) |
380 | - extra_expr = [] |
381 | - if since: |
382 | - extra_expr = [Archive.date_created >= since] |
383 | - return store.find( |
384 | - Archive, Archive._private == True, *extra_expr) |
385 | - |
386 | def main(self): |
387 | """Script entry point.""" |
388 | self.logger.info('Starting the PPA .htaccess generation') |
389 | self.expireSubscriptions() |
390 | affected_ppas = self.deactivateInvalidTokens(send_email=True) |
391 | - current_ppa_count = len(affected_ppas) |
392 | - self.logger.debug( |
393 | - '%s PPAs with deactivated tokens' % current_ppa_count) |
394 | - |
395 | - last_success = self.getTimeToSyncFrom() |
396 | - |
397 | - # Include ppas with named tokens deactivated since last time we ran. |
398 | - num_tokens = 0 |
399 | - for token in self.getDeactivatedNamedTokens(since=last_success): |
400 | - affected_ppas.add(token.archive) |
401 | - num_tokens += 1 |
402 | - |
403 | - new_ppa_count = len(affected_ppas) |
404 | - self.logger.debug( |
405 | - "%s deactivated named tokens since last run, %s PPAs affected" |
406 | - % (num_tokens, new_ppa_count - current_ppa_count)) |
407 | - current_ppa_count = new_ppa_count |
408 | - |
409 | - # In addition to the ppas that are affected by deactivated |
410 | - # tokens, we also want to include any ppas that have tokens |
411 | - # created since the last time we ran. |
412 | - num_tokens = 0 |
413 | - for token in self.getNewTokens(since=last_success): |
414 | - affected_ppas.add(token.archive) |
415 | - num_tokens += 1 |
416 | - |
417 | - new_ppa_count = len(affected_ppas) |
418 | - self.logger.debug( |
419 | - "%s new tokens since last run, %s PPAs affected" |
420 | - % (num_tokens, new_ppa_count - current_ppa_count)) |
421 | - current_ppa_count = new_ppa_count |
422 | - |
423 | - affected_ppas.update(self.getNewPrivatePPAs(since=last_success)) |
424 | - new_ppa_count = len(affected_ppas) |
425 | self.logger.debug( |
426 | - "%s new private PPAs since last run" |
427 | - % (new_ppa_count - current_ppa_count)) |
428 | - |
429 | - self.logger.debug('%s PPAs require updating' % new_ppa_count) |
430 | - for ppa in affected_ppas: |
431 | - # If this PPA is blacklisted, do not touch its htaccess/pwd |
432 | - # files. |
433 | - blacklisted_ppa_names_for_owner = self.blacklist.get( |
434 | - ppa.owner.name, []) |
435 | - if ppa.name in blacklisted_ppa_names_for_owner: |
436 | - self.logger.info( |
437 | - "Skipping htaccess updates for blacklisted PPA " |
438 | - " '%s' owned by %s.", |
439 | - ppa.name, |
440 | - ppa.owner.displayname) |
441 | - continue |
442 | - elif ppa.status == ArchiveStatus.DELETED or ppa.enabled is False: |
443 | - self.logger.info( |
444 | - "Skipping htaccess updates for deleted or disabled PPA " |
445 | - " '%s' owned by %s.", |
446 | - ppa.name, |
447 | - ppa.owner.displayname) |
448 | - continue |
449 | - |
450 | - self.ensureHtaccess(ppa) |
451 | - htpasswd_write_start = datetime.now() |
452 | - temp_htpasswd = self.generateHtpasswd(ppa) |
453 | - self.replaceUpdatedHtpasswd(ppa, temp_htpasswd) |
454 | - htpasswd_write_duration = datetime.now() - htpasswd_write_start |
455 | - self.logger.debug( |
456 | - "Wrote htpasswd for '%s': %ss" |
457 | - % (ppa.name, htpasswd_write_duration.total_seconds())) |
458 | + '%s PPAs with deactivated tokens' % len(affected_ppas)) |
459 | |
460 | if self.options.no_deactivation or self.options.dryrun: |
461 | self.logger.info('Dry run, so not committing transaction.') |
462 | diff --git a/lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py b/lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py |
463 | index f11dba1..472b7bf 100644 |
464 | --- a/lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py |
465 | +++ b/lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py |
466 | @@ -5,7 +5,6 @@ |
467 | |
468 | from __future__ import absolute_import, print_function, unicode_literals |
469 | |
470 | -import crypt |
471 | from datetime import ( |
472 | datetime, |
473 | timedelta, |
474 | @@ -13,20 +12,10 @@ from datetime import ( |
475 | import os |
476 | import subprocess |
477 | import sys |
478 | -import tempfile |
479 | |
480 | import pytz |
481 | -from testtools.matchers import ( |
482 | - AllMatch, |
483 | - FileContains, |
484 | - FileExists, |
485 | - Not, |
486 | - ) |
487 | -import transaction |
488 | from zope.component import getUtility |
489 | -from zope.security.proxy import removeSecurityProxy |
490 | |
491 | -from lp.archivepublisher.config import getPubConfig |
492 | from lp.archivepublisher.scripts.generate_ppa_htaccess import ( |
493 | HtaccessTokenGenerator, |
494 | ) |
495 | @@ -36,16 +25,7 @@ from lp.registry.interfaces.teammembership import TeamMembershipStatus |
496 | from lp.services.config import config |
497 | from lp.services.features.testing import FeatureFixture |
498 | from lp.services.log.logger import BufferLogger |
499 | -from lp.services.osutils import ( |
500 | - ensure_directory_exists, |
501 | - remove_if_exists, |
502 | - write_file, |
503 | - ) |
504 | -from lp.services.scripts.interfaces.scriptactivity import IScriptActivitySet |
505 | -from lp.soyuz.enums import ( |
506 | - ArchiveStatus, |
507 | - ArchiveSubscriberStatus, |
508 | - ) |
509 | +from lp.soyuz.enums import ArchiveSubscriberStatus |
510 | from lp.soyuz.interfaces.archive import NAMED_AUTH_TOKEN_FEATURE_FLAG |
511 | from lp.testing import TestCaseWithFactory |
512 | from lp.testing.dbuser import ( |
513 | @@ -102,102 +82,6 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): |
514 | stdout, stderr = process.communicate() |
515 | return process.returncode, stdout, stderr |
516 | |
517 | - def testEnsureHtaccess(self): |
518 | - """Ensure that the .htaccess file is generated correctly.""" |
519 | - # The publisher Config object does not have an interface, so we |
520 | - # need to remove the security wrapper. |
521 | - pub_config = getPubConfig(self.ppa) |
522 | - |
523 | - filename = os.path.join(pub_config.archiveroot, ".htaccess") |
524 | - remove_if_exists(filename) |
525 | - script = self.getScript() |
526 | - script.ensureHtaccess(self.ppa) |
527 | - self.addCleanup(remove_if_exists, filename) |
528 | - |
529 | - contents = [ |
530 | - "", |
531 | - "AuthType Basic", |
532 | - "AuthName \"Token Required\"", |
533 | - "AuthUserFile %s/.htpasswd" % pub_config.archiveroot, |
534 | - "Require valid-user", |
535 | - "", |
536 | - ] |
537 | - self.assertThat(filename, FileContains('\n'.join(contents))) |
538 | - |
539 | - def testGenerateHtpasswd(self): |
540 | - """Given some `ArchiveAuthToken`s, test generating htpasswd.""" |
541 | - # Make some subscriptions and tokens. |
542 | - tokens = [] |
543 | - for name in ['name12', 'name16']: |
544 | - person = getUtility(IPersonSet).getByName(name) |
545 | - self.ppa.newSubscription(person, self.ppa.owner) |
546 | - tokens.append(self.ppa.newAuthToken(person)) |
547 | - token_usernames = [token.person.name for token in tokens] |
548 | - |
549 | - # Generate the passwd file. |
550 | - script = self.getScript() |
551 | - filename = script.generateHtpasswd(self.ppa) |
552 | - self.addCleanup(remove_if_exists, filename) |
553 | - |
554 | - # It should be a temp file on the same filesystem as the target |
555 | - # file, so os.rename() won't explode. temproot is relied on |
556 | - # elsewhere for this same purpose, so it should be safe. |
557 | - pub_config = getPubConfig(self.ppa) |
558 | - self.assertEqual(pub_config.temproot, os.path.dirname(filename)) |
559 | - |
560 | - # Read it back in. |
561 | - file_contents = [ |
562 | - line.strip().split(':', 1) for line in open(filename, 'r')] |
563 | - |
564 | - # First entry is buildd secret, rest are from tokens. |
565 | - usernames = list(list(zip(*file_contents))[0]) |
566 | - self.assertEqual(['buildd'] + token_usernames, usernames) |
567 | - |
568 | - # We can re-encrypt the buildd_secret and it should match the |
569 | - # one in the .htpasswd file. |
570 | - password = file_contents[0][1] |
571 | - encrypted_secret = crypt.crypt(self.ppa.buildd_secret, password) |
572 | - self.assertEqual(encrypted_secret, password) |
573 | - |
574 | - def testReplaceUpdatedHtpasswd(self): |
575 | - """Test that the htpasswd file is only replaced if it changes.""" |
576 | - FILE_CONTENT = b"Kneel before Zod!" |
577 | - # The publisher Config object does not have an interface, so we |
578 | - # need to remove the security wrapper. |
579 | - pub_config = getPubConfig(self.ppa) |
580 | - filename = os.path.join(pub_config.archiveroot, ".htpasswd") |
581 | - |
582 | - # Write out a dummy .htpasswd |
583 | - ensure_directory_exists(pub_config.archiveroot) |
584 | - write_file(filename, FILE_CONTENT) |
585 | - |
586 | - # Write the same contents in a temp file. |
587 | - def write_tempfile(): |
588 | - fd, temp_filename = tempfile.mkstemp(dir=pub_config.archiveroot) |
589 | - file = os.fdopen(fd, "wb") |
590 | - file.write(FILE_CONTENT) |
591 | - file.close() |
592 | - return temp_filename |
593 | - |
594 | - # Replacement should not happen. |
595 | - temp_filename = write_tempfile() |
596 | - script = self.getScript() |
597 | - self.assertTrue(os.path.exists(temp_filename)) |
598 | - self.assertFalse( |
599 | - script.replaceUpdatedHtpasswd(self.ppa, temp_filename)) |
600 | - self.assertFalse(os.path.exists(temp_filename)) |
601 | - |
602 | - # Writing a different .htpasswd should see it get replaced. |
603 | - write_file(filename, b"Come to me, son of Jor-El!") |
604 | - |
605 | - temp_filename = write_tempfile() |
606 | - self.assertTrue(os.path.exists(temp_filename)) |
607 | - self.assertTrue( |
608 | - script.replaceUpdatedHtpasswd(self.ppa, temp_filename)) |
609 | - self.assertFalse(os.path.exists(temp_filename)) |
610 | - |
611 | - os.remove(filename) |
612 | - |
613 | def assertDeactivated(self, token): |
614 | """Helper function to test token deactivation state.""" |
615 | return self.assertNotEqual(token.date_deactivated, None) |
616 | @@ -341,15 +225,6 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): |
617 | self.layer.txn.commit() |
618 | return (sub1, sub2), (token1, token2, token3) |
619 | |
620 | - def ensureNoFiles(self): |
621 | - """Ensure the .ht* files don't already exist.""" |
622 | - pub_config = getPubConfig(self.ppa) |
623 | - htaccess = os.path.join(pub_config.archiveroot, ".htaccess") |
624 | - htpasswd = os.path.join(pub_config.archiveroot, ".htpasswd") |
625 | - remove_if_exists(htaccess) |
626 | - remove_if_exists(htpasswd) |
627 | - return htaccess, htpasswd |
628 | - |
629 | def testSubscriptionExpiry(self): |
630 | """Ensure subscriptions' statuses are set to EXPIRED properly.""" |
631 | subs, tokens = self.setupDummyTokens() |
632 | @@ -369,51 +244,6 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): |
633 | self.assertEqual(subs[0].status, ArchiveSubscriberStatus.EXPIRED) |
634 | self.assertEqual(subs[1].status, ArchiveSubscriberStatus.CURRENT) |
635 | |
636 | - def testBasicOperation(self): |
637 | - """Invoke the actual script and make sure it generates some files.""" |
638 | - self.setupDummyTokens() |
639 | - htaccess, htpasswd = self.ensureNoFiles() |
640 | - |
641 | - # Call the script and check that we have a .htaccess and a |
642 | - # .htpasswd. |
643 | - return_code, stdout, stderr = self.runScript() |
644 | - self.assertEqual( |
645 | - return_code, 0, "Got a bad return code of %s\nOutput:\n%s" % |
646 | - (return_code, stderr)) |
647 | - self.assertThat([htaccess, htpasswd], AllMatch(FileExists())) |
648 | - os.remove(htaccess) |
649 | - os.remove(htpasswd) |
650 | - |
651 | - def testBasicOperation_with_named_tokens(self): |
652 | - """Invoke the actual script and make sure it generates some files.""" |
653 | - token1 = self.ppa.newNamedAuthToken("tokenname1") |
654 | - token2 = self.ppa.newNamedAuthToken("tokenname2") |
655 | - token3 = self.ppa.newNamedAuthToken("tokenname3") |
656 | - token3.deactivate() |
657 | - |
658 | - # Call the script and check that we have a .htaccess and a .htpasswd. |
659 | - htaccess, htpasswd = self.ensureNoFiles() |
660 | - script = self.getScript() |
661 | - script.main() |
662 | - self.assertThat([htaccess, htpasswd], AllMatch(FileExists())) |
663 | - with open(htpasswd) as htpasswd_file: |
664 | - contents = htpasswd_file.read() |
665 | - self.assertIn('+' + token1.name, contents) |
666 | - self.assertIn('+' + token2.name, contents) |
667 | - self.assertNotIn('+' + token3.name, contents) |
668 | - |
669 | - # Deactivate a named token and verify it is removed from .htpasswd. |
670 | - token2.deactivate() |
671 | - script.main() |
672 | - self.assertThat([htaccess, htpasswd], AllMatch(FileExists())) |
673 | - with open(htpasswd) as htpasswd_file: |
674 | - contents = htpasswd_file.read() |
675 | - self.assertIn('+' + token1.name, contents) |
676 | - self.assertNotIn('+' + token2.name, contents) |
677 | - self.assertNotIn('+' + token3.name, contents) |
678 | - os.remove(htaccess) |
679 | - os.remove(htpasswd) |
680 | - |
681 | def _setupOptionsData(self): |
682 | """Setup test data for option testing.""" |
683 | subs, tokens = self.setupDummyTokens() |
684 | @@ -427,13 +257,9 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): |
685 | """Test that the dryrun and no-deactivation option works.""" |
686 | subs, tokens = self._setupOptionsData() |
687 | |
688 | - htaccess, htpasswd = self.ensureNoFiles() |
689 | script = self.getScript(test_args=["--dry-run"]) |
690 | script.main() |
691 | |
692 | - # Assert no files were written. |
693 | - self.assertThat([htaccess, htpasswd], AllMatch(Not(FileExists()))) |
694 | - |
695 | # Assert that the cancelled subscription did not cause the token |
696 | # to get deactivated. |
697 | self.assertNotDeactivated(tokens[0]) |
698 | @@ -448,65 +274,6 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): |
699 | script.main() |
700 | self.assertDeactivated(tokens[0]) |
701 | |
702 | - def testBlacklistingPPAs(self): |
703 | - """Test that the htaccess for blacklisted PPAs are not touched.""" |
704 | - subs, tokens = self.setupDummyTokens() |
705 | - htaccess, htpasswd = self.ensureNoFiles() |
706 | - |
707 | - # Setup the first subscription so that it is due to be expired. |
708 | - now = datetime.now(pytz.UTC) |
709 | - subs[0].date_expires = now - timedelta(minutes=3) |
710 | - self.assertEqual(subs[0].status, ArchiveSubscriberStatus.CURRENT) |
711 | - |
712 | - script = self.getScript() |
713 | - script.blacklist = {'joe': ['my_other_ppa', 'myppa', 'and_another']} |
714 | - script.main() |
715 | - |
716 | - # The tokens will still be deactivated, and subscriptions expired. |
717 | - self.assertDeactivated(tokens[0]) |
718 | - self.assertEqual(subs[0].status, ArchiveSubscriberStatus.EXPIRED) |
719 | - # But the htaccess is not touched. |
720 | - self.assertThat([htaccess, htpasswd], AllMatch(Not(FileExists()))) |
721 | - |
722 | - def testSkippingOfDisabledPPAs(self): |
723 | - """Test that the htaccess for disabled PPAs are not touched.""" |
724 | - subs, tokens = self.setupDummyTokens() |
725 | - htaccess, htpasswd = self.ensureNoFiles() |
726 | - |
727 | - # Setup subscription so that htaccess/htpasswd is pending generation. |
728 | - now = datetime.now(pytz.UTC) |
729 | - subs[0].date_expires = now + timedelta(minutes=3) |
730 | - self.assertEqual(subs[0].status, ArchiveSubscriberStatus.CURRENT) |
731 | - |
732 | - # Set the PPA as disabled. |
733 | - self.ppa.disable() |
734 | - self.assertFalse(self.ppa.enabled) |
735 | - |
736 | - script = self.getScript() |
737 | - script.main() |
738 | - |
739 | - # The htaccess and htpasswd files should not be generated. |
740 | - self.assertThat([htaccess, htpasswd], AllMatch(Not(FileExists()))) |
741 | - |
742 | - def testSkippingOfDeletedPPAs(self): |
743 | - """Test that the htaccess for deleted PPAs are not touched.""" |
744 | - subs, tokens = self.setupDummyTokens() |
745 | - htaccess, htpasswd = self.ensureNoFiles() |
746 | - |
747 | - # Setup subscription so that htaccess/htpasswd is pending generation. |
748 | - now = datetime.now(pytz.UTC) |
749 | - subs[0].date_expires = now + timedelta(minutes=3) |
750 | - self.assertEqual(subs[0].status, ArchiveSubscriberStatus.CURRENT) |
751 | - |
752 | - # Set the PPA as deleted. |
753 | - self.ppa.status = ArchiveStatus.DELETED |
754 | - |
755 | - script = self.getScript() |
756 | - script.main() |
757 | - |
758 | - # The htaccess and htpasswd files should not be generated. |
759 | - self.assertThat([htaccess, htpasswd], AllMatch(Not(FileExists()))) |
760 | - |
761 | def testSendingCancellationEmail(self): |
762 | """Test that when a token is deactivated, its user gets an email. |
763 | |
764 | @@ -568,120 +335,3 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): |
765 | script.sendCancellationEmail(token) |
766 | |
767 | self.assertEmailQueueLength(0) |
768 | - |
769 | - def test_getTimeToSyncFrom(self): |
770 | - # Sync from 1s before previous start to catch anything made during the |
771 | - # last script run, and to handle NTP clock skew. |
772 | - now = datetime.now(pytz.UTC) |
773 | - script_start_time = now - timedelta(seconds=2) |
774 | - script_end_time = now |
775 | - |
776 | - getUtility(IScriptActivitySet).recordSuccess( |
777 | - self.SCRIPT_NAME, script_start_time, script_end_time) |
778 | - script = self.getScript() |
779 | - self.assertEqual( |
780 | - script_start_time - timedelta(seconds=1), |
781 | - script.getTimeToSyncFrom()) |
782 | - |
783 | - def test_getNewPrivatePPAs_no_previous_run(self): |
784 | - # All private PPAs are returned if there was no previous run. |
785 | - # This happens even if they have no tokens. |
786 | - |
787 | - # Create a public PPA that should not be in the list. |
788 | - self.factory.makeArchive(private=False) |
789 | - |
790 | - script = self.getScript() |
791 | - self.assertContentEqual([self.ppa], script.getNewPrivatePPAs()) |
792 | - |
793 | - def test_getNewPrivatePPAs_only_those_since_last_run(self): |
794 | - # Only private PPAs created since the last run are returned. |
795 | - # This happens even if they have no tokens. |
796 | - last_start = datetime.now(pytz.UTC) - timedelta(seconds=90) |
797 | - before_last_start = last_start - timedelta(seconds=30) |
798 | - removeSecurityProxy(self.ppa).date_created = before_last_start |
799 | - |
800 | - # Create a new PPA that should show up. |
801 | - new_ppa = self.factory.makeArchive(private=True) |
802 | - |
803 | - script = self.getScript() |
804 | - new_ppas = script.getNewPrivatePPAs(since=last_start) |
805 | - self.assertContentEqual([new_ppa], new_ppas) |
806 | - |
807 | - def test_getNewTokens_no_previous_run(self): |
808 | - """All valid tokens returned if there is no record of previous run.""" |
809 | - tokens = self.setupDummyTokens()[1] |
810 | - |
811 | - # If there is no record of the script running previously, all |
812 | - # valid tokens are returned. |
813 | - script = self.getScript() |
814 | - self.assertContentEqual(tokens, script.getNewTokens()) |
815 | - |
816 | - def test_getNewTokens_only_those_since_last_run(self): |
817 | - """Only tokens created since the last run are returned.""" |
818 | - last_start = datetime.now(pytz.UTC) - timedelta(seconds=90) |
819 | - before_last_start = last_start - timedelta(seconds=30) |
820 | - |
821 | - tokens = self.setupDummyTokens()[1] |
822 | - # This token will not be included. |
823 | - removeSecurityProxy(tokens[0]).date_created = before_last_start |
824 | - |
825 | - script = self.getScript() |
826 | - new_tokens = script.getNewTokens(since=last_start) |
827 | - self.assertContentEqual(tokens[1:], new_tokens) |
828 | - |
829 | - def test_getNewTokens_only_active_tokens(self): |
830 | - """Only active tokens are returned.""" |
831 | - tokens = self.setupDummyTokens()[1] |
832 | - tokens[0].deactivate() |
833 | - |
834 | - script = self.getScript() |
835 | - self.assertContentEqual(tokens[1:], script.getNewTokens()) |
836 | - |
837 | - def test_getDeactivatedNamedTokens_no_previous_run(self): |
838 | - """All deactivated named tokens returned if there is no record |
839 | - of previous run.""" |
840 | - last_start = datetime.now(pytz.UTC) - timedelta(seconds=90) |
841 | - before_last_start = last_start - timedelta(seconds=30) |
842 | - |
843 | - self.ppa.newNamedAuthToken("tokenname1") |
844 | - token2 = self.ppa.newNamedAuthToken("tokenname2") |
845 | - token2.deactivate() |
846 | - token3 = self.ppa.newNamedAuthToken("tokenname3") |
847 | - token3.date_deactivated = before_last_start |
848 | - |
849 | - script = self.getScript() |
850 | - self.assertContentEqual( |
851 | - [token2, token3], script.getDeactivatedNamedTokens()) |
852 | - |
853 | - def test_getDeactivatedNamedTokens_only_those_since_last_run(self): |
854 | - """Only named tokens deactivated since last run are returned.""" |
855 | - last_start = datetime.now(pytz.UTC) - timedelta(seconds=90) |
856 | - before_last_start = last_start - timedelta(seconds=30) |
857 | - tomorrow = datetime.now(pytz.UTC) + timedelta(days=1) |
858 | - |
859 | - self.ppa.newNamedAuthToken("tokenname1") |
860 | - token2 = self.ppa.newNamedAuthToken("tokenname2") |
861 | - token2.deactivate() |
862 | - token3 = self.ppa.newNamedAuthToken("tokenname3") |
863 | - token3.date_deactivated = before_last_start |
864 | - token4 = self.ppa.newNamedAuthToken("tokenname4") |
865 | - token4.date_deactivated = tomorrow |
866 | - |
867 | - script = self.getScript() |
868 | - self.assertContentEqual( |
869 | - [token2], script.getDeactivatedNamedTokens(last_start)) |
870 | - |
871 | - def test_processes_PPAs_without_subscription(self): |
872 | - # A .htaccess file is written for Private PPAs even if they don't have |
873 | - # any subscriptions. |
874 | - htaccess, htpasswd = self.ensureNoFiles() |
875 | - transaction.commit() |
876 | - |
877 | - # Call the script and check that we have a .htaccess and a .htpasswd. |
878 | - return_code, stdout, stderr = self.runScript() |
879 | - self.assertEqual( |
880 | - return_code, 0, "Got a bad return code of %s\nOutput:\n%s" % |
881 | - (return_code, stderr)) |
882 | - self.assertThat([htaccess, htpasswd], AllMatch(FileExists())) |
883 | - os.remove(htaccess) |
884 | - os.remove(htpasswd) |
885 | diff --git a/lib/lp/archivepublisher/tests/test_htaccess.py b/lib/lp/archivepublisher/tests/test_htaccess.py |
886 | deleted file mode 100644 |
887 | index d435a2d..0000000 |
888 | --- a/lib/lp/archivepublisher/tests/test_htaccess.py |
889 | +++ /dev/null |
890 | @@ -1,139 +0,0 @@ |
891 | -# Copyright 2009-2018 Canonical Ltd. This software is licensed under the |
892 | -# GNU Affero General Public License version 3 (see the file LICENSE). |
893 | - |
894 | -"""Test htaccess/htpasswd file generation. """ |
895 | - |
896 | -from __future__ import absolute_import, print_function, unicode_literals |
897 | - |
898 | -import os |
899 | -import tempfile |
900 | - |
901 | -from zope.component import getUtility |
902 | - |
903 | -from lp.archivepublisher.htaccess import ( |
904 | - htpasswd_credentials_for_archive, |
905 | - write_htaccess, |
906 | - write_htpasswd, |
907 | - ) |
908 | -from lp.registry.interfaces.distribution import IDistributionSet |
909 | -from lp.registry.interfaces.person import IPersonSet |
910 | -from lp.services.features.testing import FeatureFixture |
911 | -from lp.soyuz.interfaces.archive import NAMED_AUTH_TOKEN_FEATURE_FLAG |
912 | -from lp.testing import TestCaseWithFactory |
913 | -from lp.testing.layers import LaunchpadZopelessLayer |
914 | - |
915 | - |
916 | -class TestHtpasswdGeneration(TestCaseWithFactory): |
917 | - """Test htpasswd generation.""" |
918 | - |
919 | - layer = LaunchpadZopelessLayer |
920 | - |
921 | - def setUp(self): |
922 | - super(TestHtpasswdGeneration, self).setUp() |
923 | - self.owner = self.factory.makePerson( |
924 | - name="joe", displayname="Joe Smith") |
925 | - self.ppa = self.factory.makeArchive( |
926 | - owner=self.owner, name="myppa", private=True) |
927 | - |
928 | - # "Ubuntu" doesn't have a proper publisher config but Ubuntutest |
929 | - # does, so override the PPA's distro here. |
930 | - ubuntutest = getUtility(IDistributionSet)['ubuntutest'] |
931 | - self.ppa.distribution = ubuntutest |
932 | - |
933 | - # Enable named auth tokens. |
934 | - self.useFixture(FeatureFixture({NAMED_AUTH_TOKEN_FEATURE_FLAG: "on"})) |
935 | - |
936 | - def test_write_htpasswd(self): |
937 | - """Test that writing the .htpasswd file works properly.""" |
938 | - fd, filename = tempfile.mkstemp() |
939 | - os.close(fd) |
940 | - |
941 | - TEST_PASSWORD = "password" |
942 | - TEST_PASSWORD2 = "passwor2" |
943 | - |
944 | - # We provide a constant salt to the crypt function so that we |
945 | - # can test the encrypted result. |
946 | - SALT = "XX" |
947 | - |
948 | - user1 = ("user", TEST_PASSWORD, SALT) |
949 | - user2 = ("user2", TEST_PASSWORD2, SALT) |
950 | - list_of_users = [user1] |
951 | - list_of_users.append(user2) |
952 | - |
953 | - write_htpasswd(filename, list_of_users) |
954 | - |
955 | - expected_contents = [ |
956 | - "user:XXq2wKiyI43A2", |
957 | - "user2:XXaQB8b5Gtwi.", |
958 | - ] |
959 | - |
960 | - file = open(filename, "r") |
961 | - file_contents = file.read().splitlines() |
962 | - file.close() |
963 | - os.remove(filename) |
964 | - |
965 | - self.assertEqual(expected_contents, file_contents) |
966 | - |
967 | - def test_write_htaccess(self): |
968 | - # write_access can write a correct htaccess file. |
969 | - fd, filename = tempfile.mkstemp() |
970 | - os.close(fd) |
971 | - |
972 | - write_htaccess(filename, "/some/distroot") |
973 | - self.assertTrue( |
974 | - os.path.isfile(filename), |
975 | - "%s is not present when it should be" % filename) |
976 | - self.addCleanup(os.remove, filename) |
977 | - |
978 | - contents = [ |
979 | - "", |
980 | - "AuthType Basic", |
981 | - "AuthName \"Token Required\"", |
982 | - "AuthUserFile /some/distroot/.htpasswd", |
983 | - "Require valid-user", |
984 | - ] |
985 | - |
986 | - file = open(filename, "r") |
987 | - file_contents = file.read().splitlines() |
988 | - file.close() |
989 | - |
990 | - self.assertEqual(contents, file_contents) |
991 | - |
992 | - def test_credentials_for_archive_empty(self): |
993 | - # If there are no ArchiveAuthTokens for an archive just |
994 | - # the buildd secret is returned. |
995 | - self.ppa.buildd_secret = "sekr1t" |
996 | - self.assertEqual( |
997 | - [("buildd", "sekr1t", "bu")], |
998 | - list(htpasswd_credentials_for_archive(self.ppa))) |
999 | - |
1000 | - def test_credentials_for_archive(self): |
1001 | - # ArchiveAuthTokens for an archive are returned by |
1002 | - # credentials_for_archive. |
1003 | - self.ppa.buildd_secret = "geheim" |
1004 | - name12 = getUtility(IPersonSet).getByName("name12") |
1005 | - name16 = getUtility(IPersonSet).getByName("name16") |
1006 | - hyphenated = self.factory.makePerson(name="a-b-c") |
1007 | - self.ppa.newSubscription(name12, self.ppa.owner) |
1008 | - self.ppa.newSubscription(name16, self.ppa.owner) |
1009 | - self.ppa.newSubscription(hyphenated, self.ppa.owner) |
1010 | - first_created_token = self.ppa.newAuthToken(name16) |
1011 | - second_created_token = self.ppa.newAuthToken(name12) |
1012 | - third_created_token = self.ppa.newAuthToken(hyphenated) |
1013 | - named_token_20 = self.ppa.newNamedAuthToken("name20", as_dict=False) |
1014 | - named_token_14 = self.ppa.newNamedAuthToken("name14", as_dict=False) |
1015 | - named_token_99 = self.ppa.newNamedAuthToken("name99", as_dict=False) |
1016 | - named_token_99.deactivate() |
1017 | - |
1018 | - expected_credentials = [ |
1019 | - ("buildd", "geheim", "bu"), |
1020 | - ("+name14", named_token_14.token, "bm"), |
1021 | - ("+name20", named_token_20.token, "bm"), |
1022 | - ("a-b-c", third_created_token.token, "YS"), |
1023 | - ("name12", second_created_token.token, "bm"), |
1024 | - ("name16", first_created_token.token, "bm"), |
1025 | - ] |
1026 | - credentials = list(htpasswd_credentials_for_archive(self.ppa)) |
1027 | - |
1028 | - # Use assertEqual instead of assertContentEqual to verify order. |
1029 | - self.assertEqual(expected_credentials, credentials) |
1030 | diff --git a/lib/lp/archivepublisher/tests/test_publisher.py b/lib/lp/archivepublisher/tests/test_publisher.py |
1031 | index 67ec904..0c8ff87 100644 |
1032 | --- a/lib/lp/archivepublisher/tests/test_publisher.py |
1033 | +++ b/lib/lp/archivepublisher/tests/test_publisher.py |
1034 | @@ -12,7 +12,6 @@ from collections import ( |
1035 | defaultdict, |
1036 | OrderedDict, |
1037 | ) |
1038 | -import crypt |
1039 | from datetime import ( |
1040 | datetime, |
1041 | timedelta, |
1042 | @@ -2328,44 +2327,6 @@ class TestPublisher(TestPublisherBase): |
1043 | hoary_pub.requestDeletion(self.ubuntutest.owner) |
1044 | self._assertPublishesSeriesAlias(publisher, "breezy-autotest") |
1045 | |
1046 | - def testHtaccessForPrivatePPA(self): |
1047 | - # A htaccess file is created for new private PPA's. |
1048 | - |
1049 | - ppa = self.factory.makeArchive( |
1050 | - distribution=self.ubuntutest, private=True) |
1051 | - ppa.buildd_secret = "geheim" |
1052 | - |
1053 | - # Set up the publisher for it and publish its repository. |
1054 | - # setupArchiveDirs is what actually configures the htaccess file. |
1055 | - getPublisher(ppa, [], self.logger).setupArchiveDirs() |
1056 | - pubconf = getPubConfig(ppa) |
1057 | - htaccess_path = os.path.join(pubconf.archiveroot, ".htaccess") |
1058 | - self.assertTrue(os.path.exists(htaccess_path)) |
1059 | - with open(htaccess_path, 'r') as htaccess_f: |
1060 | - self.assertEqual(dedent(""" |
1061 | - AuthType Basic |
1062 | - AuthName "Token Required" |
1063 | - AuthUserFile %s/.htpasswd |
1064 | - Require valid-user |
1065 | - """) % pubconf.archiveroot, |
1066 | - htaccess_f.read()) |
1067 | - |
1068 | - htpasswd_path = os.path.join(pubconf.archiveroot, ".htpasswd") |
1069 | - |
1070 | - # Read it back in. |
1071 | - with open(htpasswd_path, "r") as htpasswd_f: |
1072 | - file_contents = htpasswd_f.readlines() |
1073 | - |
1074 | - self.assertEqual(1, len(file_contents)) |
1075 | - |
1076 | - # The first line should be the buildd_secret. |
1077 | - [user, password] = file_contents[0].strip().split(":", 1) |
1078 | - self.assertEqual("buildd", user) |
1079 | - # We can re-encrypt the buildd_secret and it should match the |
1080 | - # one in the .htpasswd file. |
1081 | - encrypted_secret = crypt.crypt(ppa.buildd_secret, password) |
1082 | - self.assertEqual(encrypted_secret, password) |
1083 | - |
1084 | def testWriteSuiteI18n(self): |
1085 | """Test i18n/Index writing.""" |
1086 | publisher = Publisher( |
1087 | diff --git a/lib/lp/oci/model/ocirecipebuildjob.py b/lib/lp/oci/model/ocirecipebuildjob.py |
1088 | index 8f55850..9ec2e91 100644 |
1089 | --- a/lib/lp/oci/model/ocirecipebuildjob.py |
1090 | +++ b/lib/lp/oci/model/ocirecipebuildjob.py |
1091 | @@ -45,10 +45,7 @@ from lp.oci.interfaces.ociregistryclient import ( |
1092 | ) |
1093 | from lp.services.config import config |
1094 | from lp.services.database.enumcol import DBEnum |
1095 | -from lp.services.database.interfaces import ( |
1096 | - IMasterStore, |
1097 | - IStore, |
1098 | - ) |
1099 | +from lp.services.database.interfaces import IStore |
1100 | from lp.services.database.locking import ( |
1101 | AdvisoryLockHeld, |
1102 | LockType, |
1103 | @@ -189,6 +186,13 @@ class OCIRegistryUploadJob(OCIRecipeBuildJobDerived): |
1104 | |
1105 | class_job_type = OCIRecipeBuildJobType.REGISTRY_UPLOAD |
1106 | |
1107 | + # This is a known slow task that will exceed the timeouts for |
1108 | + # the normal job queue, so put it on a queue with longer timeouts |
1109 | + task_queue = 'launchpad_job_slow' |
1110 | + |
1111 | + soft_time_limit = timedelta(minutes=60) |
1112 | + lease_duration = timedelta(minutes=60) |
1113 | + |
1114 | class ManifestListUploadError(Exception): |
1115 | pass |
1116 | |
1117 | diff --git a/lib/lp/oci/tests/test_ocirecipebuildjob.py b/lib/lp/oci/tests/test_ocirecipebuildjob.py |
1118 | index 95718bb..9dfb785 100644 |
1119 | --- a/lib/lp/oci/tests/test_ocirecipebuildjob.py |
1120 | +++ b/lib/lp/oci/tests/test_ocirecipebuildjob.py |
1121 | @@ -53,10 +53,7 @@ from lp.services.database.locking import ( |
1122 | from lp.services.features.testing import FeatureFixture |
1123 | from lp.services.job.interfaces.job import JobStatus |
1124 | from lp.services.job.runner import JobRunner |
1125 | -from lp.services.job.tests import ( |
1126 | - block_on_job, |
1127 | - pop_remote_notifications, |
1128 | - ) |
1129 | +from lp.services.job.tests import block_on_job |
1130 | from lp.services.statsd.tests import StatsMixin |
1131 | from lp.services.webapp import canonical_url |
1132 | from lp.services.webhooks.testing import LogsScheduledWebhooks |
1133 | @@ -71,7 +68,7 @@ from lp.testing.dbuser import ( |
1134 | from lp.testing.fakemethod import FakeMethod |
1135 | from lp.testing.fixture import ZopeUtilityFixture |
1136 | from lp.testing.layers import ( |
1137 | - CeleryJobLayer, |
1138 | + CelerySlowJobLayer, |
1139 | DatabaseFunctionalLayer, |
1140 | LaunchpadZopelessLayer, |
1141 | ) |
1142 | @@ -519,7 +516,6 @@ class TestOCIRegistryUploadJob(TestCaseWithFactory, MultiArchRecipeMixin, |
1143 | |
1144 | self.assertContentEqual([], ocibuild.registry_upload_jobs) |
1145 | job = OCIRegistryUploadJob.create(ocibuild) |
1146 | - client = FakeRegistryClient() |
1147 | switch_dbuser(config.IOCIRegistryUploadJobSource.dbuser) |
1148 | # Fork so that we can take an advisory lock from a different |
1149 | # PostgreSQL session. |
1150 | @@ -551,8 +547,6 @@ class TestOCIRegistryUploadJob(TestCaseWithFactory, MultiArchRecipeMixin, |
1151 | os.kill(pid, signal.SIGINT) |
1152 | |
1153 | |
1154 | - |
1155 | - |
1156 | class TestOCIRegistryUploadJobViaCelery(TestCaseWithFactory, |
1157 | MultiArchRecipeMixin): |
1158 | """Runs OCIRegistryUploadJob via Celery, to make sure the machinery |
1159 | @@ -563,7 +557,7 @@ class TestOCIRegistryUploadJobViaCelery(TestCaseWithFactory, |
1160 | so we should make sure we are not breaking anything in the interaction |
1161 | with the job lifecycle via celery. |
1162 | """ |
1163 | - layer = CeleryJobLayer |
1164 | + layer = CelerySlowJobLayer |
1165 | |
1166 | def setUp(self): |
1167 | super(TestOCIRegistryUploadJobViaCelery, self).setUp() |
1168 | @@ -583,4 +577,5 @@ class TestOCIRegistryUploadJobViaCelery(TestCaseWithFactory, |
1169 | for build in builds: |
1170 | OCIRegistryUploadJob.create(build) |
1171 | transaction.commit() |
1172 | - self.assertEqual(0, len(pop_remote_notifications())) |
1173 | + messages = [message.as_string() for message in pop_notifications()] |
1174 | + self.assertEqual(0, len(messages)) |
1175 | diff --git a/lib/lp/registry/browser/distribution.py b/lib/lp/registry/browser/distribution.py |
1176 | index e5777ec..aa8a74c 100644 |
1177 | --- a/lib/lp/registry/browser/distribution.py |
1178 | +++ b/lib/lp/registry/browser/distribution.py |
1179 | @@ -82,9 +82,6 @@ from lp.bugs.browser.structuralsubscription import ( |
1180 | ) |
1181 | from lp.buildmaster.interfaces.processor import IProcessorSet |
1182 | from lp.code.browser.vcslisting import TargetDefaultVCSNavigationMixin |
1183 | -from lp.oci.interfaces.ociregistrycredentials import ( |
1184 | - IOCIRegistryCredentialsSet, |
1185 | - ) |
1186 | from lp.registry.browser import ( |
1187 | add_subscribe_link, |
1188 | RegistryEditFormView, |
1189 | diff --git a/lib/lp/registry/interfaces/distribution.py b/lib/lp/registry/interfaces/distribution.py |
1190 | index 5ec5271..8f43f58 100644 |
1191 | --- a/lib/lp/registry/interfaces/distribution.py |
1192 | +++ b/lib/lp/registry/interfaces/distribution.py |
1193 | @@ -14,15 +14,18 @@ __all__ = [ |
1194 | 'IDistributionSet', |
1195 | 'NoPartnerArchive', |
1196 | 'NoSuchDistribution', |
1197 | + 'NoOCIAdminForDistribution', |
1198 | ] |
1199 | |
1200 | from lazr.lifecycle.snapshot import doNotSnapshot |
1201 | from lazr.restful.declarations import ( |
1202 | call_with, |
1203 | collection_default_content, |
1204 | + error_status, |
1205 | export_factory_operation, |
1206 | export_operation_as, |
1207 | export_read_operation, |
1208 | + export_write_operation, |
1209 | exported, |
1210 | exported_as_webservice_collection, |
1211 | exported_as_webservice_entry, |
1212 | @@ -38,6 +41,7 @@ from lazr.restful.fields import ( |
1213 | Reference, |
1214 | ) |
1215 | from lazr.restful.interface import copy_field |
1216 | +from six.moves import http_client |
1217 | from zope.interface import ( |
1218 | Attribute, |
1219 | Interface, |
1220 | @@ -113,6 +117,15 @@ from lp.translations.interfaces.hastranslationimports import ( |
1221 | from lp.translations.interfaces.translationpolicy import ITranslationPolicy |
1222 | |
1223 | |
1224 | +@error_status(http_client.BAD_REQUEST) |
1225 | +class NoOCIAdminForDistribution(Exception): |
1226 | + """There is no OCI Project Admin for this distribution.""" |
1227 | + |
1228 | + def __init__(self): |
1229 | + super(NoOCIAdminForDistribution, self).__init__( |
1230 | + "There is no OCI Project Admin for this distribution.") |
1231 | + |
1232 | + |
1233 | class IDistributionMirrorMenuMarker(Interface): |
1234 | """Marker interface for Mirror navigation.""" |
1235 | |
1236 | @@ -129,6 +142,35 @@ class DistributionNameField(PillarNameField): |
1237 | class IDistributionEditRestricted(IOfficialBugTagTargetRestricted): |
1238 | """IDistribution properties requiring launchpad.Edit permission.""" |
1239 | |
1240 | + @call_with(registrant=REQUEST_USER) |
1241 | + @operation_parameters( |
1242 | + registry_url=TextLine( |
1243 | + title=_("The registry url."), |
1244 | + description=_("The url of the OCI registry to use."), |
1245 | + required=True), |
1246 | + region=TextLine( |
1247 | + title=_("OCI registry region."), |
1248 | + description=_("The region of the OCI registry."), |
1249 | + required=False), |
1250 | + username=TextLine( |
1251 | + title=_("Username"), |
1252 | + description=_("The username for the OCI registry."), |
1253 | + required=False), |
1254 | + password=TextLine( |
1255 | + title=_("Password"), |
1256 | + description=_("The password for the OCI registry."), |
1257 | + required=False)) |
1258 | + @export_write_operation() |
1259 | + @operation_for_version("devel") |
1260 | + def setOCICredentials(registrant, registry_url, region, |
1261 | + username, password): |
1262 | + """Set the credentials for the OCI registry for OCI projects.""" |
1263 | + |
1264 | + @export_write_operation() |
1265 | + @operation_for_version("devel") |
1266 | + def deleteOCICredentials(): |
1267 | + """Delete any existing OCI credentials for the distribution.""" |
1268 | + |
1269 | |
1270 | class IDistributionDriverRestricted(Interface): |
1271 | """IDistribution properties requiring launchpad.Driver permission.""" |
1272 | @@ -727,7 +769,6 @@ class IDistributionPublic( |
1273 | "images in this distribution to a registry."), |
1274 | required=False, readonly=False) |
1275 | |
1276 | - |
1277 | @exported_as_webservice_entry(as_of="beta") |
1278 | class IDistribution( |
1279 | IDistributionEditRestricted, IDistributionPublic, IHasBugSupervisor, |
1280 | diff --git a/lib/lp/registry/model/distribution.py b/lib/lp/registry/model/distribution.py |
1281 | index 0288c54..f76d28b 100644 |
1282 | --- a/lib/lp/registry/model/distribution.py |
1283 | +++ b/lib/lp/registry/model/distribution.py |
1284 | @@ -89,6 +89,7 @@ from lp.bugs.model.structuralsubscription import ( |
1285 | from lp.code.interfaces.seriessourcepackagebranch import ( |
1286 | IFindOfficialBranchLinks, |
1287 | ) |
1288 | +from lp.oci.interfaces.ociregistrycredentials import IOCIRegistryCredentialsSet |
1289 | from lp.registry.enums import ( |
1290 | BranchSharingPolicy, |
1291 | BugSharingPolicy, |
1292 | @@ -101,6 +102,7 @@ from lp.registry.interfaces.accesspolicy import IAccessPolicySource |
1293 | from lp.registry.interfaces.distribution import ( |
1294 | IDistribution, |
1295 | IDistributionSet, |
1296 | + NoOCIAdminForDistribution, |
1297 | ) |
1298 | from lp.registry.interfaces.distributionmirror import ( |
1299 | IDistributionMirror, |
1300 | @@ -1531,6 +1533,32 @@ class Distribution(SQLBase, BugTargetBase, MakesAnnouncements, |
1301 | pillar=self, registrant=registrant, name=name, |
1302 | description=description) |
1303 | |
1304 | + def setOCICredentials(self, registrant, registry_url, |
1305 | + region, username, password): |
1306 | + """See `IDistribution`.""" |
1307 | + if not self.oci_project_admin: |
1308 | + raise NoOCIAdminForDistribution() |
1309 | + new_credentials = getUtility(IOCIRegistryCredentialsSet).getOrCreate( |
1310 | + registrant, |
1311 | + self.oci_project_admin, |
1312 | + registry_url, |
1313 | + {"username": username, "password": password, "region": region}, |
1314 | + override_owner=True) |
1315 | + old_credentials = self.oci_registry_credentials |
1316 | + if self.oci_registry_credentials != new_credentials: |
1317 | + # Remove the old credentials as we're assigning new ones |
1318 | + # or clearing them |
1319 | + self.oci_registry_credentials = new_credentials |
1320 | + if old_credentials: |
1321 | + old_credentials.destroySelf() |
1322 | + |
1323 | + def deleteOCICredentials(self): |
1324 | + """See `IDistribution`.""" |
1325 | + old_credentials = self.oci_registry_credentials |
1326 | + if old_credentials: |
1327 | + self.oci_registry_credentials = None |
1328 | + old_credentials.destroySelf() |
1329 | + |
1330 | |
1331 | @implementer(IDistributionSet) |
1332 | class DistributionSet: |
1333 | diff --git a/lib/lp/registry/scripts/closeaccount.py b/lib/lp/registry/scripts/closeaccount.py |
1334 | index 27b2eb1..b4e505b 100644 |
1335 | --- a/lib/lp/registry/scripts/closeaccount.py |
1336 | +++ b/lib/lp/registry/scripts/closeaccount.py |
1337 | @@ -362,12 +362,9 @@ def close_account(username, log): |
1338 | # the placeholder person row. |
1339 | skip.add(('sprintattendance', 'attendee')) |
1340 | |
1341 | - # generate_ppa_htaccess currently relies on seeing active |
1342 | - # ArchiveAuthToken rows so that it knows which ones to remove from |
1343 | - # .htpasswd files on disk in response to the cancellation of the |
1344 | - # corresponding ArchiveSubscriber rows; but even once PPA authorisation |
1345 | - # is handled dynamically, we probably still want to have the per-person |
1346 | - # audit trail here. |
1347 | + # PPA authorization is now handled dynamically and checks the |
1348 | + # subscriber's account status, so this isn't strictly necessary, but |
1349 | + # it's still nice to have the per-person audit trail. |
1350 | archive_subscriber_ids = set(store.find( |
1351 | ArchiveSubscriber.id, |
1352 | ArchiveSubscriber.subscriber_id == person.id, |
1353 | diff --git a/lib/lp/registry/tests/test_distribution.py b/lib/lp/registry/tests/test_distribution.py |
1354 | index 0b9f712..005a7e6 100644 |
1355 | --- a/lib/lp/registry/tests/test_distribution.py |
1356 | +++ b/lib/lp/registry/tests/test_distribution.py |
1357 | @@ -28,6 +28,7 @@ from lp.app.enums import ( |
1358 | ) |
1359 | from lp.app.errors import NotFoundError |
1360 | from lp.app.interfaces.launchpad import ILaunchpadCelebrities |
1361 | +from lp.oci.tests.helpers import OCIConfigHelperMixin |
1362 | from lp.registry.enums import ( |
1363 | BranchSharingPolicy, |
1364 | BugSharingPolicy, |
1365 | @@ -761,7 +762,7 @@ class DistributionOCIProjectAdminPermission(TestCaseWithFactory): |
1366 | self.assertTrue(distro.canAdministerOCIProjects(admin)) |
1367 | |
1368 | |
1369 | -class TestDistributionWebservice(TestCaseWithFactory): |
1370 | +class TestDistributionWebservice(OCIConfigHelperMixin, TestCaseWithFactory): |
1371 | """Test the IDistribution API. |
1372 | |
1373 | Some tests already exist in xx-distribution.txt. |
1374 | @@ -842,3 +843,92 @@ class TestDistributionWebservice(TestCaseWithFactory): |
1375 | start_date=(now - day).isoformat(), |
1376 | end_date=now.isoformat()) |
1377 | self.assertEqual([], empty_response.jsonBody()) |
1378 | + |
1379 | + def test_setOCICredentials(self): |
1380 | + # We can add OCI Credentials to the distribution |
1381 | + self.setConfig() |
1382 | + with person_logged_in(self.person): |
1383 | + distro = self.factory.makeDistribution(owner=self.person) |
1384 | + distro.oci_project_admin = self.person |
1385 | + distro_url = api_url(distro) |
1386 | + |
1387 | + resp = self.webservice.named_post( |
1388 | + distro_url, |
1389 | + "setOCICredentials", |
1390 | + registry_url="http://registry.test", |
1391 | + username="test-username", |
1392 | + password="test-password", |
1393 | + region="test-region" |
1394 | + ) |
1395 | + |
1396 | + self.assertEqual(200, resp.status) |
1397 | + with person_logged_in(self.person): |
1398 | + self.assertEqual( |
1399 | + "http://registry.test", |
1400 | + distro.oci_registry_credentials.url |
1401 | + ) |
1402 | + credentials = distro.oci_registry_credentials.getCredentials() |
1403 | + self.assertDictEqual({ |
1404 | + "username": "test-username", |
1405 | + "password": "test-password", |
1406 | + "region": "test-region"}, |
1407 | + credentials) |
1408 | + |
1409 | + def test_setOCICredentials_no_oci_admin(self): |
1410 | + # If there's no oci_project_admin to own the credentials, error |
1411 | + self.setConfig() |
1412 | + with person_logged_in(self.person): |
1413 | + distro = self.factory.makeDistribution(owner=self.person) |
1414 | + distro_url = api_url(distro) |
1415 | + |
1416 | + resp = self.webservice.named_post( |
1417 | + distro_url, |
1418 | + "setOCICredentials", |
1419 | + registry_url="http://registry.test", |
1420 | + ) |
1421 | + |
1422 | + self.assertEqual(400, resp.status) |
1423 | + self.assertIn( |
1424 | + b"no OCI Project Admin for this distribution", |
1425 | + resp.body) |
1426 | + |
1427 | + def test_setOCICredentials_changes_credentials(self): |
1428 | + # if we have existing credentials, we should change them |
1429 | + self.setConfig() |
1430 | + with person_logged_in(self.person): |
1431 | + distro = self.factory.makeDistribution(owner=self.person) |
1432 | + distro.oci_project_admin = self.person |
1433 | + credentials = self.factory.makeOCIRegistryCredentials() |
1434 | + distro.oci_registry_credentials = credentials |
1435 | + distro_url = api_url(distro) |
1436 | + |
1437 | + resp = self.webservice.named_post( |
1438 | + distro_url, |
1439 | + "setOCICredentials", |
1440 | + registry_url="http://registry.test", |
1441 | + ) |
1442 | + |
1443 | + self.assertEqual(200, resp.status) |
1444 | + with person_logged_in(self.person): |
1445 | + self.assertEqual( |
1446 | + "http://registry.test", |
1447 | + distro.oci_registry_credentials.url |
1448 | + ) |
1449 | + |
1450 | + def test_deleteOCICredentials(self): |
1451 | + # We can remove existing credentials |
1452 | + self.setConfig() |
1453 | + with person_logged_in(self.person): |
1454 | + distro = self.factory.makeDistribution(owner=self.person) |
1455 | + distro.oci_project_admin = self.person |
1456 | + credentials = self.factory.makeOCIRegistryCredentials() |
1457 | + distro.oci_registry_credentials = credentials |
1458 | + distro_url = api_url(distro) |
1459 | + |
1460 | + resp = self.webservice.named_post( |
1461 | + distro_url, |
1462 | + "deleteOCICredentials") |
1463 | + |
1464 | + self.assertEqual(200, resp.status) |
1465 | + with person_logged_in(self.person): |
1466 | + self.assertIsNone(distro.oci_registry_credentials) |
1467 | diff --git a/lib/lp/registry/tests/test_personmerge.py b/lib/lp/registry/tests/test_personmerge.py |
1468 | index a1c00be..5c75436 100644 |
1469 | --- a/lib/lp/registry/tests/test_personmerge.py |
1470 | +++ b/lib/lp/registry/tests/test_personmerge.py |
1471 | @@ -719,8 +719,8 @@ class TestMergePeople(TestCaseWithFactory, KarmaTestMixin): |
1472 | self.useFixture(FeatureFixture({OCI_RECIPE_ALLOW_CREATE: 'on'})) |
1473 | duplicate = self.factory.makePerson() |
1474 | mergee = self.factory.makePerson() |
1475 | - [ref] = self.factory.makeGitRefs(paths=['refs/heads/v1.0-20.04']) |
1476 | - [ref2] = self.factory.makeGitRefs(paths=['refs/heads/v1.0-20.04']) |
1477 | + [ref] = self.factory.makeGitRefs(paths=[u'refs/heads/v1.0-20.04']) |
1478 | + [ref2] = self.factory.makeGitRefs(paths=[u'refs/heads/v1.0-20.04']) |
1479 | self.factory.makeOCIRecipe( |
1480 | registrant=duplicate, owner=duplicate, name=u'foo', git_ref=ref) |
1481 | self.factory.makeOCIRecipe( |
1482 | diff --git a/lib/lp/services/scripts/base.py b/lib/lp/services/scripts/base.py |
1483 | index bb4490b..66ebdbf 100644 |
1484 | --- a/lib/lp/services/scripts/base.py |
1485 | +++ b/lib/lp/services/scripts/base.py |
1486 | @@ -406,10 +406,6 @@ class LaunchpadCronScript(LaunchpadScript): |
1487 | oops_hdlr = OopsHandler(self.name, logger=self.logger) |
1488 | logging.getLogger().addHandler(oops_hdlr) |
1489 | |
1490 | - def get_last_activity(self): |
1491 | - """Return the last activity, if any.""" |
1492 | - return getUtility(IScriptActivitySet).getLastActivity(self.name) |
1493 | - |
1494 | @log_unhandled_exception_and_exit |
1495 | def record_activity(self, date_started, date_completed): |
1496 | """Record the successful completion of the script.""" |
1497 | diff --git a/lib/lp/snappy/model/snap.py b/lib/lp/snappy/model/snap.py |
1498 | index 58dc398..1a108b0 100644 |
1499 | --- a/lib/lp/snappy/model/snap.py |
1500 | +++ b/lib/lp/snappy/model/snap.py |
1501 | @@ -1187,13 +1187,6 @@ class Snap(Storm, WebhookTargetMixin): |
1502 | person.is_team and |
1503 | person.anyone_can_join()) |
1504 | |
1505 | - @property |
1506 | - def subscribers(self): |
1507 | - return Store.of(self).find( |
1508 | - Person, |
1509 | - SnapSubscription.person_id == Person.id, |
1510 | - SnapSubscription.snap == self) |
1511 | - |
1512 | def subscribe(self, person, subscribed_by, ignore_permissions=False): |
1513 | """See `ISnap`.""" |
1514 | if not self.userCanBeSubscribed(person): |
1515 | diff --git a/lib/lp/soyuz/scripts/expire_archive_files.py b/lib/lp/soyuz/scripts/expire_archive_files.py |
1516 | index ade45d5..7ae54e8 100755 |
1517 | --- a/lib/lp/soyuz/scripts/expire_archive_files.py |
1518 | +++ b/lib/lp/soyuz/scripts/expire_archive_files.py |
1519 | @@ -49,6 +49,9 @@ netbook-remix-team |
1520 | netbook-team |
1521 | oem-solutions-group |
1522 | payson |
1523 | +snappy-dev/edge |
1524 | +snappy-dev/image |
1525 | +snappy-dev/tools |
1526 | transyl |
1527 | ubuntu-cloud-archive |
1528 | ubuntu-mobile |
1529 | diff --git a/lib/lp/testing/layers.py b/lib/lp/testing/layers.py |
1530 | index 1060fa9..541ef56 100644 |
1531 | --- a/lib/lp/testing/layers.py |
1532 | +++ b/lib/lp/testing/layers.py |
1533 | @@ -1899,6 +1899,24 @@ class CeleryJobLayer(AppServerLayer): |
1534 | cls.celery_worker = None |
1535 | |
1536 | |
1537 | +class CelerySlowJobLayer(AppServerLayer): |
1538 | +    """Layer for tests that run slow jobs via Celery (launchpad_job_slow queue)."""
1539 | + |
1540 | + celery_worker = None |
1541 | + |
1542 | + @classmethod |
1543 | + @profiled |
1544 | + def setUp(cls): |
1545 | + cls.celery_worker = celery_worker('launchpad_job_slow') |
1546 | + cls.celery_worker.__enter__() |
1547 | + |
1548 | + @classmethod |
1549 | + @profiled |
1550 | + def tearDown(cls): |
1551 | + cls.celery_worker.__exit__(None, None, None) |
1552 | + cls.celery_worker = None |
1553 | + |
1554 | + |
1555 | class CeleryBzrsyncdJobLayer(AppServerLayer): |
1556 | """Layer for tests that run jobs that read from branches via Celery.""" |
1557 | |
1558 | diff --git a/utilities/manage-celery-workers.sh b/utilities/manage-celery-workers.sh |
1559 | new file mode 100755 |
1560 | index 0000000..f83b14f |
1561 | --- /dev/null |
1562 | +++ b/utilities/manage-celery-workers.sh |
1563 | @@ -0,0 +1,58 @@ |
1564 | +#!/bin/sh |
1565 | + |
1566 | +# Used for dev and dogfood, do not use in a production like environment. |
1567 | + |
1568 | +start_worker() { |
1569 | + # Start a worker for a given queue |
1570 | + queue=$1 |
1571 | + echo "Starting worker for $queue" |
1572 | + start-stop-daemon \ |
1573 | + --start --oknodo --quiet --background \ |
1574 | + --pidfile "/var/tmp/celeryd-$queue.pid" --make-pidfile \ |
1575 | + --startas "$PWD/bin/celery" -- worker \ |
1576 | + --queues="$queue"\ |
1577 | + --config=lp.services.job.celeryconfig \ |
1578 | + --hostname="$queue@%n" \ |
1579 | + --loglevel=DEBUG \ |
1580 | + --logfile="/var/tmp/celeryd-$queue.log" |
1581 | + |
1582 | +} |
1583 | + |
1584 | +stop_worker() { |
1585 | + queue=$1 |
1586 | + echo "Stopping worker for $queue" |
1587 | + start-stop-daemon --oknodo --stop --pidfile "/var/tmp/celeryd-$queue.pid" |
1588 | +} |
1589 | + |
1590 | +case "$1" in |
1591 | + start) |
1592 | + for queue in launchpad_job launchpad_job_slow bzrsyncd_job bzrsyncd_job_slow branch_write_job branch_write_job_slow celerybeat |
1593 | + do |
1594 | + start_worker $queue |
1595 | + done |
1596 | + ;; |
1597 | + stop) |
1598 | + for queue in launchpad_job launchpad_job_slow bzrsyncd_job bzrsyncd_job_slow branch_write_job branch_write_job_slow celerybeat |
1599 | + do |
1600 | + stop_worker $queue |
1601 | + done |
1602 | + ;; |
1603 | + |
1604 | + restart|force-reload) |
1605 | + for queue in launchpad_job launchpad_job_slow bzrsyncd_job bzrsyncd_job_slow branch_write_job branch_write_job_slow celerybeat |
1606 | + do |
1607 | + stop_worker $queue |
1608 | + done |
1609 | + sleep 1 |
1610 | + for queue in launchpad_job launchpad_job_slow bzrsyncd_job bzrsyncd_job_slow branch_write_job branch_write_job_slow celerybeat |
1611 | + do |
1612 | + start_worker $queue |
1613 | + done |
1614 | + echo "$NAME." |
1615 | + ;; |
1616 | + *) |
1617 | + N=/etc/init.d/$NAME |
1618 | + echo "Usage: $N {start|stop|restart|force-reload}" >&2 |
1619 | + exit 1 |
1620 | + ;; |
1621 | +esac |