Merge launchpad:master into launchpad:db-devel
- Git
- lp:launchpad
- master
- Merge into db-devel
Proposed by
Colin Watson
Status: | Merged |
---|---|
Approved by: | Colin Watson |
Approved revision: | 8d28b7c56f3ac7f0d39ec1c263268ffb0b42c676 |
Merge reported by: | Otto Co-Pilot |
Merged at revision: | not available |
Proposed branch: | launchpad:master |
Merge into: | launchpad:db-devel |
Diff against target: |
1621 lines (+273/-801) 18 files modified
dev/null (+0/-139) lib/lp/archivepublisher/publishing.py (+3/-28) lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py (+11/-206) lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py (+1/-351) lib/lp/archivepublisher/tests/test_publisher.py (+0/-39) lib/lp/oci/model/ocirecipebuildjob.py (+8/-4) lib/lp/oci/tests/test_ocirecipebuildjob.py (+5/-10) lib/lp/registry/browser/distribution.py (+0/-3) lib/lp/registry/interfaces/distribution.py (+42/-1) lib/lp/registry/model/distribution.py (+28/-0) lib/lp/registry/scripts/closeaccount.py (+3/-6) lib/lp/registry/tests/test_distribution.py (+91/-1) lib/lp/registry/tests/test_personmerge.py (+2/-2) lib/lp/services/scripts/base.py (+0/-4) lib/lp/snappy/model/snap.py (+0/-7) lib/lp/soyuz/scripts/expire_archive_files.py (+3/-0) lib/lp/testing/layers.py (+18/-0) utilities/manage-celery-workers.sh (+58/-0) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Colin Watson (community) | Approve | ||
Review via email: mp+402439@code.launchpad.net |
Commit message
Manually merge from master to fix test failure on Python 2
Description of the change
The bug fixed in https:// [URL truncated in this capture]
To post a comment you must log in.
Revision history for this message
Colin Watson (cjwatson) : | # |
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/lib/lp/archivepublisher/htaccess.py b/lib/lp/archivepublisher/htaccess.py | |||
2 | 0 | deleted file mode 100644 | 0 | deleted file mode 100644 |
3 | index 613cfde..0000000 | |||
4 | --- a/lib/lp/archivepublisher/htaccess.py | |||
5 | +++ /dev/null | |||
6 | @@ -1,124 +0,0 @@ | |||
7 | 1 | #!/usr/bin/python2 | ||
8 | 2 | # | ||
9 | 3 | # Copyright 2010-2017 Canonical Ltd. This software is licensed under the | ||
10 | 4 | # GNU Affero General Public License version 3 (see the file LICENSE). | ||
11 | 5 | |||
12 | 6 | """Writing of htaccess and htpasswd files.""" | ||
13 | 7 | |||
14 | 8 | __metaclass__ = type | ||
15 | 9 | |||
16 | 10 | __all__ = [ | ||
17 | 11 | 'htpasswd_credentials_for_archive', | ||
18 | 12 | 'write_htaccess', | ||
19 | 13 | 'write_htpasswd', | ||
20 | 14 | ] | ||
21 | 15 | |||
22 | 16 | import base64 | ||
23 | 17 | import crypt | ||
24 | 18 | import os | ||
25 | 19 | |||
26 | 20 | from lp.registry.model.person import Person | ||
27 | 21 | from lp.services.database.interfaces import IStore | ||
28 | 22 | from lp.soyuz.model.archiveauthtoken import ArchiveAuthToken | ||
29 | 23 | |||
30 | 24 | |||
31 | 25 | HTACCESS_TEMPLATE = """ | ||
32 | 26 | AuthType Basic | ||
33 | 27 | AuthName "Token Required" | ||
34 | 28 | AuthUserFile %(path)s/.htpasswd | ||
35 | 29 | Require valid-user | ||
36 | 30 | """ | ||
37 | 31 | |||
38 | 32 | BUILDD_USER_NAME = "buildd" | ||
39 | 33 | |||
40 | 34 | |||
41 | 35 | def write_htaccess(htaccess_filename, distroot): | ||
42 | 36 | """Write a htaccess file for a private archive. | ||
43 | 37 | |||
44 | 38 | :param htaccess_filename: Filename of the htaccess file. | ||
45 | 39 | :param distroot: Archive root path | ||
46 | 40 | """ | ||
47 | 41 | interpolations = {"path": distroot} | ||
48 | 42 | file = open(htaccess_filename, "w") | ||
49 | 43 | try: | ||
50 | 44 | file.write(HTACCESS_TEMPLATE % interpolations) | ||
51 | 45 | finally: | ||
52 | 46 | file.close() | ||
53 | 47 | |||
54 | 48 | |||
55 | 49 | def write_htpasswd(filename, users): | ||
56 | 50 | """Write out a new htpasswd file. | ||
57 | 51 | |||
58 | 52 | :param filename: The file to create. | ||
59 | 53 | :param users: Iterable over (user, password, salt) tuples. | ||
60 | 54 | """ | ||
61 | 55 | if os.path.isfile(filename): | ||
62 | 56 | os.remove(filename) | ||
63 | 57 | |||
64 | 58 | file = open(filename, "a") | ||
65 | 59 | try: | ||
66 | 60 | for user, password, salt in users: | ||
67 | 61 | encrypted = crypt.crypt(password, salt) | ||
68 | 62 | file.write("%s:%s\n" % (user, encrypted)) | ||
69 | 63 | finally: | ||
70 | 64 | file.close() | ||
71 | 65 | |||
72 | 66 | |||
73 | 67 | # XXX cjwatson 2017-10-09: This whole mechanism of writing password files to | ||
74 | 68 | # disk (as opposed to e.g. using a WSGI authentication provider that checks | ||
75 | 69 | # passwords against the database) is terrible, but as long as we're using it | ||
76 | 70 | # we should use something like bcrypt rather than DES-based crypt. | ||
77 | 71 | def make_salt(s): | ||
78 | 72 | """Produce a salt from an input string. | ||
79 | 73 | |||
80 | 74 | This ensures that salts are drawn from the correct alphabet | ||
81 | 75 | ([./a-zA-Z0-9]). | ||
82 | 76 | """ | ||
83 | 77 | # As long as the input string is at least one character long, there will | ||
84 | 78 | # be no padding within the first two characters. | ||
85 | 79 | return base64.b64encode( | ||
86 | 80 | (s or " ").encode("UTF-8"), altchars=b"./")[:2].decode("ASCII") | ||
87 | 81 | |||
88 | 82 | |||
89 | 83 | def htpasswd_credentials_for_archive(archive): | ||
90 | 84 | """Return credentials for an archive for use with write_htpasswd. | ||
91 | 85 | |||
92 | 86 | :param archive: An `IArchive` (must be private) | ||
93 | 87 | :return: Iterable of tuples with (user, password, salt) for use with | ||
94 | 88 | write_htpasswd. | ||
95 | 89 | """ | ||
96 | 90 | assert archive.private, "Archive %r must be private" % archive | ||
97 | 91 | |||
98 | 92 | tokens = IStore(ArchiveAuthToken).find( | ||
99 | 93 | (ArchiveAuthToken.person_id, ArchiveAuthToken.name, | ||
100 | 94 | ArchiveAuthToken.token), | ||
101 | 95 | ArchiveAuthToken.archive == archive, | ||
102 | 96 | ArchiveAuthToken.date_deactivated == None) | ||
103 | 97 | # We iterate tokens more than once - materialise it. | ||
104 | 98 | tokens = list(tokens) | ||
105 | 99 | |||
106 | 100 | # Preload map with person ID to person name. | ||
107 | 101 | person_ids = {token[0] for token in tokens} | ||
108 | 102 | names = dict( | ||
109 | 103 | IStore(Person).find( | ||
110 | 104 | (Person.id, Person.name), Person.id.is_in(person_ids))) | ||
111 | 105 | |||
112 | 106 | # Format the user field by combining the token list with the person list | ||
113 | 107 | # (when token has person_id) or prepending a '+' (for named tokens). | ||
114 | 108 | output = [] | ||
115 | 109 | for person_id, token_name, token in tokens: | ||
116 | 110 | if token_name: | ||
117 | 111 | # A named auth token. | ||
118 | 112 | output.append(('+' + token_name, token, make_salt(token_name))) | ||
119 | 113 | else: | ||
120 | 114 | # A subscription auth token. | ||
121 | 115 | output.append( | ||
122 | 116 | (names[person_id], token, make_salt(names[person_id]))) | ||
123 | 117 | |||
124 | 118 | # The first .htpasswd entry is the buildd_secret. | ||
125 | 119 | yield (BUILDD_USER_NAME, archive.buildd_secret, BUILDD_USER_NAME[:2]) | ||
126 | 120 | |||
127 | 121 | # Iterate over tokens and write the appropriate htpasswd entries for them. | ||
128 | 122 | # Sort by name/person ID so the file can be compared later. | ||
129 | 123 | for user, password, salt in sorted(output): | ||
130 | 124 | yield (user, password, salt) | ||
131 | diff --git a/lib/lp/archivepublisher/publishing.py b/lib/lp/archivepublisher/publishing.py | |||
132 | index 55614f9..b87c7ce 100644 | |||
133 | --- a/lib/lp/archivepublisher/publishing.py | |||
134 | +++ b/lib/lp/archivepublisher/publishing.py | |||
135 | @@ -50,17 +50,14 @@ from lp.archivepublisher import HARDCODED_COMPONENT_ORDER | |||
136 | 50 | from lp.archivepublisher.config import getPubConfig | 50 | from lp.archivepublisher.config import getPubConfig |
137 | 51 | from lp.archivepublisher.diskpool import DiskPool | 51 | from lp.archivepublisher.diskpool import DiskPool |
138 | 52 | from lp.archivepublisher.domination import Dominator | 52 | from lp.archivepublisher.domination import Dominator |
139 | 53 | from lp.archivepublisher.htaccess import ( | ||
140 | 54 | htpasswd_credentials_for_archive, | ||
141 | 55 | write_htaccess, | ||
142 | 56 | write_htpasswd, | ||
143 | 57 | ) | ||
144 | 58 | from lp.archivepublisher.indices import ( | 53 | from lp.archivepublisher.indices import ( |
145 | 59 | build_binary_stanza_fields, | 54 | build_binary_stanza_fields, |
146 | 60 | build_source_stanza_fields, | 55 | build_source_stanza_fields, |
147 | 61 | build_translations_stanza_fields, | 56 | build_translations_stanza_fields, |
148 | 62 | ) | 57 | ) |
150 | 63 | from lp.archivepublisher.interfaces.archivegpgsigningkey import ISignableArchive | 58 | from lp.archivepublisher.interfaces.archivegpgsigningkey import ( |
151 | 59 | ISignableArchive, | ||
152 | 60 | ) | ||
153 | 64 | from lp.archivepublisher.model.ftparchive import FTPArchiveHandler | 61 | from lp.archivepublisher.model.ftparchive import FTPArchiveHandler |
154 | 65 | from lp.archivepublisher.utils import ( | 62 | from lp.archivepublisher.utils import ( |
155 | 66 | get_ppa_reference, | 63 | get_ppa_reference, |
156 | @@ -166,27 +163,6 @@ def _getDiskPool(pubconf, log): | |||
157 | 166 | return dp | 163 | return dp |
158 | 167 | 164 | ||
159 | 168 | 165 | ||
160 | 169 | def _setupHtaccess(archive, pubconf, log): | ||
161 | 170 | """Setup .htaccess/.htpasswd files for an archive. | ||
162 | 171 | """ | ||
163 | 172 | if not archive.private: | ||
164 | 173 | # FIXME: JRV 20101108 leftover .htaccess and .htpasswd files | ||
165 | 174 | # should be removed when support for making existing 3PA's public | ||
166 | 175 | # is added; bug=376072 | ||
167 | 176 | return | ||
168 | 177 | |||
169 | 178 | htaccess_path = os.path.join(pubconf.archiveroot, ".htaccess") | ||
170 | 179 | htpasswd_path = os.path.join(pubconf.archiveroot, ".htpasswd") | ||
171 | 180 | # After the initial htaccess/htpasswd files | ||
172 | 181 | # are created generate_ppa_htaccess is responsible for | ||
173 | 182 | # updating the tokens. | ||
174 | 183 | if not os.path.exists(htaccess_path): | ||
175 | 184 | log.debug("Writing htaccess file.") | ||
176 | 185 | write_htaccess(htaccess_path, pubconf.archiveroot) | ||
177 | 186 | passwords = htpasswd_credentials_for_archive(archive) | ||
178 | 187 | write_htpasswd(htpasswd_path, passwords) | ||
179 | 188 | |||
180 | 189 | |||
181 | 190 | def getPublisher(archive, allowed_suites, log, distsroot=None): | 166 | def getPublisher(archive, allowed_suites, log, distsroot=None): |
182 | 191 | """Return an initialized Publisher instance for the given context. | 167 | """Return an initialized Publisher instance for the given context. |
183 | 192 | 168 | ||
184 | @@ -472,7 +448,6 @@ class Publisher(object): | |||
185 | 472 | def setupArchiveDirs(self): | 448 | def setupArchiveDirs(self): |
186 | 473 | self.log.debug("Setting up archive directories.") | 449 | self.log.debug("Setting up archive directories.") |
187 | 474 | self._config.setupArchiveDirs() | 450 | self._config.setupArchiveDirs() |
188 | 475 | _setupHtaccess(self.archive, self._config, self.log) | ||
189 | 476 | 451 | ||
190 | 477 | def isDirty(self, distroseries, pocket): | 452 | def isDirty(self, distroseries, pocket): |
191 | 478 | """True if a publication has happened in this release and pocket.""" | 453 | """True if a publication has happened in this release and pocket.""" |
192 | diff --git a/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py b/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py | |||
193 | index a272540..26e8db8 100644 | |||
194 | --- a/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py | |||
195 | +++ b/lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py | |||
196 | @@ -3,22 +3,10 @@ | |||
197 | 3 | # Copyright 2009-2011 Canonical Ltd. This software is licensed under the | 3 | # Copyright 2009-2011 Canonical Ltd. This software is licensed under the |
198 | 4 | # GNU Affero General Public License version 3 (see the file LICENSE). | 4 | # GNU Affero General Public License version 3 (see the file LICENSE). |
199 | 5 | 5 | ||
207 | 6 | from datetime import ( | 6 | from datetime import datetime |
201 | 7 | datetime, | ||
202 | 8 | timedelta, | ||
203 | 9 | ) | ||
204 | 10 | import filecmp | ||
205 | 11 | import os | ||
206 | 12 | import tempfile | ||
208 | 13 | 7 | ||
209 | 14 | import pytz | 8 | import pytz |
210 | 15 | 9 | ||
211 | 16 | from lp.archivepublisher.config import getPubConfig | ||
212 | 17 | from lp.archivepublisher.htaccess import ( | ||
213 | 18 | htpasswd_credentials_for_archive, | ||
214 | 19 | write_htaccess, | ||
215 | 20 | write_htpasswd, | ||
216 | 21 | ) | ||
217 | 22 | from lp.registry.model.teammembership import TeamParticipation | 10 | from lp.registry.model.teammembership import TeamParticipation |
218 | 23 | from lp.services.config import config | 11 | from lp.services.config import config |
219 | 24 | from lp.services.database.interfaces import IStore | 12 | from lp.services.database.interfaces import IStore |
220 | @@ -30,23 +18,19 @@ from lp.services.mail.sendmail import ( | |||
221 | 30 | ) | 18 | ) |
222 | 31 | from lp.services.scripts.base import LaunchpadCronScript | 19 | from lp.services.scripts.base import LaunchpadCronScript |
223 | 32 | from lp.services.webapp import canonical_url | 20 | from lp.services.webapp import canonical_url |
229 | 33 | from lp.soyuz.enums import ( | 21 | from lp.soyuz.enums import ArchiveSubscriberStatus |
225 | 34 | ArchiveStatus, | ||
226 | 35 | ArchiveSubscriberStatus, | ||
227 | 36 | ) | ||
228 | 37 | from lp.soyuz.model.archive import Archive | ||
230 | 38 | from lp.soyuz.model.archiveauthtoken import ArchiveAuthToken | 22 | from lp.soyuz.model.archiveauthtoken import ArchiveAuthToken |
231 | 39 | from lp.soyuz.model.archivesubscriber import ArchiveSubscriber | 23 | from lp.soyuz.model.archivesubscriber import ArchiveSubscriber |
232 | 40 | 24 | ||
233 | 41 | # These PPAs should never have their htaccess/pwd files touched. | ||
234 | 42 | BLACKLISTED_PPAS = { | ||
235 | 43 | 'ubuntuone': ['ppa'], | ||
236 | 44 | } | ||
237 | 45 | |||
238 | 46 | 25 | ||
239 | 47 | class HtaccessTokenGenerator(LaunchpadCronScript): | 26 | class HtaccessTokenGenerator(LaunchpadCronScript): |
242 | 48 | """Helper class for generating .htaccess files for private PPAs.""" | 27 | """Expire archive subscriptions and deactivate invalid tokens.""" |
243 | 49 | blacklist = BLACKLISTED_PPAS | 28 | |
244 | 29 | # XXX cjwatson 2021-04-21: This script and class are now misnamed, as we | ||
245 | 30 | # no longer generate .htaccess or .htpasswd files, but instead check | ||
246 | 31 | # archive authentication dynamically. We can remove this script once we | ||
247 | 32 | # stop running it on production and move its remaining functions | ||
248 | 33 | # elsewhere (probably garbo). | ||
249 | 50 | 34 | ||
250 | 51 | def add_my_options(self): | 35 | def add_my_options(self): |
251 | 52 | """Add script command line options.""" | 36 | """Add script command line options.""" |
252 | @@ -60,68 +44,6 @@ class HtaccessTokenGenerator(LaunchpadCronScript): | |||
253 | 60 | dest="no_deactivation", default=False, | 44 | dest="no_deactivation", default=False, |
254 | 61 | help="If set, tokens are not deactivated.") | 45 | help="If set, tokens are not deactivated.") |
255 | 62 | 46 | ||
256 | 63 | def ensureHtaccess(self, ppa): | ||
257 | 64 | """Generate a .htaccess for `ppa`.""" | ||
258 | 65 | if self.options.dryrun: | ||
259 | 66 | return | ||
260 | 67 | |||
261 | 68 | # The publisher Config object does not have an | ||
262 | 69 | # interface, so we need to remove the security wrapper. | ||
263 | 70 | pub_config = getPubConfig(ppa) | ||
264 | 71 | htaccess_filename = os.path.join(pub_config.archiveroot, ".htaccess") | ||
265 | 72 | if not os.path.exists(htaccess_filename): | ||
266 | 73 | # It's not there, so create it. | ||
267 | 74 | if not os.path.exists(pub_config.archiveroot): | ||
268 | 75 | os.makedirs(pub_config.archiveroot) | ||
269 | 76 | write_htaccess(htaccess_filename, pub_config.archiveroot) | ||
270 | 77 | self.logger.debug("Created .htaccess for %s" % ppa.displayname) | ||
271 | 78 | |||
272 | 79 | def generateHtpasswd(self, ppa): | ||
273 | 80 | """Generate a htpasswd file for `ppa`s `tokens`. | ||
274 | 81 | |||
275 | 82 | :param ppa: The context PPA (an `IArchive`). | ||
276 | 83 | :return: The filename of the htpasswd file that was generated. | ||
277 | 84 | """ | ||
278 | 85 | # Create a temporary file that will be a new .htpasswd. | ||
279 | 86 | pub_config = getPubConfig(ppa) | ||
280 | 87 | if not os.path.exists(pub_config.temproot): | ||
281 | 88 | os.makedirs(pub_config.temproot) | ||
282 | 89 | fd, temp_filename = tempfile.mkstemp(dir=pub_config.temproot) | ||
283 | 90 | os.close(fd) | ||
284 | 91 | |||
285 | 92 | write_htpasswd(temp_filename, htpasswd_credentials_for_archive(ppa)) | ||
286 | 93 | |||
287 | 94 | return temp_filename | ||
288 | 95 | |||
289 | 96 | def replaceUpdatedHtpasswd(self, ppa, temp_htpasswd_file): | ||
290 | 97 | """Compare the new and the old htpasswd and replace if changed. | ||
291 | 98 | |||
292 | 99 | :return: True if the file was replaced. | ||
293 | 100 | """ | ||
294 | 101 | try: | ||
295 | 102 | if self.options.dryrun: | ||
296 | 103 | return False | ||
297 | 104 | |||
298 | 105 | # The publisher Config object does not have an | ||
299 | 106 | # interface, so we need to remove the security wrapper. | ||
300 | 107 | pub_config = getPubConfig(ppa) | ||
301 | 108 | if not os.path.exists(pub_config.archiveroot): | ||
302 | 109 | os.makedirs(pub_config.archiveroot) | ||
303 | 110 | htpasswd_filename = os.path.join( | ||
304 | 111 | pub_config.archiveroot, ".htpasswd") | ||
305 | 112 | |||
306 | 113 | if (not os.path.isfile(htpasswd_filename) or | ||
307 | 114 | not filecmp.cmp(htpasswd_filename, temp_htpasswd_file)): | ||
308 | 115 | # Atomically replace the old file or create a new file. | ||
309 | 116 | os.rename(temp_htpasswd_file, htpasswd_filename) | ||
310 | 117 | self.logger.debug("Replaced htpasswd for %s" % ppa.displayname) | ||
311 | 118 | return True | ||
312 | 119 | |||
313 | 120 | return False | ||
314 | 121 | finally: | ||
315 | 122 | if os.path.exists(temp_htpasswd_file): | ||
316 | 123 | os.unlink(temp_htpasswd_file) | ||
317 | 124 | |||
318 | 125 | def sendCancellationEmail(self, token): | 47 | def sendCancellationEmail(self, token): |
319 | 126 | """Send an email to the person whose subscription was cancelled.""" | 48 | """Send an email to the person whose subscription was cancelled.""" |
320 | 127 | if token.archive.suppress_subscription_notifications: | 49 | if token.archive.suppress_subscription_notifications: |
321 | @@ -220,8 +142,7 @@ class HtaccessTokenGenerator(LaunchpadCronScript): | |||
322 | 220 | :param send_email: Whether to send a cancellation email to the owner | 142 | :param send_email: Whether to send a cancellation email to the owner |
323 | 221 | of the token. This defaults to False to speed up the test | 143 | of the token. This defaults to False to speed up the test |
324 | 222 | suite. | 144 | suite. |
327 | 223 | :return: the set of ppas affected by token deactivations so that we | 145 | :return: the set of ppas affected by token deactivations. |
326 | 224 | can later update their htpasswd files. | ||
328 | 225 | """ | 146 | """ |
329 | 226 | invalid_tokens = self._getInvalidTokens() | 147 | invalid_tokens = self._getInvalidTokens() |
330 | 227 | return self.deactivateTokens(invalid_tokens, send_email=send_email) | 148 | return self.deactivateTokens(invalid_tokens, send_email=send_email) |
331 | @@ -249,129 +170,13 @@ class HtaccessTokenGenerator(LaunchpadCronScript): | |||
332 | 249 | self.logger.info( | 170 | self.logger.info( |
333 | 250 | "Expired subscriptions: %s" % ", ".join(subscription_names)) | 171 | "Expired subscriptions: %s" % ", ".join(subscription_names)) |
334 | 251 | 172 | ||
335 | 252 | def getTimeToSyncFrom(self): | ||
336 | 253 | """Return the time we'll synchronize from. | ||
337 | 254 | |||
338 | 255 | Any new PPAs or tokens created since this time will be used to | ||
339 | 256 | generate passwords. | ||
340 | 257 | """ | ||
341 | 258 | # NTP is running on our servers and therefore we can assume | ||
342 | 259 | # only minimal skew, we include a fudge-factor of 1s so that | ||
343 | 260 | # even the minimal skew cannot demonstrate bug 627608. | ||
344 | 261 | last_activity = self.get_last_activity() | ||
345 | 262 | if not last_activity: | ||
346 | 263 | return | ||
347 | 264 | return last_activity.date_started - timedelta(seconds=1) | ||
348 | 265 | |||
349 | 266 | def getNewTokens(self, since=None): | ||
350 | 267 | """Return result set of new tokens created since the given time.""" | ||
351 | 268 | store = IStore(ArchiveAuthToken) | ||
352 | 269 | extra_expr = [] | ||
353 | 270 | if since: | ||
354 | 271 | extra_expr = [ArchiveAuthToken.date_created >= since] | ||
355 | 272 | new_ppa_tokens = store.find( | ||
356 | 273 | ArchiveAuthToken, | ||
357 | 274 | ArchiveAuthToken.date_deactivated == None, | ||
358 | 275 | *extra_expr) | ||
359 | 276 | return new_ppa_tokens | ||
360 | 277 | |||
361 | 278 | def getDeactivatedNamedTokens(self, since=None): | ||
362 | 279 | """Return result set of named tokens deactivated since given time.""" | ||
363 | 280 | now = datetime.now(pytz.UTC) | ||
364 | 281 | |||
365 | 282 | store = IStore(ArchiveAuthToken) | ||
366 | 283 | extra_expr = [] | ||
367 | 284 | if since: | ||
368 | 285 | extra_expr = [ArchiveAuthToken.date_deactivated >= since] | ||
369 | 286 | tokens = store.find( | ||
370 | 287 | ArchiveAuthToken, | ||
371 | 288 | ArchiveAuthToken.name != None, | ||
372 | 289 | ArchiveAuthToken.date_deactivated != None, | ||
373 | 290 | ArchiveAuthToken.date_deactivated <= now, | ||
374 | 291 | *extra_expr) | ||
375 | 292 | return tokens | ||
376 | 293 | |||
377 | 294 | def getNewPrivatePPAs(self, since=None): | ||
378 | 295 | """Return the recently created private PPAs.""" | ||
379 | 296 | store = IStore(Archive) | ||
380 | 297 | extra_expr = [] | ||
381 | 298 | if since: | ||
382 | 299 | extra_expr = [Archive.date_created >= since] | ||
383 | 300 | return store.find( | ||
384 | 301 | Archive, Archive._private == True, *extra_expr) | ||
385 | 302 | |||
386 | 303 | def main(self): | 173 | def main(self): |
387 | 304 | """Script entry point.""" | 174 | """Script entry point.""" |
388 | 305 | self.logger.info('Starting the PPA .htaccess generation') | 175 | self.logger.info('Starting the PPA .htaccess generation') |
389 | 306 | self.expireSubscriptions() | 176 | self.expireSubscriptions() |
390 | 307 | affected_ppas = self.deactivateInvalidTokens(send_email=True) | 177 | affected_ppas = self.deactivateInvalidTokens(send_email=True) |
391 | 308 | current_ppa_count = len(affected_ppas) | ||
392 | 309 | self.logger.debug( | ||
393 | 310 | '%s PPAs with deactivated tokens' % current_ppa_count) | ||
394 | 311 | |||
395 | 312 | last_success = self.getTimeToSyncFrom() | ||
396 | 313 | |||
397 | 314 | # Include ppas with named tokens deactivated since last time we ran. | ||
398 | 315 | num_tokens = 0 | ||
399 | 316 | for token in self.getDeactivatedNamedTokens(since=last_success): | ||
400 | 317 | affected_ppas.add(token.archive) | ||
401 | 318 | num_tokens += 1 | ||
402 | 319 | |||
403 | 320 | new_ppa_count = len(affected_ppas) | ||
404 | 321 | self.logger.debug( | ||
405 | 322 | "%s deactivated named tokens since last run, %s PPAs affected" | ||
406 | 323 | % (num_tokens, new_ppa_count - current_ppa_count)) | ||
407 | 324 | current_ppa_count = new_ppa_count | ||
408 | 325 | |||
409 | 326 | # In addition to the ppas that are affected by deactivated | ||
410 | 327 | # tokens, we also want to include any ppas that have tokens | ||
411 | 328 | # created since the last time we ran. | ||
412 | 329 | num_tokens = 0 | ||
413 | 330 | for token in self.getNewTokens(since=last_success): | ||
414 | 331 | affected_ppas.add(token.archive) | ||
415 | 332 | num_tokens += 1 | ||
416 | 333 | |||
417 | 334 | new_ppa_count = len(affected_ppas) | ||
418 | 335 | self.logger.debug( | ||
419 | 336 | "%s new tokens since last run, %s PPAs affected" | ||
420 | 337 | % (num_tokens, new_ppa_count - current_ppa_count)) | ||
421 | 338 | current_ppa_count = new_ppa_count | ||
422 | 339 | |||
423 | 340 | affected_ppas.update(self.getNewPrivatePPAs(since=last_success)) | ||
424 | 341 | new_ppa_count = len(affected_ppas) | ||
425 | 342 | self.logger.debug( | 178 | self.logger.debug( |
458 | 343 | "%s new private PPAs since last run" | 179 | '%s PPAs with deactivated tokens' % len(affected_ppas)) |
427 | 344 | % (new_ppa_count - current_ppa_count)) | ||
428 | 345 | |||
429 | 346 | self.logger.debug('%s PPAs require updating' % new_ppa_count) | ||
430 | 347 | for ppa in affected_ppas: | ||
431 | 348 | # If this PPA is blacklisted, do not touch its htaccess/pwd | ||
432 | 349 | # files. | ||
433 | 350 | blacklisted_ppa_names_for_owner = self.blacklist.get( | ||
434 | 351 | ppa.owner.name, []) | ||
435 | 352 | if ppa.name in blacklisted_ppa_names_for_owner: | ||
436 | 353 | self.logger.info( | ||
437 | 354 | "Skipping htaccess updates for blacklisted PPA " | ||
438 | 355 | " '%s' owned by %s.", | ||
439 | 356 | ppa.name, | ||
440 | 357 | ppa.owner.displayname) | ||
441 | 358 | continue | ||
442 | 359 | elif ppa.status == ArchiveStatus.DELETED or ppa.enabled is False: | ||
443 | 360 | self.logger.info( | ||
444 | 361 | "Skipping htaccess updates for deleted or disabled PPA " | ||
445 | 362 | " '%s' owned by %s.", | ||
446 | 363 | ppa.name, | ||
447 | 364 | ppa.owner.displayname) | ||
448 | 365 | continue | ||
449 | 366 | |||
450 | 367 | self.ensureHtaccess(ppa) | ||
451 | 368 | htpasswd_write_start = datetime.now() | ||
452 | 369 | temp_htpasswd = self.generateHtpasswd(ppa) | ||
453 | 370 | self.replaceUpdatedHtpasswd(ppa, temp_htpasswd) | ||
454 | 371 | htpasswd_write_duration = datetime.now() - htpasswd_write_start | ||
455 | 372 | self.logger.debug( | ||
456 | 373 | "Wrote htpasswd for '%s': %ss" | ||
457 | 374 | % (ppa.name, htpasswd_write_duration.total_seconds())) | ||
459 | 375 | 180 | ||
460 | 376 | if self.options.no_deactivation or self.options.dryrun: | 181 | if self.options.no_deactivation or self.options.dryrun: |
461 | 377 | self.logger.info('Dry run, so not committing transaction.') | 182 | self.logger.info('Dry run, so not committing transaction.') |
462 | diff --git a/lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py b/lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py | |||
463 | index f11dba1..472b7bf 100644 | |||
464 | --- a/lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py | |||
465 | +++ b/lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py | |||
466 | @@ -5,7 +5,6 @@ | |||
467 | 5 | 5 | ||
468 | 6 | from __future__ import absolute_import, print_function, unicode_literals | 6 | from __future__ import absolute_import, print_function, unicode_literals |
469 | 7 | 7 | ||
470 | 8 | import crypt | ||
471 | 9 | from datetime import ( | 8 | from datetime import ( |
472 | 10 | datetime, | 9 | datetime, |
473 | 11 | timedelta, | 10 | timedelta, |
474 | @@ -13,20 +12,10 @@ from datetime import ( | |||
475 | 13 | import os | 12 | import os |
476 | 14 | import subprocess | 13 | import subprocess |
477 | 15 | import sys | 14 | import sys |
478 | 16 | import tempfile | ||
479 | 17 | 15 | ||
480 | 18 | import pytz | 16 | import pytz |
481 | 19 | from testtools.matchers import ( | ||
482 | 20 | AllMatch, | ||
483 | 21 | FileContains, | ||
484 | 22 | FileExists, | ||
485 | 23 | Not, | ||
486 | 24 | ) | ||
487 | 25 | import transaction | ||
488 | 26 | from zope.component import getUtility | 17 | from zope.component import getUtility |
489 | 27 | from zope.security.proxy import removeSecurityProxy | ||
490 | 28 | 18 | ||
491 | 29 | from lp.archivepublisher.config import getPubConfig | ||
492 | 30 | from lp.archivepublisher.scripts.generate_ppa_htaccess import ( | 19 | from lp.archivepublisher.scripts.generate_ppa_htaccess import ( |
493 | 31 | HtaccessTokenGenerator, | 20 | HtaccessTokenGenerator, |
494 | 32 | ) | 21 | ) |
495 | @@ -36,16 +25,7 @@ from lp.registry.interfaces.teammembership import TeamMembershipStatus | |||
496 | 36 | from lp.services.config import config | 25 | from lp.services.config import config |
497 | 37 | from lp.services.features.testing import FeatureFixture | 26 | from lp.services.features.testing import FeatureFixture |
498 | 38 | from lp.services.log.logger import BufferLogger | 27 | from lp.services.log.logger import BufferLogger |
509 | 39 | from lp.services.osutils import ( | 28 | from lp.soyuz.enums import ArchiveSubscriberStatus |
500 | 40 | ensure_directory_exists, | ||
501 | 41 | remove_if_exists, | ||
502 | 42 | write_file, | ||
503 | 43 | ) | ||
504 | 44 | from lp.services.scripts.interfaces.scriptactivity import IScriptActivitySet | ||
505 | 45 | from lp.soyuz.enums import ( | ||
506 | 46 | ArchiveStatus, | ||
507 | 47 | ArchiveSubscriberStatus, | ||
508 | 48 | ) | ||
510 | 49 | from lp.soyuz.interfaces.archive import NAMED_AUTH_TOKEN_FEATURE_FLAG | 29 | from lp.soyuz.interfaces.archive import NAMED_AUTH_TOKEN_FEATURE_FLAG |
511 | 50 | from lp.testing import TestCaseWithFactory | 30 | from lp.testing import TestCaseWithFactory |
512 | 51 | from lp.testing.dbuser import ( | 31 | from lp.testing.dbuser import ( |
513 | @@ -102,102 +82,6 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): | |||
514 | 102 | stdout, stderr = process.communicate() | 82 | stdout, stderr = process.communicate() |
515 | 103 | return process.returncode, stdout, stderr | 83 | return process.returncode, stdout, stderr |
516 | 104 | 84 | ||
517 | 105 | def testEnsureHtaccess(self): | ||
518 | 106 | """Ensure that the .htaccess file is generated correctly.""" | ||
519 | 107 | # The publisher Config object does not have an interface, so we | ||
520 | 108 | # need to remove the security wrapper. | ||
521 | 109 | pub_config = getPubConfig(self.ppa) | ||
522 | 110 | |||
523 | 111 | filename = os.path.join(pub_config.archiveroot, ".htaccess") | ||
524 | 112 | remove_if_exists(filename) | ||
525 | 113 | script = self.getScript() | ||
526 | 114 | script.ensureHtaccess(self.ppa) | ||
527 | 115 | self.addCleanup(remove_if_exists, filename) | ||
528 | 116 | |||
529 | 117 | contents = [ | ||
530 | 118 | "", | ||
531 | 119 | "AuthType Basic", | ||
532 | 120 | "AuthName \"Token Required\"", | ||
533 | 121 | "AuthUserFile %s/.htpasswd" % pub_config.archiveroot, | ||
534 | 122 | "Require valid-user", | ||
535 | 123 | "", | ||
536 | 124 | ] | ||
537 | 125 | self.assertThat(filename, FileContains('\n'.join(contents))) | ||
538 | 126 | |||
539 | 127 | def testGenerateHtpasswd(self): | ||
540 | 128 | """Given some `ArchiveAuthToken`s, test generating htpasswd.""" | ||
541 | 129 | # Make some subscriptions and tokens. | ||
542 | 130 | tokens = [] | ||
543 | 131 | for name in ['name12', 'name16']: | ||
544 | 132 | person = getUtility(IPersonSet).getByName(name) | ||
545 | 133 | self.ppa.newSubscription(person, self.ppa.owner) | ||
546 | 134 | tokens.append(self.ppa.newAuthToken(person)) | ||
547 | 135 | token_usernames = [token.person.name for token in tokens] | ||
548 | 136 | |||
549 | 137 | # Generate the passwd file. | ||
550 | 138 | script = self.getScript() | ||
551 | 139 | filename = script.generateHtpasswd(self.ppa) | ||
552 | 140 | self.addCleanup(remove_if_exists, filename) | ||
553 | 141 | |||
554 | 142 | # It should be a temp file on the same filesystem as the target | ||
555 | 143 | # file, so os.rename() won't explode. temproot is relied on | ||
556 | 144 | # elsewhere for this same purpose, so it should be safe. | ||
557 | 145 | pub_config = getPubConfig(self.ppa) | ||
558 | 146 | self.assertEqual(pub_config.temproot, os.path.dirname(filename)) | ||
559 | 147 | |||
560 | 148 | # Read it back in. | ||
561 | 149 | file_contents = [ | ||
562 | 150 | line.strip().split(':', 1) for line in open(filename, 'r')] | ||
563 | 151 | |||
564 | 152 | # First entry is buildd secret, rest are from tokens. | ||
565 | 153 | usernames = list(list(zip(*file_contents))[0]) | ||
566 | 154 | self.assertEqual(['buildd'] + token_usernames, usernames) | ||
567 | 155 | |||
568 | 156 | # We can re-encrypt the buildd_secret and it should match the | ||
569 | 157 | # one in the .htpasswd file. | ||
570 | 158 | password = file_contents[0][1] | ||
571 | 159 | encrypted_secret = crypt.crypt(self.ppa.buildd_secret, password) | ||
572 | 160 | self.assertEqual(encrypted_secret, password) | ||
573 | 161 | |||
574 | 162 | def testReplaceUpdatedHtpasswd(self): | ||
575 | 163 | """Test that the htpasswd file is only replaced if it changes.""" | ||
576 | 164 | FILE_CONTENT = b"Kneel before Zod!" | ||
577 | 165 | # The publisher Config object does not have an interface, so we | ||
578 | 166 | # need to remove the security wrapper. | ||
579 | 167 | pub_config = getPubConfig(self.ppa) | ||
580 | 168 | filename = os.path.join(pub_config.archiveroot, ".htpasswd") | ||
581 | 169 | |||
582 | 170 | # Write out a dummy .htpasswd | ||
583 | 171 | ensure_directory_exists(pub_config.archiveroot) | ||
584 | 172 | write_file(filename, FILE_CONTENT) | ||
585 | 173 | |||
586 | 174 | # Write the same contents in a temp file. | ||
587 | 175 | def write_tempfile(): | ||
588 | 176 | fd, temp_filename = tempfile.mkstemp(dir=pub_config.archiveroot) | ||
589 | 177 | file = os.fdopen(fd, "wb") | ||
590 | 178 | file.write(FILE_CONTENT) | ||
591 | 179 | file.close() | ||
592 | 180 | return temp_filename | ||
593 | 181 | |||
594 | 182 | # Replacement should not happen. | ||
595 | 183 | temp_filename = write_tempfile() | ||
596 | 184 | script = self.getScript() | ||
597 | 185 | self.assertTrue(os.path.exists(temp_filename)) | ||
598 | 186 | self.assertFalse( | ||
599 | 187 | script.replaceUpdatedHtpasswd(self.ppa, temp_filename)) | ||
600 | 188 | self.assertFalse(os.path.exists(temp_filename)) | ||
601 | 189 | |||
602 | 190 | # Writing a different .htpasswd should see it get replaced. | ||
603 | 191 | write_file(filename, b"Come to me, son of Jor-El!") | ||
604 | 192 | |||
605 | 193 | temp_filename = write_tempfile() | ||
606 | 194 | self.assertTrue(os.path.exists(temp_filename)) | ||
607 | 195 | self.assertTrue( | ||
608 | 196 | script.replaceUpdatedHtpasswd(self.ppa, temp_filename)) | ||
609 | 197 | self.assertFalse(os.path.exists(temp_filename)) | ||
610 | 198 | |||
611 | 199 | os.remove(filename) | ||
612 | 200 | |||
613 | 201 | def assertDeactivated(self, token): | 85 | def assertDeactivated(self, token): |
614 | 202 | """Helper function to test token deactivation state.""" | 86 | """Helper function to test token deactivation state.""" |
615 | 203 | return self.assertNotEqual(token.date_deactivated, None) | 87 | return self.assertNotEqual(token.date_deactivated, None) |
616 | @@ -341,15 +225,6 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): | |||
617 | 341 | self.layer.txn.commit() | 225 | self.layer.txn.commit() |
618 | 342 | return (sub1, sub2), (token1, token2, token3) | 226 | return (sub1, sub2), (token1, token2, token3) |
619 | 343 | 227 | ||
620 | 344 | def ensureNoFiles(self): | ||
621 | 345 | """Ensure the .ht* files don't already exist.""" | ||
622 | 346 | pub_config = getPubConfig(self.ppa) | ||
623 | 347 | htaccess = os.path.join(pub_config.archiveroot, ".htaccess") | ||
624 | 348 | htpasswd = os.path.join(pub_config.archiveroot, ".htpasswd") | ||
625 | 349 | remove_if_exists(htaccess) | ||
626 | 350 | remove_if_exists(htpasswd) | ||
627 | 351 | return htaccess, htpasswd | ||
628 | 352 | |||
629 | 353 | def testSubscriptionExpiry(self): | 228 | def testSubscriptionExpiry(self): |
630 | 354 | """Ensure subscriptions' statuses are set to EXPIRED properly.""" | 229 | """Ensure subscriptions' statuses are set to EXPIRED properly.""" |
631 | 355 | subs, tokens = self.setupDummyTokens() | 230 | subs, tokens = self.setupDummyTokens() |
632 | @@ -369,51 +244,6 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): | |||
633 | 369 | self.assertEqual(subs[0].status, ArchiveSubscriberStatus.EXPIRED) | 244 | self.assertEqual(subs[0].status, ArchiveSubscriberStatus.EXPIRED) |
634 | 370 | self.assertEqual(subs[1].status, ArchiveSubscriberStatus.CURRENT) | 245 | self.assertEqual(subs[1].status, ArchiveSubscriberStatus.CURRENT) |
635 | 371 | 246 | ||
636 | 372 | def testBasicOperation(self): | ||
637 | 373 | """Invoke the actual script and make sure it generates some files.""" | ||
638 | 374 | self.setupDummyTokens() | ||
639 | 375 | htaccess, htpasswd = self.ensureNoFiles() | ||
640 | 376 | |||
641 | 377 | # Call the script and check that we have a .htaccess and a | ||
642 | 378 | # .htpasswd. | ||
643 | 379 | return_code, stdout, stderr = self.runScript() | ||
644 | 380 | self.assertEqual( | ||
645 | 381 | return_code, 0, "Got a bad return code of %s\nOutput:\n%s" % | ||
646 | 382 | (return_code, stderr)) | ||
647 | 383 | self.assertThat([htaccess, htpasswd], AllMatch(FileExists())) | ||
648 | 384 | os.remove(htaccess) | ||
649 | 385 | os.remove(htpasswd) | ||
650 | 386 | |||
651 | 387 | def testBasicOperation_with_named_tokens(self): | ||
652 | 388 | """Invoke the actual script and make sure it generates some files.""" | ||
653 | 389 | token1 = self.ppa.newNamedAuthToken("tokenname1") | ||
654 | 390 | token2 = self.ppa.newNamedAuthToken("tokenname2") | ||
655 | 391 | token3 = self.ppa.newNamedAuthToken("tokenname3") | ||
656 | 392 | token3.deactivate() | ||
657 | 393 | |||
658 | 394 | # Call the script and check that we have a .htaccess and a .htpasswd. | ||
659 | 395 | htaccess, htpasswd = self.ensureNoFiles() | ||
660 | 396 | script = self.getScript() | ||
661 | 397 | script.main() | ||
662 | 398 | self.assertThat([htaccess, htpasswd], AllMatch(FileExists())) | ||
663 | 399 | with open(htpasswd) as htpasswd_file: | ||
664 | 400 | contents = htpasswd_file.read() | ||
665 | 401 | self.assertIn('+' + token1.name, contents) | ||
666 | 402 | self.assertIn('+' + token2.name, contents) | ||
667 | 403 | self.assertNotIn('+' + token3.name, contents) | ||
668 | 404 | |||
669 | 405 | # Deactivate a named token and verify it is removed from .htpasswd. | ||
670 | 406 | token2.deactivate() | ||
671 | 407 | script.main() | ||
672 | 408 | self.assertThat([htaccess, htpasswd], AllMatch(FileExists())) | ||
673 | 409 | with open(htpasswd) as htpasswd_file: | ||
674 | 410 | contents = htpasswd_file.read() | ||
675 | 411 | self.assertIn('+' + token1.name, contents) | ||
676 | 412 | self.assertNotIn('+' + token2.name, contents) | ||
677 | 413 | self.assertNotIn('+' + token3.name, contents) | ||
678 | 414 | os.remove(htaccess) | ||
679 | 415 | os.remove(htpasswd) | ||
680 | 416 | |||
681 | 417 | def _setupOptionsData(self): | 247 | def _setupOptionsData(self): |
682 | 418 | """Setup test data for option testing.""" | 248 | """Setup test data for option testing.""" |
683 | 419 | subs, tokens = self.setupDummyTokens() | 249 | subs, tokens = self.setupDummyTokens() |
684 | @@ -427,13 +257,9 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): | |||
685 | 427 | """Test that the dryrun and no-deactivation option works.""" | 257 | """Test that the dryrun and no-deactivation option works.""" |
686 | 428 | subs, tokens = self._setupOptionsData() | 258 | subs, tokens = self._setupOptionsData() |
687 | 429 | 259 | ||
688 | 430 | htaccess, htpasswd = self.ensureNoFiles() | ||
689 | 431 | script = self.getScript(test_args=["--dry-run"]) | 260 | script = self.getScript(test_args=["--dry-run"]) |
690 | 432 | script.main() | 261 | script.main() |
691 | 433 | 262 | ||
692 | 434 | # Assert no files were written. | ||
693 | 435 | self.assertThat([htaccess, htpasswd], AllMatch(Not(FileExists()))) | ||
694 | 436 | |||
695 | 437 | # Assert that the cancelled subscription did not cause the token | 263 | # Assert that the cancelled subscription did not cause the token |
696 | 438 | # to get deactivated. | 264 | # to get deactivated. |
697 | 439 | self.assertNotDeactivated(tokens[0]) | 265 | self.assertNotDeactivated(tokens[0]) |
698 | @@ -448,65 +274,6 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): | |||
699 | 448 | script.main() | 274 | script.main() |
700 | 449 | self.assertDeactivated(tokens[0]) | 275 | self.assertDeactivated(tokens[0]) |
701 | 450 | 276 | ||
702 | 451 | def testBlacklistingPPAs(self): | ||
703 | 452 | """Test that the htaccess for blacklisted PPAs are not touched.""" | ||
704 | 453 | subs, tokens = self.setupDummyTokens() | ||
705 | 454 | htaccess, htpasswd = self.ensureNoFiles() | ||
706 | 455 | |||
707 | 456 | # Setup the first subscription so that it is due to be expired. | ||
708 | 457 | now = datetime.now(pytz.UTC) | ||
709 | 458 | subs[0].date_expires = now - timedelta(minutes=3) | ||
710 | 459 | self.assertEqual(subs[0].status, ArchiveSubscriberStatus.CURRENT) | ||
711 | 460 | |||
712 | 461 | script = self.getScript() | ||
713 | 462 | script.blacklist = {'joe': ['my_other_ppa', 'myppa', 'and_another']} | ||
714 | 463 | script.main() | ||
715 | 464 | |||
716 | 465 | # The tokens will still be deactivated, and subscriptions expired. | ||
717 | 466 | self.assertDeactivated(tokens[0]) | ||
718 | 467 | self.assertEqual(subs[0].status, ArchiveSubscriberStatus.EXPIRED) | ||
719 | 468 | # But the htaccess is not touched. | ||
720 | 469 | self.assertThat([htaccess, htpasswd], AllMatch(Not(FileExists()))) | ||
721 | 470 | |||
722 | 471 | def testSkippingOfDisabledPPAs(self): | ||
723 | 472 | """Test that the htaccess for disabled PPAs are not touched.""" | ||
724 | 473 | subs, tokens = self.setupDummyTokens() | ||
725 | 474 | htaccess, htpasswd = self.ensureNoFiles() | ||
726 | 475 | |||
727 | 476 | # Setup subscription so that htaccess/htpasswd is pending generation. | ||
728 | 477 | now = datetime.now(pytz.UTC) | ||
729 | 478 | subs[0].date_expires = now + timedelta(minutes=3) | ||
730 | 479 | self.assertEqual(subs[0].status, ArchiveSubscriberStatus.CURRENT) | ||
731 | 480 | |||
732 | 481 | # Set the PPA as disabled. | ||
733 | 482 | self.ppa.disable() | ||
734 | 483 | self.assertFalse(self.ppa.enabled) | ||
735 | 484 | |||
736 | 485 | script = self.getScript() | ||
737 | 486 | script.main() | ||
738 | 487 | |||
739 | 488 | # The htaccess and htpasswd files should not be generated. | ||
740 | 489 | self.assertThat([htaccess, htpasswd], AllMatch(Not(FileExists()))) | ||
741 | 490 | |||
742 | 491 | def testSkippingOfDeletedPPAs(self): | ||
743 | 492 | """Test that the htaccess for deleted PPAs are not touched.""" | ||
744 | 493 | subs, tokens = self.setupDummyTokens() | ||
745 | 494 | htaccess, htpasswd = self.ensureNoFiles() | ||
746 | 495 | |||
747 | 496 | # Setup subscription so that htaccess/htpasswd is pending generation. | ||
748 | 497 | now = datetime.now(pytz.UTC) | ||
749 | 498 | subs[0].date_expires = now + timedelta(minutes=3) | ||
750 | 499 | self.assertEqual(subs[0].status, ArchiveSubscriberStatus.CURRENT) | ||
751 | 500 | |||
752 | 501 | # Set the PPA as deleted. | ||
753 | 502 | self.ppa.status = ArchiveStatus.DELETED | ||
754 | 503 | |||
755 | 504 | script = self.getScript() | ||
756 | 505 | script.main() | ||
757 | 506 | |||
758 | 507 | # The htaccess and htpasswd files should not be generated. | ||
759 | 508 | self.assertThat([htaccess, htpasswd], AllMatch(Not(FileExists()))) | ||
760 | 509 | |||
761 | 510 | def testSendingCancellationEmail(self): | 277 | def testSendingCancellationEmail(self): |
762 | 511 | """Test that when a token is deactivated, its user gets an email. | 278 | """Test that when a token is deactivated, its user gets an email. |
763 | 512 | 279 | ||
764 | @@ -568,120 +335,3 @@ class TestPPAHtaccessTokenGeneration(TestCaseWithFactory): | |||
765 | 568 | script.sendCancellationEmail(token) | 335 | script.sendCancellationEmail(token) |
766 | 569 | 336 | ||
767 | 570 | self.assertEmailQueueLength(0) | 337 | self.assertEmailQueueLength(0) |
768 | 571 | |||
769 | 572 | def test_getTimeToSyncFrom(self): | ||
770 | 573 | # Sync from 1s before previous start to catch anything made during the | ||
771 | 574 | # last script run, and to handle NTP clock skew. | ||
772 | 575 | now = datetime.now(pytz.UTC) | ||
773 | 576 | script_start_time = now - timedelta(seconds=2) | ||
774 | 577 | script_end_time = now | ||
775 | 578 | |||
776 | 579 | getUtility(IScriptActivitySet).recordSuccess( | ||
777 | 580 | self.SCRIPT_NAME, script_start_time, script_end_time) | ||
778 | 581 | script = self.getScript() | ||
779 | 582 | self.assertEqual( | ||
780 | 583 | script_start_time - timedelta(seconds=1), | ||
781 | 584 | script.getTimeToSyncFrom()) | ||
782 | 585 | |||
783 | 586 | def test_getNewPrivatePPAs_no_previous_run(self): | ||
784 | 587 | # All private PPAs are returned if there was no previous run. | ||
785 | 588 | # This happens even if they have no tokens. | ||
786 | 589 | |||
787 | 590 | # Create a public PPA that should not be in the list. | ||
788 | 591 | self.factory.makeArchive(private=False) | ||
789 | 592 | |||
790 | 593 | script = self.getScript() | ||
791 | 594 | self.assertContentEqual([self.ppa], script.getNewPrivatePPAs()) | ||
792 | 595 | |||
793 | 596 | def test_getNewPrivatePPAs_only_those_since_last_run(self): | ||
794 | 597 | # Only private PPAs created since the last run are returned. | ||
795 | 598 | # This happens even if they have no tokens. | ||
796 | 599 | last_start = datetime.now(pytz.UTC) - timedelta(seconds=90) | ||
797 | 600 | before_last_start = last_start - timedelta(seconds=30) | ||
798 | 601 | removeSecurityProxy(self.ppa).date_created = before_last_start | ||
799 | 602 | |||
800 | 603 | # Create a new PPA that should show up. | ||
801 | 604 | new_ppa = self.factory.makeArchive(private=True) | ||
802 | 605 | |||
803 | 606 | script = self.getScript() | ||
804 | 607 | new_ppas = script.getNewPrivatePPAs(since=last_start) | ||
805 | 608 | self.assertContentEqual([new_ppa], new_ppas) | ||
806 | 609 | |||
807 | 610 | def test_getNewTokens_no_previous_run(self): | ||
808 | 611 | """All valid tokens returned if there is no record of previous run.""" | ||
809 | 612 | tokens = self.setupDummyTokens()[1] | ||
810 | 613 | |||
811 | 614 | # If there is no record of the script running previously, all | ||
812 | 615 | # valid tokens are returned. | ||
813 | 616 | script = self.getScript() | ||
814 | 617 | self.assertContentEqual(tokens, script.getNewTokens()) | ||
815 | 618 | |||
816 | 619 | def test_getNewTokens_only_those_since_last_run(self): | ||
817 | 620 | """Only tokens created since the last run are returned.""" | ||
818 | 621 | last_start = datetime.now(pytz.UTC) - timedelta(seconds=90) | ||
819 | 622 | before_last_start = last_start - timedelta(seconds=30) | ||
820 | 623 | |||
821 | 624 | tokens = self.setupDummyTokens()[1] | ||
822 | 625 | # This token will not be included. | ||
823 | 626 | removeSecurityProxy(tokens[0]).date_created = before_last_start | ||
824 | 627 | |||
825 | 628 | script = self.getScript() | ||
826 | 629 | new_tokens = script.getNewTokens(since=last_start) | ||
827 | 630 | self.assertContentEqual(tokens[1:], new_tokens) | ||
828 | 631 | |||
829 | 632 | def test_getNewTokens_only_active_tokens(self): | ||
830 | 633 | """Only active tokens are returned.""" | ||
831 | 634 | tokens = self.setupDummyTokens()[1] | ||
832 | 635 | tokens[0].deactivate() | ||
833 | 636 | |||
834 | 637 | script = self.getScript() | ||
835 | 638 | self.assertContentEqual(tokens[1:], script.getNewTokens()) | ||
836 | 639 | |||
837 | 640 | def test_getDeactivatedNamedTokens_no_previous_run(self): | ||
838 | 641 | """All deactivated named tokens returned if there is no record | ||
839 | 642 | of previous run.""" | ||
840 | 643 | last_start = datetime.now(pytz.UTC) - timedelta(seconds=90) | ||
841 | 644 | before_last_start = last_start - timedelta(seconds=30) | ||
842 | 645 | |||
843 | 646 | self.ppa.newNamedAuthToken("tokenname1") | ||
844 | 647 | token2 = self.ppa.newNamedAuthToken("tokenname2") | ||
845 | 648 | token2.deactivate() | ||
846 | 649 | token3 = self.ppa.newNamedAuthToken("tokenname3") | ||
847 | 650 | token3.date_deactivated = before_last_start | ||
848 | 651 | |||
849 | 652 | script = self.getScript() | ||
850 | 653 | self.assertContentEqual( | ||
851 | 654 | [token2, token3], script.getDeactivatedNamedTokens()) | ||
852 | 655 | |||
853 | 656 | def test_getDeactivatedNamedTokens_only_those_since_last_run(self): | ||
854 | 657 | """Only named tokens deactivated since last run are returned.""" | ||
855 | 658 | last_start = datetime.now(pytz.UTC) - timedelta(seconds=90) | ||
856 | 659 | before_last_start = last_start - timedelta(seconds=30) | ||
857 | 660 | tomorrow = datetime.now(pytz.UTC) + timedelta(days=1) | ||
858 | 661 | |||
859 | 662 | self.ppa.newNamedAuthToken("tokenname1") | ||
860 | 663 | token2 = self.ppa.newNamedAuthToken("tokenname2") | ||
861 | 664 | token2.deactivate() | ||
862 | 665 | token3 = self.ppa.newNamedAuthToken("tokenname3") | ||
863 | 666 | token3.date_deactivated = before_last_start | ||
864 | 667 | token4 = self.ppa.newNamedAuthToken("tokenname4") | ||
865 | 668 | token4.date_deactivated = tomorrow | ||
866 | 669 | |||
867 | 670 | script = self.getScript() | ||
868 | 671 | self.assertContentEqual( | ||
869 | 672 | [token2], script.getDeactivatedNamedTokens(last_start)) | ||
870 | 673 | |||
871 | 674 | def test_processes_PPAs_without_subscription(self): | ||
872 | 675 | # A .htaccess file is written for Private PPAs even if they don't have | ||
873 | 676 | # any subscriptions. | ||
874 | 677 | htaccess, htpasswd = self.ensureNoFiles() | ||
875 | 678 | transaction.commit() | ||
876 | 679 | |||
877 | 680 | # Call the script and check that we have a .htaccess and a .htpasswd. | ||
878 | 681 | return_code, stdout, stderr = self.runScript() | ||
879 | 682 | self.assertEqual( | ||
880 | 683 | return_code, 0, "Got a bad return code of %s\nOutput:\n%s" % | ||
881 | 684 | (return_code, stderr)) | ||
882 | 685 | self.assertThat([htaccess, htpasswd], AllMatch(FileExists())) | ||
883 | 686 | os.remove(htaccess) | ||
884 | 687 | os.remove(htpasswd) | ||
885 | diff --git a/lib/lp/archivepublisher/tests/test_htaccess.py b/lib/lp/archivepublisher/tests/test_htaccess.py | |||
886 | 688 | deleted file mode 100644 | 338 | deleted file mode 100644 |
887 | index d435a2d..0000000 | |||
888 | --- a/lib/lp/archivepublisher/tests/test_htaccess.py | |||
889 | +++ /dev/null | |||
890 | @@ -1,139 +0,0 @@ | |||
891 | 1 | # Copyright 2009-2018 Canonical Ltd. This software is licensed under the | ||
892 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | ||
893 | 3 | |||
894 | 4 | """Test htaccess/htpasswd file generation. """ | ||
895 | 5 | |||
896 | 6 | from __future__ import absolute_import, print_function, unicode_literals | ||
897 | 7 | |||
898 | 8 | import os | ||
899 | 9 | import tempfile | ||
900 | 10 | |||
901 | 11 | from zope.component import getUtility | ||
902 | 12 | |||
903 | 13 | from lp.archivepublisher.htaccess import ( | ||
904 | 14 | htpasswd_credentials_for_archive, | ||
905 | 15 | write_htaccess, | ||
906 | 16 | write_htpasswd, | ||
907 | 17 | ) | ||
908 | 18 | from lp.registry.interfaces.distribution import IDistributionSet | ||
909 | 19 | from lp.registry.interfaces.person import IPersonSet | ||
910 | 20 | from lp.services.features.testing import FeatureFixture | ||
911 | 21 | from lp.soyuz.interfaces.archive import NAMED_AUTH_TOKEN_FEATURE_FLAG | ||
912 | 22 | from lp.testing import TestCaseWithFactory | ||
913 | 23 | from lp.testing.layers import LaunchpadZopelessLayer | ||
914 | 24 | |||
915 | 25 | |||
916 | 26 | class TestHtpasswdGeneration(TestCaseWithFactory): | ||
917 | 27 | """Test htpasswd generation.""" | ||
918 | 28 | |||
919 | 29 | layer = LaunchpadZopelessLayer | ||
920 | 30 | |||
921 | 31 | def setUp(self): | ||
922 | 32 | super(TestHtpasswdGeneration, self).setUp() | ||
923 | 33 | self.owner = self.factory.makePerson( | ||
924 | 34 | name="joe", displayname="Joe Smith") | ||
925 | 35 | self.ppa = self.factory.makeArchive( | ||
926 | 36 | owner=self.owner, name="myppa", private=True) | ||
927 | 37 | |||
928 | 38 | # "Ubuntu" doesn't have a proper publisher config but Ubuntutest | ||
929 | 39 | # does, so override the PPA's distro here. | ||
930 | 40 | ubuntutest = getUtility(IDistributionSet)['ubuntutest'] | ||
931 | 41 | self.ppa.distribution = ubuntutest | ||
932 | 42 | |||
933 | 43 | # Enable named auth tokens. | ||
934 | 44 | self.useFixture(FeatureFixture({NAMED_AUTH_TOKEN_FEATURE_FLAG: "on"})) | ||
935 | 45 | |||
936 | 46 | def test_write_htpasswd(self): | ||
937 | 47 | """Test that writing the .htpasswd file works properly.""" | ||
938 | 48 | fd, filename = tempfile.mkstemp() | ||
939 | 49 | os.close(fd) | ||
940 | 50 | |||
941 | 51 | TEST_PASSWORD = "password" | ||
942 | 52 | TEST_PASSWORD2 = "passwor2" | ||
943 | 53 | |||
944 | 54 | # We provide a constant salt to the crypt function so that we | ||
945 | 55 | # can test the encrypted result. | ||
946 | 56 | SALT = "XX" | ||
947 | 57 | |||
948 | 58 | user1 = ("user", TEST_PASSWORD, SALT) | ||
949 | 59 | user2 = ("user2", TEST_PASSWORD2, SALT) | ||
950 | 60 | list_of_users = [user1] | ||
951 | 61 | list_of_users.append(user2) | ||
952 | 62 | |||
953 | 63 | write_htpasswd(filename, list_of_users) | ||
954 | 64 | |||
955 | 65 | expected_contents = [ | ||
956 | 66 | "user:XXq2wKiyI43A2", | ||
957 | 67 | "user2:XXaQB8b5Gtwi.", | ||
958 | 68 | ] | ||
959 | 69 | |||
960 | 70 | file = open(filename, "r") | ||
961 | 71 | file_contents = file.read().splitlines() | ||
962 | 72 | file.close() | ||
963 | 73 | os.remove(filename) | ||
964 | 74 | |||
965 | 75 | self.assertEqual(expected_contents, file_contents) | ||
966 | 76 | |||
967 | 77 | def test_write_htaccess(self): | ||
968 | 78 | # write_access can write a correct htaccess file. | ||
969 | 79 | fd, filename = tempfile.mkstemp() | ||
970 | 80 | os.close(fd) | ||
971 | 81 | |||
972 | 82 | write_htaccess(filename, "/some/distroot") | ||
973 | 83 | self.assertTrue( | ||
974 | 84 | os.path.isfile(filename), | ||
975 | 85 | "%s is not present when it should be" % filename) | ||
976 | 86 | self.addCleanup(os.remove, filename) | ||
977 | 87 | |||
978 | 88 | contents = [ | ||
979 | 89 | "", | ||
980 | 90 | "AuthType Basic", | ||
981 | 91 | "AuthName \"Token Required\"", | ||
982 | 92 | "AuthUserFile /some/distroot/.htpasswd", | ||
983 | 93 | "Require valid-user", | ||
984 | 94 | ] | ||
985 | 95 | |||
986 | 96 | file = open(filename, "r") | ||
987 | 97 | file_contents = file.read().splitlines() | ||
988 | 98 | file.close() | ||
989 | 99 | |||
990 | 100 | self.assertEqual(contents, file_contents) | ||
991 | 101 | |||
992 | 102 | def test_credentials_for_archive_empty(self): | ||
993 | 103 | # If there are no ArchiveAuthTokens for an archive just | ||
994 | 104 | # the buildd secret is returned. | ||
995 | 105 | self.ppa.buildd_secret = "sekr1t" | ||
996 | 106 | self.assertEqual( | ||
997 | 107 | [("buildd", "sekr1t", "bu")], | ||
998 | 108 | list(htpasswd_credentials_for_archive(self.ppa))) | ||
999 | 109 | |||
1000 | 110 | def test_credentials_for_archive(self): | ||
1001 | 111 | # ArchiveAuthTokens for an archive are returned by | ||
1002 | 112 | # credentials_for_archive. | ||
1003 | 113 | self.ppa.buildd_secret = "geheim" | ||
1004 | 114 | name12 = getUtility(IPersonSet).getByName("name12") | ||
1005 | 115 | name16 = getUtility(IPersonSet).getByName("name16") | ||
1006 | 116 | hyphenated = self.factory.makePerson(name="a-b-c") | ||
1007 | 117 | self.ppa.newSubscription(name12, self.ppa.owner) | ||
1008 | 118 | self.ppa.newSubscription(name16, self.ppa.owner) | ||
1009 | 119 | self.ppa.newSubscription(hyphenated, self.ppa.owner) | ||
1010 | 120 | first_created_token = self.ppa.newAuthToken(name16) | ||
1011 | 121 | second_created_token = self.ppa.newAuthToken(name12) | ||
1012 | 122 | third_created_token = self.ppa.newAuthToken(hyphenated) | ||
1013 | 123 | named_token_20 = self.ppa.newNamedAuthToken("name20", as_dict=False) | ||
1014 | 124 | named_token_14 = self.ppa.newNamedAuthToken("name14", as_dict=False) | ||
1015 | 125 | named_token_99 = self.ppa.newNamedAuthToken("name99", as_dict=False) | ||
1016 | 126 | named_token_99.deactivate() | ||
1017 | 127 | |||
1018 | 128 | expected_credentials = [ | ||
1019 | 129 | ("buildd", "geheim", "bu"), | ||
1020 | 130 | ("+name14", named_token_14.token, "bm"), | ||
1021 | 131 | ("+name20", named_token_20.token, "bm"), | ||
1022 | 132 | ("a-b-c", third_created_token.token, "YS"), | ||
1023 | 133 | ("name12", second_created_token.token, "bm"), | ||
1024 | 134 | ("name16", first_created_token.token, "bm"), | ||
1025 | 135 | ] | ||
1026 | 136 | credentials = list(htpasswd_credentials_for_archive(self.ppa)) | ||
1027 | 137 | |||
1028 | 138 | # Use assertEqual instead of assertContentEqual to verify order. | ||
1029 | 139 | self.assertEqual(expected_credentials, credentials) | ||
1030 | diff --git a/lib/lp/archivepublisher/tests/test_publisher.py b/lib/lp/archivepublisher/tests/test_publisher.py | |||
1031 | index 67ec904..0c8ff87 100644 | |||
1032 | --- a/lib/lp/archivepublisher/tests/test_publisher.py | |||
1033 | +++ b/lib/lp/archivepublisher/tests/test_publisher.py | |||
1034 | @@ -12,7 +12,6 @@ from collections import ( | |||
1035 | 12 | defaultdict, | 12 | defaultdict, |
1036 | 13 | OrderedDict, | 13 | OrderedDict, |
1037 | 14 | ) | 14 | ) |
1038 | 15 | import crypt | ||
1039 | 16 | from datetime import ( | 15 | from datetime import ( |
1040 | 17 | datetime, | 16 | datetime, |
1041 | 18 | timedelta, | 17 | timedelta, |
1042 | @@ -2328,44 +2327,6 @@ class TestPublisher(TestPublisherBase): | |||
1043 | 2328 | hoary_pub.requestDeletion(self.ubuntutest.owner) | 2327 | hoary_pub.requestDeletion(self.ubuntutest.owner) |
1044 | 2329 | self._assertPublishesSeriesAlias(publisher, "breezy-autotest") | 2328 | self._assertPublishesSeriesAlias(publisher, "breezy-autotest") |
1045 | 2330 | 2329 | ||
1046 | 2331 | def testHtaccessForPrivatePPA(self): | ||
1047 | 2332 | # A htaccess file is created for new private PPA's. | ||
1048 | 2333 | |||
1049 | 2334 | ppa = self.factory.makeArchive( | ||
1050 | 2335 | distribution=self.ubuntutest, private=True) | ||
1051 | 2336 | ppa.buildd_secret = "geheim" | ||
1052 | 2337 | |||
1053 | 2338 | # Set up the publisher for it and publish its repository. | ||
1054 | 2339 | # setupArchiveDirs is what actually configures the htaccess file. | ||
1055 | 2340 | getPublisher(ppa, [], self.logger).setupArchiveDirs() | ||
1056 | 2341 | pubconf = getPubConfig(ppa) | ||
1057 | 2342 | htaccess_path = os.path.join(pubconf.archiveroot, ".htaccess") | ||
1058 | 2343 | self.assertTrue(os.path.exists(htaccess_path)) | ||
1059 | 2344 | with open(htaccess_path, 'r') as htaccess_f: | ||
1060 | 2345 | self.assertEqual(dedent(""" | ||
1061 | 2346 | AuthType Basic | ||
1062 | 2347 | AuthName "Token Required" | ||
1063 | 2348 | AuthUserFile %s/.htpasswd | ||
1064 | 2349 | Require valid-user | ||
1065 | 2350 | """) % pubconf.archiveroot, | ||
1066 | 2351 | htaccess_f.read()) | ||
1067 | 2352 | |||
1068 | 2353 | htpasswd_path = os.path.join(pubconf.archiveroot, ".htpasswd") | ||
1069 | 2354 | |||
1070 | 2355 | # Read it back in. | ||
1071 | 2356 | with open(htpasswd_path, "r") as htpasswd_f: | ||
1072 | 2357 | file_contents = htpasswd_f.readlines() | ||
1073 | 2358 | |||
1074 | 2359 | self.assertEqual(1, len(file_contents)) | ||
1075 | 2360 | |||
1076 | 2361 | # The first line should be the buildd_secret. | ||
1077 | 2362 | [user, password] = file_contents[0].strip().split(":", 1) | ||
1078 | 2363 | self.assertEqual("buildd", user) | ||
1079 | 2364 | # We can re-encrypt the buildd_secret and it should match the | ||
1080 | 2365 | # one in the .htpasswd file. | ||
1081 | 2366 | encrypted_secret = crypt.crypt(ppa.buildd_secret, password) | ||
1082 | 2367 | self.assertEqual(encrypted_secret, password) | ||
1083 | 2368 | |||
1084 | 2369 | def testWriteSuiteI18n(self): | 2330 | def testWriteSuiteI18n(self): |
1085 | 2370 | """Test i18n/Index writing.""" | 2331 | """Test i18n/Index writing.""" |
1086 | 2371 | publisher = Publisher( | 2332 | publisher = Publisher( |
1087 | diff --git a/lib/lp/oci/model/ocirecipebuildjob.py b/lib/lp/oci/model/ocirecipebuildjob.py | |||
1088 | index 8f55850..9ec2e91 100644 | |||
1089 | --- a/lib/lp/oci/model/ocirecipebuildjob.py | |||
1090 | +++ b/lib/lp/oci/model/ocirecipebuildjob.py | |||
1091 | @@ -45,10 +45,7 @@ from lp.oci.interfaces.ociregistryclient import ( | |||
1092 | 45 | ) | 45 | ) |
1093 | 46 | from lp.services.config import config | 46 | from lp.services.config import config |
1094 | 47 | from lp.services.database.enumcol import DBEnum | 47 | from lp.services.database.enumcol import DBEnum |
1099 | 48 | from lp.services.database.interfaces import ( | 48 | from lp.services.database.interfaces import IStore |
1096 | 49 | IMasterStore, | ||
1097 | 50 | IStore, | ||
1098 | 51 | ) | ||
1100 | 52 | from lp.services.database.locking import ( | 49 | from lp.services.database.locking import ( |
1101 | 53 | AdvisoryLockHeld, | 50 | AdvisoryLockHeld, |
1102 | 54 | LockType, | 51 | LockType, |
1103 | @@ -189,6 +186,13 @@ class OCIRegistryUploadJob(OCIRecipeBuildJobDerived): | |||
1104 | 189 | 186 | ||
1105 | 190 | class_job_type = OCIRecipeBuildJobType.REGISTRY_UPLOAD | 187 | class_job_type = OCIRecipeBuildJobType.REGISTRY_UPLOAD |
1106 | 191 | 188 | ||
1107 | 189 | # This is a known slow task that will exceed the timeouts for | ||
1108 | 190 | # the normal job queue, so put it on a queue with longer timeouts | ||
1109 | 191 | task_queue = 'launchpad_job_slow' | ||
1110 | 192 | |||
1111 | 193 | soft_time_limit = timedelta(minutes=60) | ||
1112 | 194 | lease_duration = timedelta(minutes=60) | ||
1113 | 195 | |||
1114 | 192 | class ManifestListUploadError(Exception): | 196 | class ManifestListUploadError(Exception): |
1115 | 193 | pass | 197 | pass |
1116 | 194 | 198 | ||
1117 | diff --git a/lib/lp/oci/tests/test_ocirecipebuildjob.py b/lib/lp/oci/tests/test_ocirecipebuildjob.py | |||
1118 | index 95718bb..9dfb785 100644 | |||
1119 | --- a/lib/lp/oci/tests/test_ocirecipebuildjob.py | |||
1120 | +++ b/lib/lp/oci/tests/test_ocirecipebuildjob.py | |||
1121 | @@ -53,10 +53,7 @@ from lp.services.database.locking import ( | |||
1122 | 53 | from lp.services.features.testing import FeatureFixture | 53 | from lp.services.features.testing import FeatureFixture |
1123 | 54 | from lp.services.job.interfaces.job import JobStatus | 54 | from lp.services.job.interfaces.job import JobStatus |
1124 | 55 | from lp.services.job.runner import JobRunner | 55 | from lp.services.job.runner import JobRunner |
1129 | 56 | from lp.services.job.tests import ( | 56 | from lp.services.job.tests import block_on_job |
1126 | 57 | block_on_job, | ||
1127 | 58 | pop_remote_notifications, | ||
1128 | 59 | ) | ||
1130 | 60 | from lp.services.statsd.tests import StatsMixin | 57 | from lp.services.statsd.tests import StatsMixin |
1131 | 61 | from lp.services.webapp import canonical_url | 58 | from lp.services.webapp import canonical_url |
1132 | 62 | from lp.services.webhooks.testing import LogsScheduledWebhooks | 59 | from lp.services.webhooks.testing import LogsScheduledWebhooks |
1133 | @@ -71,7 +68,7 @@ from lp.testing.dbuser import ( | |||
1134 | 71 | from lp.testing.fakemethod import FakeMethod | 68 | from lp.testing.fakemethod import FakeMethod |
1135 | 72 | from lp.testing.fixture import ZopeUtilityFixture | 69 | from lp.testing.fixture import ZopeUtilityFixture |
1136 | 73 | from lp.testing.layers import ( | 70 | from lp.testing.layers import ( |
1138 | 74 | CeleryJobLayer, | 71 | CelerySlowJobLayer, |
1139 | 75 | DatabaseFunctionalLayer, | 72 | DatabaseFunctionalLayer, |
1140 | 76 | LaunchpadZopelessLayer, | 73 | LaunchpadZopelessLayer, |
1141 | 77 | ) | 74 | ) |
1142 | @@ -519,7 +516,6 @@ class TestOCIRegistryUploadJob(TestCaseWithFactory, MultiArchRecipeMixin, | |||
1143 | 519 | 516 | ||
1144 | 520 | self.assertContentEqual([], ocibuild.registry_upload_jobs) | 517 | self.assertContentEqual([], ocibuild.registry_upload_jobs) |
1145 | 521 | job = OCIRegistryUploadJob.create(ocibuild) | 518 | job = OCIRegistryUploadJob.create(ocibuild) |
1146 | 522 | client = FakeRegistryClient() | ||
1147 | 523 | switch_dbuser(config.IOCIRegistryUploadJobSource.dbuser) | 519 | switch_dbuser(config.IOCIRegistryUploadJobSource.dbuser) |
1148 | 524 | # Fork so that we can take an advisory lock from a different | 520 | # Fork so that we can take an advisory lock from a different |
1149 | 525 | # PostgreSQL session. | 521 | # PostgreSQL session. |
1150 | @@ -551,8 +547,6 @@ class TestOCIRegistryUploadJob(TestCaseWithFactory, MultiArchRecipeMixin, | |||
1151 | 551 | os.kill(pid, signal.SIGINT) | 547 | os.kill(pid, signal.SIGINT) |
1152 | 552 | 548 | ||
1153 | 553 | 549 | ||
1154 | 554 | |||
1155 | 555 | |||
1156 | 556 | class TestOCIRegistryUploadJobViaCelery(TestCaseWithFactory, | 550 | class TestOCIRegistryUploadJobViaCelery(TestCaseWithFactory, |
1157 | 557 | MultiArchRecipeMixin): | 551 | MultiArchRecipeMixin): |
1158 | 558 | """Runs OCIRegistryUploadJob via Celery, to make sure the machinery | 552 | """Runs OCIRegistryUploadJob via Celery, to make sure the machinery |
1159 | @@ -563,7 +557,7 @@ class TestOCIRegistryUploadJobViaCelery(TestCaseWithFactory, | |||
1160 | 563 | so we should make sure we are not breaking anything in the interaction | 557 | so we should make sure we are not breaking anything in the interaction |
1161 | 564 | with the job lifecycle via celery. | 558 | with the job lifecycle via celery. |
1162 | 565 | """ | 559 | """ |
1164 | 566 | layer = CeleryJobLayer | 560 | layer = CelerySlowJobLayer |
1165 | 567 | 561 | ||
1166 | 568 | def setUp(self): | 562 | def setUp(self): |
1167 | 569 | super(TestOCIRegistryUploadJobViaCelery, self).setUp() | 563 | super(TestOCIRegistryUploadJobViaCelery, self).setUp() |
1168 | @@ -583,4 +577,5 @@ class TestOCIRegistryUploadJobViaCelery(TestCaseWithFactory, | |||
1169 | 583 | for build in builds: | 577 | for build in builds: |
1170 | 584 | OCIRegistryUploadJob.create(build) | 578 | OCIRegistryUploadJob.create(build) |
1171 | 585 | transaction.commit() | 579 | transaction.commit() |
1173 | 586 | self.assertEqual(0, len(pop_remote_notifications())) | 580 | messages = [message.as_string() for message in pop_notifications()] |
1174 | 581 | self.assertEqual(0, len(messages)) | ||
1175 | diff --git a/lib/lp/registry/browser/distribution.py b/lib/lp/registry/browser/distribution.py | |||
1176 | index e5777ec..aa8a74c 100644 | |||
1177 | --- a/lib/lp/registry/browser/distribution.py | |||
1178 | +++ b/lib/lp/registry/browser/distribution.py | |||
1179 | @@ -82,9 +82,6 @@ from lp.bugs.browser.structuralsubscription import ( | |||
1180 | 82 | ) | 82 | ) |
1181 | 83 | from lp.buildmaster.interfaces.processor import IProcessorSet | 83 | from lp.buildmaster.interfaces.processor import IProcessorSet |
1182 | 84 | from lp.code.browser.vcslisting import TargetDefaultVCSNavigationMixin | 84 | from lp.code.browser.vcslisting import TargetDefaultVCSNavigationMixin |
1183 | 85 | from lp.oci.interfaces.ociregistrycredentials import ( | ||
1184 | 86 | IOCIRegistryCredentialsSet, | ||
1185 | 87 | ) | ||
1186 | 88 | from lp.registry.browser import ( | 85 | from lp.registry.browser import ( |
1187 | 89 | add_subscribe_link, | 86 | add_subscribe_link, |
1188 | 90 | RegistryEditFormView, | 87 | RegistryEditFormView, |
1189 | diff --git a/lib/lp/registry/interfaces/distribution.py b/lib/lp/registry/interfaces/distribution.py | |||
1190 | index 5ec5271..8f43f58 100644 | |||
1191 | --- a/lib/lp/registry/interfaces/distribution.py | |||
1192 | +++ b/lib/lp/registry/interfaces/distribution.py | |||
1193 | @@ -14,15 +14,18 @@ __all__ = [ | |||
1194 | 14 | 'IDistributionSet', | 14 | 'IDistributionSet', |
1195 | 15 | 'NoPartnerArchive', | 15 | 'NoPartnerArchive', |
1196 | 16 | 'NoSuchDistribution', | 16 | 'NoSuchDistribution', |
1197 | 17 | 'NoOCIAdminForDistribution', | ||
1198 | 17 | ] | 18 | ] |
1199 | 18 | 19 | ||
1200 | 19 | from lazr.lifecycle.snapshot import doNotSnapshot | 20 | from lazr.lifecycle.snapshot import doNotSnapshot |
1201 | 20 | from lazr.restful.declarations import ( | 21 | from lazr.restful.declarations import ( |
1202 | 21 | call_with, | 22 | call_with, |
1203 | 22 | collection_default_content, | 23 | collection_default_content, |
1204 | 24 | error_status, | ||
1205 | 23 | export_factory_operation, | 25 | export_factory_operation, |
1206 | 24 | export_operation_as, | 26 | export_operation_as, |
1207 | 25 | export_read_operation, | 27 | export_read_operation, |
1208 | 28 | export_write_operation, | ||
1209 | 26 | exported, | 29 | exported, |
1210 | 27 | exported_as_webservice_collection, | 30 | exported_as_webservice_collection, |
1211 | 28 | exported_as_webservice_entry, | 31 | exported_as_webservice_entry, |
1212 | @@ -38,6 +41,7 @@ from lazr.restful.fields import ( | |||
1213 | 38 | Reference, | 41 | Reference, |
1214 | 39 | ) | 42 | ) |
1215 | 40 | from lazr.restful.interface import copy_field | 43 | from lazr.restful.interface import copy_field |
1216 | 44 | from six.moves import http_client | ||
1217 | 41 | from zope.interface import ( | 45 | from zope.interface import ( |
1218 | 42 | Attribute, | 46 | Attribute, |
1219 | 43 | Interface, | 47 | Interface, |
1220 | @@ -113,6 +117,15 @@ from lp.translations.interfaces.hastranslationimports import ( | |||
1221 | 113 | from lp.translations.interfaces.translationpolicy import ITranslationPolicy | 117 | from lp.translations.interfaces.translationpolicy import ITranslationPolicy |
1222 | 114 | 118 | ||
1223 | 115 | 119 | ||
1224 | 120 | @error_status(http_client.BAD_REQUEST) | ||
1225 | 121 | class NoOCIAdminForDistribution(Exception): | ||
1226 | 122 | """There is no OCI Project Admin for this distribution.""" | ||
1227 | 123 | |||
1228 | 124 | def __init__(self): | ||
1229 | 125 | super(NoOCIAdminForDistribution, self).__init__( | ||
1230 | 126 | "There is no OCI Project Admin for this distribution.") | ||
1231 | 127 | |||
1232 | 128 | |||
1233 | 116 | class IDistributionMirrorMenuMarker(Interface): | 129 | class IDistributionMirrorMenuMarker(Interface): |
1234 | 117 | """Marker interface for Mirror navigation.""" | 130 | """Marker interface for Mirror navigation.""" |
1235 | 118 | 131 | ||
1236 | @@ -129,6 +142,35 @@ class DistributionNameField(PillarNameField): | |||
1237 | 129 | class IDistributionEditRestricted(IOfficialBugTagTargetRestricted): | 142 | class IDistributionEditRestricted(IOfficialBugTagTargetRestricted): |
1238 | 130 | """IDistribution properties requiring launchpad.Edit permission.""" | 143 | """IDistribution properties requiring launchpad.Edit permission.""" |
1239 | 131 | 144 | ||
1240 | 145 | @call_with(registrant=REQUEST_USER) | ||
1241 | 146 | @operation_parameters( | ||
1242 | 147 | registry_url=TextLine( | ||
1243 | 148 | title=_("The registry url."), | ||
1244 | 149 | description=_("The url of the OCI registry to use."), | ||
1245 | 150 | required=True), | ||
1246 | 151 | region=TextLine( | ||
1247 | 152 | title=_("OCI registry region."), | ||
1248 | 153 | description=_("The region of the OCI registry."), | ||
1249 | 154 | required=False), | ||
1250 | 155 | username=TextLine( | ||
1251 | 156 | title=_("Username"), | ||
1252 | 157 | description=_("The username for the OCI registry."), | ||
1253 | 158 | required=False), | ||
1254 | 159 | password=TextLine( | ||
1255 | 160 | title=_("Password"), | ||
1256 | 161 | description=_("The password for the OCI registry."), | ||
1257 | 162 | required=False)) | ||
1258 | 163 | @export_write_operation() | ||
1259 | 164 | @operation_for_version("devel") | ||
1260 | 165 | def setOCICredentials(registrant, registry_url, region, | ||
1261 | 166 | username, password): | ||
1262 | 167 | """Set the credentials for the OCI registry for OCI projects.""" | ||
1263 | 168 | |||
1264 | 169 | @export_write_operation() | ||
1265 | 170 | @operation_for_version("devel") | ||
1266 | 171 | def deleteOCICredentials(): | ||
1267 | 172 | """Delete any existing OCI credentials for the distribution.""" | ||
1268 | 173 | |||
1269 | 132 | 174 | ||
1270 | 133 | class IDistributionDriverRestricted(Interface): | 175 | class IDistributionDriverRestricted(Interface): |
1271 | 134 | """IDistribution properties requiring launchpad.Driver permission.""" | 176 | """IDistribution properties requiring launchpad.Driver permission.""" |
1272 | @@ -727,7 +769,6 @@ class IDistributionPublic( | |||
1273 | 727 | "images in this distribution to a registry."), | 769 | "images in this distribution to a registry."), |
1274 | 728 | required=False, readonly=False) | 770 | required=False, readonly=False) |
1275 | 729 | 771 | ||
1276 | 730 | |||
1277 | 731 | @exported_as_webservice_entry(as_of="beta") | 772 | @exported_as_webservice_entry(as_of="beta") |
1278 | 732 | class IDistribution( | 773 | class IDistribution( |
1279 | 733 | IDistributionEditRestricted, IDistributionPublic, IHasBugSupervisor, | 774 | IDistributionEditRestricted, IDistributionPublic, IHasBugSupervisor, |
1280 | diff --git a/lib/lp/registry/model/distribution.py b/lib/lp/registry/model/distribution.py | |||
1281 | index 0288c54..f76d28b 100644 | |||
1282 | --- a/lib/lp/registry/model/distribution.py | |||
1283 | +++ b/lib/lp/registry/model/distribution.py | |||
1284 | @@ -89,6 +89,7 @@ from lp.bugs.model.structuralsubscription import ( | |||
1285 | 89 | from lp.code.interfaces.seriessourcepackagebranch import ( | 89 | from lp.code.interfaces.seriessourcepackagebranch import ( |
1286 | 90 | IFindOfficialBranchLinks, | 90 | IFindOfficialBranchLinks, |
1287 | 91 | ) | 91 | ) |
1288 | 92 | from lp.oci.interfaces.ociregistrycredentials import IOCIRegistryCredentialsSet | ||
1289 | 92 | from lp.registry.enums import ( | 93 | from lp.registry.enums import ( |
1290 | 93 | BranchSharingPolicy, | 94 | BranchSharingPolicy, |
1291 | 94 | BugSharingPolicy, | 95 | BugSharingPolicy, |
1292 | @@ -101,6 +102,7 @@ from lp.registry.interfaces.accesspolicy import IAccessPolicySource | |||
1293 | 101 | from lp.registry.interfaces.distribution import ( | 102 | from lp.registry.interfaces.distribution import ( |
1294 | 102 | IDistribution, | 103 | IDistribution, |
1295 | 103 | IDistributionSet, | 104 | IDistributionSet, |
1296 | 105 | NoOCIAdminForDistribution, | ||
1297 | 104 | ) | 106 | ) |
1298 | 105 | from lp.registry.interfaces.distributionmirror import ( | 107 | from lp.registry.interfaces.distributionmirror import ( |
1299 | 106 | IDistributionMirror, | 108 | IDistributionMirror, |
1300 | @@ -1531,6 +1533,32 @@ class Distribution(SQLBase, BugTargetBase, MakesAnnouncements, | |||
1301 | 1531 | pillar=self, registrant=registrant, name=name, | 1533 | pillar=self, registrant=registrant, name=name, |
1302 | 1532 | description=description) | 1534 | description=description) |
1303 | 1533 | 1535 | ||
1304 | 1536 | def setOCICredentials(self, registrant, registry_url, | ||
1305 | 1537 | region, username, password): | ||
1306 | 1538 | """See `IDistribution`.""" | ||
1307 | 1539 | if not self.oci_project_admin: | ||
1308 | 1540 | raise NoOCIAdminForDistribution() | ||
1309 | 1541 | new_credentials = getUtility(IOCIRegistryCredentialsSet).getOrCreate( | ||
1310 | 1542 | registrant, | ||
1311 | 1543 | self.oci_project_admin, | ||
1312 | 1544 | registry_url, | ||
1313 | 1545 | {"username": username, "password": password, "region": region}, | ||
1314 | 1546 | override_owner=True) | ||
1315 | 1547 | old_credentials = self.oci_registry_credentials | ||
1316 | 1548 | if self.oci_registry_credentials != new_credentials: | ||
1317 | 1549 | # Remove the old credentials as we're assigning new ones | ||
1318 | 1550 | # or clearing them | ||
1319 | 1551 | self.oci_registry_credentials = new_credentials | ||
1320 | 1552 | if old_credentials: | ||
1321 | 1553 | old_credentials.destroySelf() | ||
1322 | 1554 | |||
1323 | 1555 | def deleteOCICredentials(self): | ||
1324 | 1556 | """See `IDistribution`.""" | ||
1325 | 1557 | old_credentials = self.oci_registry_credentials | ||
1326 | 1558 | if old_credentials: | ||
1327 | 1559 | self.oci_registry_credentials = None | ||
1328 | 1560 | old_credentials.destroySelf() | ||
1329 | 1561 | |||
1330 | 1534 | 1562 | ||
1331 | 1535 | @implementer(IDistributionSet) | 1563 | @implementer(IDistributionSet) |
1332 | 1536 | class DistributionSet: | 1564 | class DistributionSet: |
1333 | diff --git a/lib/lp/registry/scripts/closeaccount.py b/lib/lp/registry/scripts/closeaccount.py | |||
1334 | index 27b2eb1..b4e505b 100644 | |||
1335 | --- a/lib/lp/registry/scripts/closeaccount.py | |||
1336 | +++ b/lib/lp/registry/scripts/closeaccount.py | |||
1337 | @@ -362,12 +362,9 @@ def close_account(username, log): | |||
1338 | 362 | # the placeholder person row. | 362 | # the placeholder person row. |
1339 | 363 | skip.add(('sprintattendance', 'attendee')) | 363 | skip.add(('sprintattendance', 'attendee')) |
1340 | 364 | 364 | ||
1347 | 365 | # generate_ppa_htaccess currently relies on seeing active | 365 | # PPA authorization is now handled dynamically and checks the |
1348 | 366 | # ArchiveAuthToken rows so that it knows which ones to remove from | 366 | # subscriber's account status, so this isn't strictly necessary, but |
1349 | 367 | # .htpasswd files on disk in response to the cancellation of the | 367 | # it's still nice to have the per-person audit trail. |
1344 | 368 | # corresponding ArchiveSubscriber rows; but even once PPA authorisation | ||
1345 | 369 | # is handled dynamically, we probably still want to have the per-person | ||
1346 | 370 | # audit trail here. | ||
1350 | 371 | archive_subscriber_ids = set(store.find( | 368 | archive_subscriber_ids = set(store.find( |
1351 | 372 | ArchiveSubscriber.id, | 369 | ArchiveSubscriber.id, |
1352 | 373 | ArchiveSubscriber.subscriber_id == person.id, | 370 | ArchiveSubscriber.subscriber_id == person.id, |
1353 | diff --git a/lib/lp/registry/tests/test_distribution.py b/lib/lp/registry/tests/test_distribution.py | |||
1354 | index 0b9f712..005a7e6 100644 | |||
1355 | --- a/lib/lp/registry/tests/test_distribution.py | |||
1356 | +++ b/lib/lp/registry/tests/test_distribution.py | |||
1357 | @@ -28,6 +28,7 @@ from lp.app.enums import ( | |||
1358 | 28 | ) | 28 | ) |
1359 | 29 | from lp.app.errors import NotFoundError | 29 | from lp.app.errors import NotFoundError |
1360 | 30 | from lp.app.interfaces.launchpad import ILaunchpadCelebrities | 30 | from lp.app.interfaces.launchpad import ILaunchpadCelebrities |
1361 | 31 | from lp.oci.tests.helpers import OCIConfigHelperMixin | ||
1362 | 31 | from lp.registry.enums import ( | 32 | from lp.registry.enums import ( |
1363 | 32 | BranchSharingPolicy, | 33 | BranchSharingPolicy, |
1364 | 33 | BugSharingPolicy, | 34 | BugSharingPolicy, |
1365 | @@ -761,7 +762,7 @@ class DistributionOCIProjectAdminPermission(TestCaseWithFactory): | |||
1366 | 761 | self.assertTrue(distro.canAdministerOCIProjects(admin)) | 762 | self.assertTrue(distro.canAdministerOCIProjects(admin)) |
1367 | 762 | 763 | ||
1368 | 763 | 764 | ||
1370 | 764 | class TestDistributionWebservice(TestCaseWithFactory): | 765 | class TestDistributionWebservice(OCIConfigHelperMixin, TestCaseWithFactory): |
1371 | 765 | """Test the IDistribution API. | 766 | """Test the IDistribution API. |
1372 | 766 | 767 | ||
1373 | 767 | Some tests already exist in xx-distribution.txt. | 768 | Some tests already exist in xx-distribution.txt. |
1374 | @@ -842,3 +843,92 @@ class TestDistributionWebservice(TestCaseWithFactory): | |||
1375 | 842 | start_date=(now - day).isoformat(), | 843 | start_date=(now - day).isoformat(), |
1376 | 843 | end_date=now.isoformat()) | 844 | end_date=now.isoformat()) |
1377 | 844 | self.assertEqual([], empty_response.jsonBody()) | 845 | self.assertEqual([], empty_response.jsonBody()) |
1378 | 846 | |||
1379 | 847 | def test_setOCICredentials(self): | ||
1380 | 848 | # We can add OCI Credentials to the distribution | ||
1381 | 849 | self.setConfig() | ||
1382 | 850 | with person_logged_in(self.person): | ||
1383 | 851 | distro = self.factory.makeDistribution(owner=self.person) | ||
1384 | 852 | distro.oci_project_admin = self.person | ||
1385 | 853 | distro_url = api_url(distro) | ||
1386 | 854 | |||
1387 | 855 | resp = self.webservice.named_post( | ||
1388 | 856 | distro_url, | ||
1389 | 857 | "setOCICredentials", | ||
1390 | 858 | registry_url="http://registry.test", | ||
1391 | 859 | username="test-username", | ||
1392 | 860 | password="test-password", | ||
1393 | 861 | region="test-region" | ||
1394 | 862 | ) | ||
1395 | 863 | |||
1396 | 864 | self.assertEqual(200, resp.status) | ||
1397 | 865 | with person_logged_in(self.person): | ||
1398 | 866 | self.assertEqual( | ||
1399 | 867 | "http://registry.test", | ||
1400 | 868 | distro.oci_registry_credentials.url | ||
1401 | 869 | ) | ||
1402 | 870 | credentials = distro.oci_registry_credentials.getCredentials() | ||
1403 | 871 | self.assertDictEqual({ | ||
1404 | 872 | "username": "test-username", | ||
1405 | 873 | "password": "test-password", | ||
1406 | 874 | "region": "test-region"}, | ||
1407 | 875 | credentials) | ||
1408 | 876 | |||
1409 | 877 | def test_setOCICredentials_no_oci_admin(self): | ||
1410 | 878 | # If there's no oci_project_admin to own the credentials, error | ||
1411 | 879 | self.setConfig() | ||
1412 | 880 | with person_logged_in(self.person): | ||
1413 | 881 | distro = self.factory.makeDistribution(owner=self.person) | ||
1414 | 882 | distro_url = api_url(distro) | ||
1415 | 883 | |||
1416 | 884 | resp = self.webservice.named_post( | ||
1417 | 885 | distro_url, | ||
1418 | 886 | "setOCICredentials", | ||
1419 | 887 | registry_url="http://registry.test", | ||
1420 | 888 | ) | ||
1421 | 889 | |||
1422 | 890 | self.assertEqual(400, resp.status) | ||
1423 | 891 | self.assertIn( | ||
1424 | 892 | b"no OCI Project Admin for this distribution", | ||
1425 | 893 | resp.body) | ||
1426 | 894 | |||
1427 | 895 | def test_setOCICredentials_changes_credentials(self): | ||
1428 | 896 | # if we have existing credentials, we should change them | ||
1429 | 897 | self.setConfig() | ||
1430 | 898 | with person_logged_in(self.person): | ||
1431 | 899 | distro = self.factory.makeDistribution(owner=self.person) | ||
1432 | 900 | distro.oci_project_admin = self.person | ||
1433 | 901 | credentials = self.factory.makeOCIRegistryCredentials() | ||
1434 | 902 | distro.oci_registry_credentials = credentials | ||
1435 | 903 | distro_url = api_url(distro) | ||
1436 | 904 | |||
1437 | 905 | resp = self.webservice.named_post( | ||
1438 | 906 | distro_url, | ||
1439 | 907 | "setOCICredentials", | ||
1440 | 908 | registry_url="http://registry.test", | ||
1441 | 909 | ) | ||
1442 | 910 | |||
1443 | 911 | self.assertEqual(200, resp.status) | ||
1444 | 912 | with person_logged_in(self.person): | ||
1445 | 913 | self.assertEqual( | ||
1446 | 914 | "http://registry.test", | ||
1447 | 915 | distro.oci_registry_credentials.url | ||
1448 | 916 | ) | ||
1449 | 917 | |||
1450 | 918 | def test_deleteOCICredentials(self): | ||
1451 | 919 | # We can remove existing credentials | ||
1452 | 920 | self.setConfig() | ||
1453 | 921 | with person_logged_in(self.person): | ||
1454 | 922 | distro = self.factory.makeDistribution(owner=self.person) | ||
1455 | 923 | distro.oci_project_admin = self.person | ||
1456 | 924 | credentials = self.factory.makeOCIRegistryCredentials() | ||
1457 | 925 | distro.oci_registry_credentials = credentials | ||
1458 | 926 | distro_url = api_url(distro) | ||
1459 | 927 | |||
1460 | 928 | resp = self.webservice.named_post( | ||
1461 | 929 | distro_url, | ||
1462 | 930 | "deleteOCICredentials") | ||
1463 | 931 | |||
1464 | 932 | self.assertEqual(200, resp.status) | ||
1465 | 933 | with person_logged_in(self.person): | ||
1466 | 934 | self.assertIsNone(distro.oci_registry_credentials) | ||
1467 | diff --git a/lib/lp/registry/tests/test_personmerge.py b/lib/lp/registry/tests/test_personmerge.py | |||
1468 | index a1c00be..5c75436 100644 | |||
1469 | --- a/lib/lp/registry/tests/test_personmerge.py | |||
1470 | +++ b/lib/lp/registry/tests/test_personmerge.py | |||
1471 | @@ -719,8 +719,8 @@ class TestMergePeople(TestCaseWithFactory, KarmaTestMixin): | |||
1472 | 719 | self.useFixture(FeatureFixture({OCI_RECIPE_ALLOW_CREATE: 'on'})) | 719 | self.useFixture(FeatureFixture({OCI_RECIPE_ALLOW_CREATE: 'on'})) |
1473 | 720 | duplicate = self.factory.makePerson() | 720 | duplicate = self.factory.makePerson() |
1474 | 721 | mergee = self.factory.makePerson() | 721 | mergee = self.factory.makePerson() |
1477 | 722 | [ref] = self.factory.makeGitRefs(paths=['refs/heads/v1.0-20.04']) | 722 | [ref] = self.factory.makeGitRefs(paths=[u'refs/heads/v1.0-20.04']) |
1478 | 723 | [ref2] = self.factory.makeGitRefs(paths=['refs/heads/v1.0-20.04']) | 723 | [ref2] = self.factory.makeGitRefs(paths=[u'refs/heads/v1.0-20.04']) |
1479 | 724 | self.factory.makeOCIRecipe( | 724 | self.factory.makeOCIRecipe( |
1480 | 725 | registrant=duplicate, owner=duplicate, name=u'foo', git_ref=ref) | 725 | registrant=duplicate, owner=duplicate, name=u'foo', git_ref=ref) |
1481 | 726 | self.factory.makeOCIRecipe( | 726 | self.factory.makeOCIRecipe( |
1482 | diff --git a/lib/lp/services/scripts/base.py b/lib/lp/services/scripts/base.py | |||
1483 | index bb4490b..66ebdbf 100644 | |||
1484 | --- a/lib/lp/services/scripts/base.py | |||
1485 | +++ b/lib/lp/services/scripts/base.py | |||
1486 | @@ -406,10 +406,6 @@ class LaunchpadCronScript(LaunchpadScript): | |||
1487 | 406 | oops_hdlr = OopsHandler(self.name, logger=self.logger) | 406 | oops_hdlr = OopsHandler(self.name, logger=self.logger) |
1488 | 407 | logging.getLogger().addHandler(oops_hdlr) | 407 | logging.getLogger().addHandler(oops_hdlr) |
1489 | 408 | 408 | ||
1490 | 409 | def get_last_activity(self): | ||
1491 | 410 | """Return the last activity, if any.""" | ||
1492 | 411 | return getUtility(IScriptActivitySet).getLastActivity(self.name) | ||
1493 | 412 | |||
1494 | 413 | @log_unhandled_exception_and_exit | 409 | @log_unhandled_exception_and_exit |
1495 | 414 | def record_activity(self, date_started, date_completed): | 410 | def record_activity(self, date_started, date_completed): |
1496 | 415 | """Record the successful completion of the script.""" | 411 | """Record the successful completion of the script.""" |
1497 | diff --git a/lib/lp/snappy/model/snap.py b/lib/lp/snappy/model/snap.py | |||
1498 | index 58dc398..1a108b0 100644 | |||
1499 | --- a/lib/lp/snappy/model/snap.py | |||
1500 | +++ b/lib/lp/snappy/model/snap.py | |||
1501 | @@ -1187,13 +1187,6 @@ class Snap(Storm, WebhookTargetMixin): | |||
1502 | 1187 | person.is_team and | 1187 | person.is_team and |
1503 | 1188 | person.anyone_can_join()) | 1188 | person.anyone_can_join()) |
1504 | 1189 | 1189 | ||
1505 | 1190 | @property | ||
1506 | 1191 | def subscribers(self): | ||
1507 | 1192 | return Store.of(self).find( | ||
1508 | 1193 | Person, | ||
1509 | 1194 | SnapSubscription.person_id == Person.id, | ||
1510 | 1195 | SnapSubscription.snap == self) | ||
1511 | 1196 | |||
1512 | 1197 | def subscribe(self, person, subscribed_by, ignore_permissions=False): | 1190 | def subscribe(self, person, subscribed_by, ignore_permissions=False): |
1513 | 1198 | """See `ISnap`.""" | 1191 | """See `ISnap`.""" |
1514 | 1199 | if not self.userCanBeSubscribed(person): | 1192 | if not self.userCanBeSubscribed(person): |
1515 | diff --git a/lib/lp/soyuz/scripts/expire_archive_files.py b/lib/lp/soyuz/scripts/expire_archive_files.py | |||
1516 | index ade45d5..7ae54e8 100755 | |||
1517 | --- a/lib/lp/soyuz/scripts/expire_archive_files.py | |||
1518 | +++ b/lib/lp/soyuz/scripts/expire_archive_files.py | |||
1519 | @@ -49,6 +49,9 @@ netbook-remix-team | |||
1520 | 49 | netbook-team | 49 | netbook-team |
1521 | 50 | oem-solutions-group | 50 | oem-solutions-group |
1522 | 51 | payson | 51 | payson |
1523 | 52 | snappy-dev/edge | ||
1524 | 53 | snappy-dev/image | ||
1525 | 54 | snappy-dev/tools | ||
1526 | 52 | transyl | 55 | transyl |
1527 | 53 | ubuntu-cloud-archive | 56 | ubuntu-cloud-archive |
1528 | 54 | ubuntu-mobile | 57 | ubuntu-mobile |
1529 | diff --git a/lib/lp/testing/layers.py b/lib/lp/testing/layers.py | |||
1530 | index 1060fa9..541ef56 100644 | |||
1531 | --- a/lib/lp/testing/layers.py | |||
1532 | +++ b/lib/lp/testing/layers.py | |||
1533 | @@ -1899,6 +1899,24 @@ class CeleryJobLayer(AppServerLayer): | |||
1534 | 1899 | cls.celery_worker = None | 1899 | cls.celery_worker = None |
1535 | 1900 | 1900 | ||
1536 | 1901 | 1901 | ||
1537 | 1902 | class CelerySlowJobLayer(AppServerLayer): | ||
1538 | 1903 | """Layer for tests that run jobs via Celery.""" | ||
1539 | 1904 | |||
1540 | 1905 | celery_worker = None | ||
1541 | 1906 | |||
1542 | 1907 | @classmethod | ||
1543 | 1908 | @profiled | ||
1544 | 1909 | def setUp(cls): | ||
1545 | 1910 | cls.celery_worker = celery_worker('launchpad_job_slow') | ||
1546 | 1911 | cls.celery_worker.__enter__() | ||
1547 | 1912 | |||
1548 | 1913 | @classmethod | ||
1549 | 1914 | @profiled | ||
1550 | 1915 | def tearDown(cls): | ||
1551 | 1916 | cls.celery_worker.__exit__(None, None, None) | ||
1552 | 1917 | cls.celery_worker = None | ||
1553 | 1918 | |||
1554 | 1919 | |||
1555 | 1902 | class CeleryBzrsyncdJobLayer(AppServerLayer): | 1920 | class CeleryBzrsyncdJobLayer(AppServerLayer): |
1556 | 1903 | """Layer for tests that run jobs that read from branches via Celery.""" | 1921 | """Layer for tests that run jobs that read from branches via Celery.""" |
1557 | 1904 | 1922 | ||
1558 | diff --git a/utilities/manage-celery-workers.sh b/utilities/manage-celery-workers.sh | |||
1559 | 1905 | new file mode 100755 | 1923 | new file mode 100755 |
1560 | index 0000000..f83b14f | |||
1561 | --- /dev/null | |||
1562 | +++ b/utilities/manage-celery-workers.sh | |||
1563 | @@ -0,0 +1,58 @@ | |||
1564 | 1 | #!/bin/sh | ||
1565 | 2 | |||
1566 | 3 | # Used for dev and dogfood, do not use in a production like environment. | ||
1567 | 4 | |||
1568 | 5 | start_worker() { | ||
1569 | 6 | # Start a worker for a given queue | ||
1570 | 7 | queue=$1 | ||
1571 | 8 | echo "Starting worker for $queue" | ||
1572 | 9 | start-stop-daemon \ | ||
1573 | 10 | --start --oknodo --quiet --background \ | ||
1574 | 11 | --pidfile "/var/tmp/celeryd-$queue.pid" --make-pidfile \ | ||
1575 | 12 | --startas "$PWD/bin/celery" -- worker \ | ||
1576 | 13 | --queues="$queue"\ | ||
1577 | 14 | --config=lp.services.job.celeryconfig \ | ||
1578 | 15 | --hostname="$queue@%n" \ | ||
1579 | 16 | --loglevel=DEBUG \ | ||
1580 | 17 | --logfile="/var/tmp/celeryd-$queue.log" | ||
1581 | 18 | |||
1582 | 19 | } | ||
1583 | 20 | |||
1584 | 21 | stop_worker() { | ||
1585 | 22 | queue=$1 | ||
1586 | 23 | echo "Stopping worker for $queue" | ||
1587 | 24 | start-stop-daemon --oknodo --stop --pidfile "/var/tmp/celeryd-$queue.pid" | ||
1588 | 25 | } | ||
1589 | 26 | |||
1590 | 27 | case "$1" in | ||
1591 | 28 | start) | ||
1592 | 29 | for queue in launchpad_job launchpad_job_slow bzrsyncd_job bzrsyncd_job_slow branch_write_job branch_write_job_slow celerybeat | ||
1593 | 30 | do | ||
1594 | 31 | start_worker $queue | ||
1595 | 32 | done | ||
1596 | 33 | ;; | ||
1597 | 34 | stop) | ||
1598 | 35 | for queue in launchpad_job launchpad_job_slow bzrsyncd_job bzrsyncd_job_slow branch_write_job branch_write_job_slow celerybeat | ||
1599 | 36 | do | ||
1600 | 37 | stop_worker $queue | ||
1601 | 38 | done | ||
1602 | 39 | ;; | ||
1603 | 40 | |||
1604 | 41 | restart|force-reload) | ||
1605 | 42 | for queue in launchpad_job launchpad_job_slow bzrsyncd_job bzrsyncd_job_slow branch_write_job branch_write_job_slow celerybeat | ||
1606 | 43 | do | ||
1607 | 44 | stop_worker $queue | ||
1608 | 45 | done | ||
1609 | 46 | sleep 1 | ||
1610 | 47 | for queue in launchpad_job launchpad_job_slow bzrsyncd_job bzrsyncd_job_slow branch_write_job branch_write_job_slow celerybeat | ||
1611 | 48 | do | ||
1612 | 49 | start_worker $queue | ||
1613 | 50 | done | ||
1614 | 51 | echo "$NAME." | ||
1615 | 52 | ;; | ||
1616 | 53 | *) | ||
1617 | 54 | N=/etc/init.d/$NAME | ||
1618 | 55 | echo "Usage: $N {start|stop|restart|force-reload}" >&2 | ||
1619 | 56 | exit 1 | ||
1620 | 57 | ;; | ||
1621 | 58 | esac |