Merge lp:~jtv/maas/1.5-isolation into lp:~maas-committers/maas/trunk
- 1.5-isolation
- Merge into trunk
Proposed by
Jeroen T. Vermeulen
Status: | Superseded | ||||
---|---|---|---|---|---|
Proposed branch: | lp:~jtv/maas/1.5-isolation | ||||
Merge into: | lp:~maas-committers/maas/trunk | ||||
Diff against target: |
1315 lines (+716/-221) (has conflicts) 20 files modified
docs/conf.py (+53/-16) docs/hardware-enablement-kernels.rst (+96/-0) docs/installing-ubuntu.rst (+88/-0) etc/maas/templates/commissioning-user-data/snippets/maas_ipmi_autodetect.py (+8/-6) src/maascli/api.py (+1/-0) src/maasserver/api.py (+3/-3) src/maasserver/migrations/0072_remove_ipmi_autodetect.py (+9/-0) src/maasserver/models/node.py (+2/-42) src/maasserver/models/tests/test_node.py (+2/-117) src/maasserver/support/pertenant/tests/test_migration.py (+5/-9) src/maasserver/testing/factory.py (+2/-2) src/maasserver/tests/test_api_node.py (+4/-2) src/maasserver/views/nodes.py (+5/-0) src/maasserver/views/tests/test_clusters.py (+5/-5) src/maasserver/views/tests/test_general.py (+2/-1) src/maasserver/views/tests/test_nodes.py (+13/-4) src/provisioningserver/custom_hardware/seamicro.py (+9/-0) src/provisioningserver/import_images/boot_resources.py (+138/-0) src/provisioningserver/import_images/tests/test_boot_resources.py (+256/-14) src/provisioningserver/tests/test_tasks.py (+15/-0) Text conflict in docs/conf.py Text conflict in docs/hardware-enablement-kernels.rst Text conflict in docs/installing-ubuntu.rst Text conflict in src/maasserver/migrations/0072_remove_ipmi_autodetect.py Text conflict in src/maasserver/views/nodes.py Text conflict in src/maasserver/views/tests/test_nodes.py Text conflict in src/provisioningserver/custom_hardware/seamicro.py Text conflict in src/provisioningserver/import_images/boot_resources.py Text conflict in src/provisioningserver/import_images/tests/test_boot_resources.py Text conflict in src/provisioningserver/tests/test_tasks.py |
||||
To merge this branch: | bzr merge lp:~jtv/maas/1.5-isolation | ||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
MAAS Maintainers | Pending | ||
Review via email: mp+218713@code.launchpad.net |
This proposal has been superseded by a proposal from 2014-05-07.
Commit message
Backport trunk r2306: Raise isolation level from the default Read Committed to Serializable. Django 1.6 added support. In situations where previously you'd get potentially incorrect updates, you will now get an OperationalError.
Description of the change
Trivial backport. Will self-approve.
Jeroen
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'docs/conf.py' |
2 | --- docs/conf.py 2014-04-17 09:13:06 +0000 |
3 | +++ docs/conf.py 2014-05-07 22:25:58 +0000 |
4 | @@ -61,6 +61,23 @@ |
5 | |
6 | # -- General configuration ----------------------------------------------------- |
7 | |
8 | +# Add a widget to switch between different versions of the documentation to |
9 | +# each generated page. |
10 | +add_version_switcher = False |
11 | + |
12 | +# In order for the version widget to be able to redirect correctly to the |
13 | +# other versions of the documentation, each version of the documentation |
14 | +# has to be accessible at the following addresses: |
15 | +# /<doc_prefix>/ -> documentation for trunk. |
16 | +# /<doc_prefix>1.4/ -> documentation for 1.4. |
17 | +# etc. |
18 | +doc_prefix = 'docs' |
19 | + |
20 | +# Path of the JSON document, relative to homepage of the documentation for trunk |
21 | +# (i.e. '/<doc_prefix>/'), with the list of the versions to include in the |
22 | +# version switcher widget. |
23 | +versions_path = '_static/versions.js' |
24 | + |
25 | # If your documentation needs a minimal Sphinx version, state it here. |
26 | #needs_sphinx = '1.0' |
27 | |
28 | @@ -280,19 +297,39 @@ |
29 | |
30 | # Example configuration for intersphinx: refer to the Python standard library. |
31 | intersphinx_mapping = {'http://docs.python.org/': None} |
32 | - |
33 | -# Gather information about the branch and the build date. |
34 | -from subprocess import check_output |
35 | -bzr_last_revision_number = check_output(['bzr', 'revno']) |
36 | -bzr_last_revision_date = check_output(['bzr', 'version-info', '--template={date}', '--custom']) |
37 | -bzr_build_date = check_output(['bzr', 'version-info', '--template={build_date}', '--custom']) |
38 | - |
39 | -# Populate html_context with the variables used in the templates. |
40 | -html_context = { |
41 | - 'add_version_switcher': 'true' if add_version_switcher else 'false', |
42 | - 'versions_json_path': '/'.join(['', doc_prefix, versions_path]), |
43 | - 'doc_prefix': doc_prefix, |
44 | - 'bzr_last_revision_date': bzr_last_revision_date, |
45 | - 'bzr_last_revision_number': bzr_last_revision_number, |
46 | - 'bzr_build_date': bzr_build_date, |
47 | -} |
48 | +<<<<<<< TREE |
49 | + |
50 | +# Gather information about the branch and the build date. |
51 | +from subprocess import check_output |
52 | +bzr_last_revision_number = check_output(['bzr', 'revno']) |
53 | +bzr_last_revision_date = check_output(['bzr', 'version-info', '--template={date}', '--custom']) |
54 | +bzr_build_date = check_output(['bzr', 'version-info', '--template={build_date}', '--custom']) |
55 | + |
56 | +# Populate html_context with the variables used in the templates. |
57 | +html_context = { |
58 | + 'add_version_switcher': 'true' if add_version_switcher else 'false', |
59 | + 'versions_json_path': '/'.join(['', doc_prefix, versions_path]), |
60 | + 'doc_prefix': doc_prefix, |
61 | + 'bzr_last_revision_date': bzr_last_revision_date, |
62 | + 'bzr_last_revision_number': bzr_last_revision_number, |
63 | + 'bzr_build_date': bzr_build_date, |
64 | +} |
65 | +======= |
66 | + |
67 | +# Gather information about the branch and the build date. |
68 | +from subprocess import check_output |
69 | +bzr_last_revision_number = check_output(['bzr', 'revno']) |
70 | +bzr_last_revision_date = check_output(['bzr', 'version-info', '--template={date}', '--custom']) |
71 | +bzr_build_date = check_output(['bzr', 'version-info', '--template={build_date}', '--custom']) |
72 | + |
73 | +# Populate html_context with the variables used in the templates. |
74 | +html_context = { |
75 | + 'add_version_switcher': 'true' if add_version_switcher else 'false', |
76 | + 'versions_json_path': '/'.join(['', doc_prefix, versions_path]), |
77 | + 'doc_prefix': doc_prefix, |
78 | + 'bzr_last_revision_date': bzr_last_revision_date, |
79 | + 'bzr_last_revision_number': bzr_last_revision_number, |
80 | + 'bzr_build_date': bzr_build_date, |
81 | +} |
82 | + |
83 | +>>>>>>> MERGE-SOURCE |
84 | |
85 | === modified file 'docs/hardware-enablement-kernels.rst' |
86 | --- docs/hardware-enablement-kernels.rst 2014-04-14 21:42:00 +0000 |
87 | +++ docs/hardware-enablement-kernels.rst 2014-05-07 22:25:58 +0000 |
88 | @@ -1,3 +1,4 @@ |
89 | +<<<<<<< TREE |
90 | .. -*- mode: rst -*- |
91 | |
92 | .. _hardware-enablement-kernels: |
93 | @@ -91,3 +92,98 @@ |
94 | the Node's page and clicking ``Edit node``. Under the Architecture field, |
95 | you will be able to select any HWE kernels that have been imported onto |
96 | that node's cluster controller. |
97 | +======= |
98 | +.. -*- mode: rst -*- |
99 | + |
100 | +.. _hardware-enablement-kernels: |
101 | + |
102 | +================================= |
103 | +Using hardware-enablement kernels |
104 | +================================= |
105 | + |
106 | +.. note:: |
107 | + |
108 | + This feature is available in MAAS versions 1.5 and above. |
109 | + |
110 | +MAAS allows you to use hardware enablement kernels when booting nodes |
111 | +that require them. |
112 | + |
113 | +What are hardware-enablement kernels? |
114 | +------------------------------------- |
115 | + |
116 | +Brand new hardware gets released all the time. We want that hardware to |
117 | +work well with Ubuntu and MAAS, even if it was released after the latest |
118 | +release of MAAS or Ubuntu. Hardware Enablement (HWE) is all about |
119 | +keeping pace with the new hardware. |
120 | + |
121 | +Ubuntu's solution to this is to offer newer kernels for older releases. |
122 | +There are at least two kernels on offer for Ubuntu releases: the |
123 | +"generic" kernel -- i.e. the kernel released with the current series -- |
124 | +and the Hardware Enablement kernel, which is the most recent kernel |
125 | +release. |
126 | + |
127 | +There are separate HWE kernels for each release of Ubuntu, referred to |
128 | +as ``hwe-<release letter>``. So, the 14.04 / Trusty Tahr HWE kernel is |
129 | +called ``hwe-t``, the 12.10 / Quantal Quetzal HWE kernel is called |
130 | +``hwe-q`` and so on. This allows you to use newer kernels with older |
131 | +releases, for example running Precise with a Saucy (hwe-s) kernel. |
132 | + |
133 | +For more information see the `LTS Enablement Stack`_ page on the Ubuntu |
134 | +wiki. |
135 | + |
136 | +.. _LTS Enablement Stack: |
137 | + https://wiki.ubuntu.com/Kernel/LTSEnablementStack |
138 | + |
139 | +Importing hardware-enablement kernels |
140 | +------------------------------------- |
141 | + |
142 | +Hardware-enablement kernels need to be imported to a cluster controller |
143 | +before that cluster's nodes can use them. |
144 | + |
145 | +In order to import HWE kernels to a cluster controller you need to edit |
146 | +the controller's ``/etc/maas/bootresources.yaml`` file, and update the |
147 | +subarches that you want to import, like this:: |
148 | + |
149 | + boot: |
150 | + storage: "/var/lib/maas/boot-resources/" |
151 | + |
152 | + sources: |
153 | + - path: "http://maas.ubuntu.com/images/ephemeral-v2/releases/" |
154 | + keyring: "/usr/share/keyrings/ubuntu-cloudimage-keyring.gpg" |
155 | + selections: |
156 | + - release: "precise" |
157 | + arches: ["i386", "amd64"] |
158 | + subarches: ["generic", "hwe-q", "hwe-r", "hwe-s", "hwe-t"] |
159 | + labels: ["release"] |
160 | + |
161 | +Once you've updated ``bootresources.yaml``, you can tell the cluster to |
162 | +re-import its boot images using the ``maas`` command (You will need to |
163 | +:ref:`be logged in to the API first <api-key>`):: |
164 | + |
165 | + $ maas <profile-name> node-group import-boot-images \ |
166 | + <cluster-controller-uuid> |
167 | + |
168 | +You can also tell the cluster controller to re-import its boot images by |
169 | +clicking the ``Import boot images`` button in the ``Clusters`` page of |
170 | +the MAAS web UI. |
171 | + |
172 | +Using hardware-enablement kernels in MAAS |
173 | +----------------------------------------- |
174 | + |
175 | +A MAAS administrator can choose to use HWE kernels on a per-node basis |
176 | +in MAAS. |
177 | + |
178 | +The quickest way to do this is using the MAAS command, like this:: |
179 | + |
180 | + $ maas <profile-name> node update <system-id> |
181 | + architecture=amd64/hwe-t |
182 | + |
183 | +If you specify an architecture that doesn't exist (e.g. |
184 | +``amd64/hwe-z``), the ``maas`` command will return an error. |
185 | + |
186 | + |
187 | +It's also possible to use HWE kernels from the MAAS web UI, by visiting |
188 | +the Node's page and clicking ``Edit node``. Under the Architecture field, |
189 | +you will be able to select any HWE kernels that have been imported onto |
190 | +that node's cluster controller. |
191 | +>>>>>>> MERGE-SOURCE |
192 | |
193 | === modified file 'docs/installing-ubuntu.rst' |
194 | --- docs/installing-ubuntu.rst 2014-05-05 16:20:18 +0000 |
195 | +++ docs/installing-ubuntu.rst 2014-05-07 22:25:58 +0000 |
196 | @@ -1,3 +1,4 @@ |
197 | +<<<<<<< TREE |
198 | ===================================== |
199 | Installing Ubuntu and deploying nodes |
200 | ===================================== |
201 | @@ -80,3 +81,90 @@ |
202 | on Launchpad. |
203 | |
204 | .. _curtin project: https://launchpad.net/curtin |
205 | +======= |
206 | +===================================== |
207 | +Installing Ubuntu and deploying nodes |
208 | +===================================== |
209 | + |
210 | +Once a node has been accepted into MAAS and is ready for use, users can |
211 | +deploy services to that node. |
212 | + |
213 | +Prior to deployment, MAAS is responsible for: |
214 | + |
215 | +1. Powering up the node. |
216 | +2. Installing Ubuntu on the node. |
217 | +3. Installing the user's SSH keys on the node. |
218 | + |
219 | +Once these steps have been completed, the node is ready to have services |
220 | +deployed to it, either manually or by using a tool like Juju_. |
221 | + |
222 | +There are two ways to install Ubuntu on a node: |
223 | + |
224 | +1. :ref:`The default installer <default-installer>`. |
225 | +2. :ref:`The fast installer <fast-installer>`. |
226 | + |
227 | +.. _Juju: http://juju.ubuntu.com |
228 | + |
229 | +.. _default-installer: |
230 | + |
231 | +The default installer |
232 | +---------------------- |
233 | + |
234 | +The default installer installs Ubuntu on a node in exactly the same way |
235 | +as you would install it manually: using the `Debian Installer`_. |
236 | +Installation is handled by the Debian installer. Answers to the |
237 | +questions asked by the installer are provided in a 'preseed' file. For |
238 | +more information on preseed files, see the :ref:`Additional |
239 | +Configuration <preseed>` page. |
240 | + |
241 | +As the name suggests, the default installer is enabled by default for |
242 | +all new nodes. To enable the default installer for a node that's been |
243 | +configured to use the fast installer, visit the node's page as an |
244 | +administrator and click the ``Use the default installer`` button. |
245 | + |
246 | +.. image:: media/node-page-use-default-installer.png |
247 | + |
248 | +To set multiple nodes to use the default installer, select the ``Mark |
249 | +nodes as using the default installer`` option from the bulk action menu |
250 | +on the ``Nodes`` page in the MAAS web UI. |
251 | + |
252 | +Because it installs Ubuntu from scratch, downloading packages as |
253 | +required, the default installer is slower than the :ref:`fast installer |
254 | +<fast-installer>`. |
255 | + |
256 | +.. _Debian Installer: http://www.debian.org/devel/debian-installer/ |
257 | + |
258 | +.. _fast-installer: |
259 | + |
260 | +The fast installer |
261 | +------------------ |
262 | + |
263 | +The fast installer is, as the name suggests, a means of installing |
264 | +Ubuntu on a node more quickly than would be possible using the |
265 | +:ref:`default installer <default-installer>`. |
266 | + |
267 | +To enable the fast installer for a node, visit the node's page as an |
268 | +administrator and click the ``Use the fast installer`` button. |
269 | + |
270 | +.. image:: media/node-page-use-fast-installer.png |
271 | + |
272 | +To set multiple nodes to use the fast installer, select the ``Mark nodes |
273 | +as using the ``fast installer`` option from the bulk action menu on the |
274 | +``Nodes`` page in the MAAS web UI. |
275 | + |
276 | +The fast installer copies a pre-built Ubuntu image to the node, with all |
277 | +the packages installed that would be normally found in an Ubuntu |
278 | +installation. |
279 | + |
280 | +The fast installer is much quicker than the default installer, but has |
281 | +the disadvantage that it's less easy to configure a node at install |
282 | +time, since the fast installer doesn't use a :ref:`preseed file |
283 | +<preseed>`. In addition, the packages that are initially installed on a |
284 | +fast-installed node need updating manually, since they are part of the |
285 | +installation image and not downloaded fresh from an apt repository. |
286 | + |
287 | +For more information about the fast installer, see the `curtin project`_ |
288 | +on Launchpad. |
289 | + |
290 | +.. _curtin project: https://launchpad.net/curtin |
291 | +>>>>>>> MERGE-SOURCE |
292 | |
293 | === modified file 'etc/maas/templates/commissioning-user-data/snippets/maas_ipmi_autodetect.py' |
294 | --- etc/maas/templates/commissioning-user-data/snippets/maas_ipmi_autodetect.py 2014-04-04 06:46:05 +0000 |
295 | +++ etc/maas/templates/commissioning-user-data/snippets/maas_ipmi_autodetect.py 2014-05-07 22:25:58 +0000 |
296 | @@ -165,10 +165,12 @@ |
297 | |
298 | for key, expected_value in user_settings.iteritems(): |
299 | # Password isn't included in checkout. |
300 | - if key != 'Password': |
301 | - value = bmc_user_get(user_number, key) |
302 | - if value != expected_value: |
303 | - bad_values[key] = value |
304 | + if key == 'Password': |
305 | + continue |
306 | + |
307 | + actual_value = bmc_user_get(user_number, key) |
308 | + if expected_value != actual_value: |
309 | + bad_values[key] = actual_value |
310 | |
311 | if len(bad_values) == 0: |
312 | return |
313 | @@ -177,8 +179,8 @@ |
314 | "for '%s', expected '%s', actual '%s';" % ( |
315 | key, user_settings[key], actual_value) |
316 | for key, actual_value in bad_values.iteritems() |
317 | - ]).rstrip(';') |
318 | - message = "IPMI user setting verification failures: %s." % (errors_string) |
319 | + ]).rstrip(';') |
320 | + message = 'IPMI user setting verification failures: %s.' % (errors_string) |
321 | raise IPMIError(message) |
322 | |
323 | |
324 | |
325 | === modified file 'etc/maas/templates/power/amt.template' |
326 | === modified file 'src/maascli/api.py' |
327 | --- src/maascli/api.py 2014-04-04 06:46:05 +0000 |
328 | +++ src/maascli/api.py 2014-05-07 22:25:58 +0000 |
329 | @@ -21,6 +21,7 @@ |
330 | from email.message import Message |
331 | from functools import partial |
332 | import httplib |
333 | +from itertools import chain |
334 | import json |
335 | from operator import itemgetter |
336 | import re |
337 | |
338 | === modified file 'src/maasserver/api.py' |
339 | --- src/maasserver/api.py 2014-05-07 02:44:10 +0000 |
340 | +++ src/maasserver/api.py 2014-05-07 22:25:58 +0000 |
341 | @@ -2310,10 +2310,10 @@ |
342 | line(doc.handler.__doc__) |
343 | line() |
344 | line() |
345 | - for (http_method, op), function in sorted(exports): |
346 | + for (http_method, operation), function in sorted(exports): |
347 | line("``%s %s``" % (http_method, uri_template), end="") |
348 | - if op is not None: |
349 | - line(" ``op=%s``" % op) |
350 | + if operation is not None: |
351 | + line(" ``op=%s``" % operation) |
352 | line() |
353 | docstring = getdoc(function) |
354 | if docstring is not None: |
355 | |
356 | === modified file 'src/maasserver/forms.py' |
357 | === modified file 'src/maasserver/migrations/0072_remove_ipmi_autodetect.py' |
358 | --- src/maasserver/migrations/0072_remove_ipmi_autodetect.py 2014-04-10 11:13:41 +0000 |
359 | +++ src/maasserver/migrations/0072_remove_ipmi_autodetect.py 2014-05-07 22:25:58 +0000 |
360 | @@ -1,7 +1,16 @@ |
361 | +<<<<<<< TREE |
362 | from django.db.utils import ProgrammingError |
363 | from maasserver import logger |
364 | +======= |
365 | +>>>>>>> MERGE-SOURCE |
366 | from provisioningserver.power_schema import IPMI_DRIVER |
367 | +<<<<<<< TREE |
368 | +======= |
369 | +# -*- coding: utf-8 -*- |
370 | +>>>>>>> MERGE-SOURCE |
371 | from south.v2 import DataMigration |
372 | +from django.db.utils import ProgrammingError |
373 | +from maasserver import logger |
374 | |
375 | |
376 | class Migration(DataMigration): |
377 | |
378 | === modified file 'src/maasserver/models/node.py' |
379 | --- src/maasserver/models/node.py 2014-05-07 02:44:10 +0000 |
380 | +++ src/maasserver/models/node.py 2014-05-07 22:25:58 +0000 |
381 | @@ -23,7 +23,6 @@ |
382 | repeat, |
383 | ) |
384 | import random |
385 | -import re |
386 | from string import whitespace |
387 | from uuid import uuid1 |
388 | |
389 | @@ -153,43 +152,6 @@ |
390 | """Raised when a node has an unknown power type.""" |
391 | |
392 | |
393 | -def validate_hostname(hostname): |
394 | - """Validator for hostnames. |
395 | - |
396 | - :param hostname: Input value for a host name. May include domain. |
397 | - :raise ValidationError: If the hostname is not valid according to RFCs 952 |
398 | - and 1123. |
399 | - """ |
400 | - # Valid characters within a hostname label: ASCII letters, ASCII digits, |
401 | - # hyphens, and underscores. Not all are always valid. |
402 | - # Technically we could write all of this as a single regex, but it's not |
403 | - # very good for code maintenance. |
404 | - label_chars = re.compile('[a-zA-Z0-9_-]*$') |
405 | - |
406 | - if len(hostname) > 255: |
407 | - raise ValidationError( |
408 | - "Hostname is too long. Maximum allowed is 255 characters.") |
409 | - # A hostname consists of "labels" separated by dots. |
410 | - labels = hostname.split('.') |
411 | - if '_' in labels[0]: |
412 | - # The host label cannot contain underscores; the rest of the name can. |
413 | - raise ValidationError( |
414 | - "Host label cannot contain underscore: %r." % labels[0]) |
415 | - for label in labels: |
416 | - if len(label) == 0: |
417 | - raise ValidationError("Hostname contains empty name.") |
418 | - if len(label) > 63: |
419 | - raise ValidationError( |
420 | - "Name is too long: %r. Maximum allowed is 63 characters." |
421 | - % label) |
422 | - if label.startswith('-') or label.endswith('-'): |
423 | - raise ValidationError( |
424 | - "Name cannot start or end with hyphen: %r." % label) |
425 | - if not label_chars.match(label): |
426 | - raise ValidationError( |
427 | - "Name contains disallowed characters: %r." % label) |
428 | - |
429 | - |
430 | class NodeManager(Manager): |
431 | """A utility to manage the collection of Nodes.""" |
432 | |
433 | @@ -446,7 +408,7 @@ |
434 | |
435 | :ivar system_id: The unique identifier for this `Node`. |
436 | (e.g. 'node-41eba45e-4cfa-11e1-a052-00225f89f211'). |
437 | - :ivar hostname: This `Node`'s hostname. Must conform to RFCs 952 and 1123. |
438 | + :ivar hostname: This `Node`'s hostname. |
439 | :ivar status: This `Node`'s status. See the vocabulary |
440 | :class:`NODE_STATUS`. |
441 | :ivar owner: This `Node`'s owner if it's in use, None otherwise. |
442 | @@ -466,9 +428,7 @@ |
443 | max_length=41, unique=True, default=generate_node_system_id, |
444 | editable=False) |
445 | |
446 | - hostname = CharField( |
447 | - max_length=255, default='', blank=True, unique=True, |
448 | - validators=[validate_hostname]) |
449 | + hostname = CharField(max_length=255, default='', blank=True, unique=True) |
450 | |
451 | status = IntegerField( |
452 | max_length=10, choices=NODE_STATUS_CHOICES, editable=False, |
453 | |
454 | === modified file 'src/maasserver/models/tests/test_node.py' |
455 | --- src/maasserver/models/tests/test_node.py 2014-05-07 02:44:10 +0000 |
456 | +++ src/maasserver/models/tests/test_node.py 2014-05-07 22:25:58 +0000 |
457 | @@ -40,7 +40,6 @@ |
458 | from maasserver.models.node import ( |
459 | generate_hostname, |
460 | NODE_TRANSITIONS, |
461 | - validate_hostname, |
462 | ) |
463 | from maasserver.models.user import create_auth_token |
464 | from maasserver.testing import reload_object |
465 | @@ -84,114 +83,6 @@ |
466 | self.assertEqual(sizes, [len(hostname) for hostname in hostnames]) |
467 | |
468 | |
469 | -class TestHostnameValidator(MAASTestCase): |
470 | - """Tests for the validation of hostnames. |
471 | - |
472 | - Specifications based on: |
473 | - http://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names |
474 | - |
475 | - This does not support Internationalized Domain Names. To do so, we'd have |
476 | - to accept and store unicode, but use the Punycode-encoded version. The |
477 | - validator would have to validate both versions: the unicode input for |
478 | - invalid characters, and the encoded version for length. |
479 | - """ |
480 | - def make_maximum_hostname(self): |
481 | - """Create a hostname of the maximum permitted length. |
482 | - |
483 | - The maximum permitted length is 255 characters. The last label in the |
484 | - hostname will not be of the maximum length, so tests can still append a |
485 | - character to it without creating an invalid label. |
486 | - |
487 | - The hostname is not randomised, so do not count on it being unique. |
488 | - """ |
489 | - # A hostname may contain any number of labels, separated by dots. |
490 | - # Each of the labels has a maximum length of 63 characters, so this has |
491 | - # to be built up from multiple labels. |
492 | - ten_chars = ('a' * 9) + '.' |
493 | - hostname = ten_chars * 25 + ('b' * 5) |
494 | - self.assertEqual(255, len(hostname)) |
495 | - return hostname |
496 | - |
497 | - def assertAccepts(self, hostname): |
498 | - """Assertion: the validator accepts `hostname`.""" |
499 | - try: |
500 | - validate_hostname(hostname) |
501 | - except ValidationError as e: |
502 | - raise AssertionError(unicode(e)) |
503 | - |
504 | - def assertRejects(self, hostname): |
505 | - """Assertion: the validator rejects `hostname`.""" |
506 | - self.assertRaises(ValidationError, validate_hostname, hostname) |
507 | - |
508 | - def test_accepts_ascii_letters(self): |
509 | - self.assertAccepts('abcde') |
510 | - |
511 | - def test_accepts_dots(self): |
512 | - self.assertAccepts('abc.def') |
513 | - |
514 | - def test_rejects_adjacent_dots(self): |
515 | - self.assertRejects('abc..def') |
516 | - |
517 | - def test_rejects_leading_dot(self): |
518 | - self.assertRejects('.abc') |
519 | - |
520 | - def test_rejects_trailing_dot(self): |
521 | - self.assertRejects('abc.') |
522 | - |
523 | - def test_accepts_ascii_digits(self): |
524 | - self.assertAccepts('abc123') |
525 | - |
526 | - def test_accepts_leading_digits(self): |
527 | - # Leading digits used to be forbidden, but are now allowed. |
528 | - self.assertAccepts('123abc') |
529 | - |
530 | - def test_rejects_whitespace(self): |
531 | - self.assertRejects('a b') |
532 | - self.assertRejects('a\nb') |
533 | - self.assertRejects('a\tb') |
534 | - |
535 | - def test_rejects_other_ascii_characters(self): |
536 | - self.assertRejects('a?b') |
537 | - self.assertRejects('a!b') |
538 | - self.assertRejects('a,b') |
539 | - self.assertRejects('a:b') |
540 | - self.assertRejects('a;b') |
541 | - self.assertRejects('a+b') |
542 | - self.assertRejects('a=b') |
543 | - |
544 | - def test_accepts_underscore_in_domain(self): |
545 | - self.assertAccepts('host.local_domain') |
546 | - |
547 | - def test_rejects_underscore_in_host(self): |
548 | - self.assertRejects('host_name.local') |
549 | - |
550 | - def test_accepts_hyphen(self): |
551 | - self.assertAccepts('a-b') |
552 | - |
553 | - def test_rejects_hyphen_at_start_of_label(self): |
554 | - self.assertRejects('-ab') |
555 | - |
556 | - def test_rejects_hyphen_at_end_of_label(self): |
557 | - self.assertRejects('ab-') |
558 | - |
559 | - def test_accepts_maximum_valid_length(self): |
560 | - self.assertAccepts(self.make_maximum_hostname()) |
561 | - |
562 | - def test_rejects_oversized_hostname(self): |
563 | - self.assertRejects(self.make_maximum_hostname() + 'x') |
564 | - |
565 | - def test_accepts_maximum_label_length(self): |
566 | - self.assertAccepts('a' * 63) |
567 | - |
568 | - def test_rejects_oversized_label(self): |
569 | - self.assertRejects('b' * 64) |
570 | - |
571 | - def test_rejects_nonascii_letter(self): |
572 | - # The \u03be is the Greek letter xi. Perfectly good letter, just not |
573 | - # ASCII. |
574 | - self.assertRejects('\u03be') |
575 | - |
576 | - |
577 | class NodeTest(MAASServerTestCase): |
578 | |
579 | def test_system_id(self): |
580 | @@ -203,12 +94,6 @@ |
581 | self.assertEqual(len(node.system_id), 41) |
582 | self.assertTrue(node.system_id.startswith('node-')) |
583 | |
584 | - def test_hostname_is_validated(self): |
585 | - bad_hostname = '-_?!@*-' |
586 | - self.assertRaises( |
587 | - ValidationError, |
588 | - factory.make_node, hostname=bad_hostname) |
589 | - |
590 | def test_work_queue_returns_nodegroup_uuid(self): |
591 | nodegroup = factory.make_node_group() |
592 | node = factory.make_node(nodegroup=nodegroup) |
593 | @@ -332,14 +217,14 @@ |
594 | Config.objects.set_config("enlistment_domain", '') |
595 | existing_node = factory.make_node(hostname='hostname') |
596 | |
597 | - hostnames = [existing_node.hostname, "new-hostname"] |
598 | + hostnames = [existing_node.hostname, "new_hostname"] |
599 | self.patch( |
600 | node_module, "generate_hostname", |
601 | lambda size: hostnames.pop(0)) |
602 | |
603 | node = factory.make_node() |
604 | node.set_random_hostname() |
605 | - self.assertEqual('new-hostname', node.hostname) |
606 | + self.assertEqual('new_hostname', node.hostname) |
607 | |
608 | def test_get_effective_power_type_raises_if_not_set(self): |
609 | node = factory.make_node(power_type='') |
610 | |
611 | === modified file 'src/maasserver/support/pertenant/tests/test_migration.py' |
612 | --- src/maasserver/support/pertenant/tests/test_migration.py 2014-04-04 06:46:05 +0000 |
613 | +++ src/maasserver/support/pertenant/tests/test_migration.py 2014-05-07 22:25:58 +0000 |
614 | @@ -156,18 +156,14 @@ |
615 | self.assertEqual(get_legacy_user(), get_destination_user()) |
616 | |
617 | def test_get_destination_user_with_user_from_juju_state(self): |
618 | - user = factory.make_user() |
619 | - # Also create another user. |
620 | - factory.make_user() |
621 | - node = factory.make_node(owner=user) |
622 | + user1, user2 = factory.make_user(), factory.make_user() |
623 | + node = factory.make_node(owner=user1) |
624 | make_provider_state_file(node) |
625 | - self.assertEqual(user, get_destination_user()) |
626 | + self.assertEqual(user1, get_destination_user()) |
627 | |
628 | def test_get_destination_user_with_orphaned_juju_state(self): |
629 | - user = factory.make_user() |
630 | - # Also create another user. |
631 | - factory.make_user() |
632 | - node = factory.make_node(owner=user) |
633 | + user1, user2 = factory.make_user(), factory.make_user() |
634 | + node = factory.make_node(owner=user1) |
635 | make_provider_state_file(node) |
636 | node.delete() # Orphan the state. |
637 | self.assertEqual(get_legacy_user(), get_destination_user()) |
638 | |
639 | === modified file 'src/maasserver/testing/factory.py' |
640 | --- src/maasserver/testing/factory.py 2014-05-06 21:44:04 +0000 |
641 | +++ src/maasserver/testing/factory.py 2014-05-07 22:25:58 +0000 |
642 | @@ -648,8 +648,8 @@ |
643 | else: |
644 | vlan_tags = [None] * number |
645 | return [ |
646 | - self.make_network(vlan_tag=tag, **kwargs) |
647 | - for tag in vlan_tags |
648 | + self.make_network(vlan_tag=vlan_tag, **kwargs) |
649 | + for vlan_tag in vlan_tags |
650 | ] |
651 | |
652 | def make_boot_source(self, cluster=None, url=None, |
653 | |
654 | === modified file 'src/maasserver/tests/test_api_node.py' |
655 | --- src/maasserver/tests/test_api_node.py 2014-04-25 18:03:38 +0000 |
656 | +++ src/maasserver/tests/test_api_node.py 2014-05-07 22:25:58 +0000 |
657 | @@ -503,12 +503,14 @@ |
658 | hostname='diane', owner=self.logged_in_user, |
659 | architecture=make_usable_architecture(self)) |
660 | response = self.client_put( |
661 | - self.get_node_uri(node), {'hostname': '.'}) |
662 | + self.get_node_uri(node), {'hostname': 'too long' * 100}) |
663 | parsed_result = json.loads(response.content) |
664 | |
665 | self.assertEqual(httplib.BAD_REQUEST, response.status_code) |
666 | self.assertEqual( |
667 | - {'hostname': ["Hostname contains empty name."]}, |
668 | + {'hostname': |
669 | + ['Ensure this value has at most 255 characters ' |
670 | + '(it has 800).']}, |
671 | parsed_result) |
672 | |
673 | def test_PUT_refuses_to_update_invisible_node(self): |
674 | |
675 | === modified file 'src/maasserver/tests/test_api_nodegroup.py' |
676 | === modified file 'src/maasserver/tests/test_forms.py' |
677 | === modified file 'src/maasserver/views/nodes.py' |
678 | --- src/maasserver/views/nodes.py 2014-04-25 18:03:38 +0000 |
679 | +++ src/maasserver/views/nodes.py 2014-05-07 22:25:58 +0000 |
680 | @@ -24,8 +24,12 @@ |
681 | 'prefetch_nodes_listing', |
682 | ] |
683 | |
684 | +<<<<<<< TREE |
685 | from cgi import escape |
686 | from textwrap import dedent |
687 | +======= |
688 | +from cgi import escape |
689 | +>>>>>>> MERGE-SOURCE |
690 | from urllib import urlencode |
691 | |
692 | from django.conf import settings as django_settings |
693 | @@ -90,6 +94,7 @@ |
694 | ) |
695 | from metadataserver.models import NodeCommissionResult |
696 | from provisioningserver.tags import merge_details_cleanly |
697 | +from textwrap import dedent |
698 | |
699 | |
700 | def get_longpoll_context(): |
701 | |
702 | === modified file 'src/maasserver/views/tests/test_clusters.py' |
703 | --- src/maasserver/views/tests/test_clusters.py 2014-04-04 06:46:05 +0000 |
704 | +++ src/maasserver/views/tests/test_clusters.py 2014-05-07 22:25:58 +0000 |
705 | @@ -262,11 +262,11 @@ |
706 | self.client_log_in(as_admin=True) |
707 | nodegroup = factory.make_node_group() |
708 | interfaces = set() |
709 | - for _ in range(3): |
710 | - interfaces.add( |
711 | - factory.make_node_group_interface( |
712 | - nodegroup=nodegroup, |
713 | - management=NODEGROUPINTERFACE_MANAGEMENT.UNMANAGED)) |
714 | + for i in range(3): |
715 | + interface = factory.make_node_group_interface( |
716 | + nodegroup=nodegroup, |
717 | + management=NODEGROUPINTERFACE_MANAGEMENT.UNMANAGED) |
718 | + interfaces.add(interface) |
719 | links = get_content_links( |
720 | self.client.get(reverse('cluster-edit', args=[nodegroup.uuid]))) |
721 | interface_edit_links = [ |
722 | |
723 | === modified file 'src/maasserver/views/tests/test_general.py' |
724 | --- src/maasserver/views/tests/test_general.py 2014-04-04 06:46:05 +0000 |
725 | +++ src/maasserver/views/tests/test_general.py 2014-05-07 22:25:58 +0000 |
726 | @@ -418,7 +418,8 @@ |
727 | # to display all the errors. |
728 | component = factory.make_name('component') |
729 | error_message = factory.make_name('error') |
730 | - errors.append(Fault(fault, error_message)) |
731 | + error = Fault(fault, error_message) |
732 | + errors.append(error) |
733 | register_persistent_error(component, error_message) |
734 | links = [ |
735 | reverse('index'), |
736 | |
737 | === modified file 'src/maasserver/views/tests/test_nodes.py' |
738 | --- src/maasserver/views/tests/test_nodes.py 2014-04-25 18:03:38 +0000 |
739 | +++ src/maasserver/views/tests/test_nodes.py 2014-05-07 22:25:58 +0000 |
740 | @@ -16,6 +16,7 @@ |
741 | |
742 | from cgi import escape |
743 | import httplib |
744 | +from cgi import escape |
745 | from operator import attrgetter |
746 | import os |
747 | from random import randint |
748 | @@ -30,6 +31,7 @@ |
749 | from lxml.etree import XPath |
750 | from lxml.html import fromstring |
751 | import maasserver.api |
752 | +from maasserver.third_party_drivers import get_third_party_driver |
753 | from maasserver.enum import ( |
754 | NODE_STATUS, |
755 | NODEGROUP_STATUS, |
756 | @@ -68,10 +70,17 @@ |
757 | from maasserver.views import nodes as nodes_views |
758 | from maasserver.views.nodes import message_from_form_stats |
759 | from maastesting.djangotestcase import count_queries |
760 | -from metadataserver.models.commissioningscript import ( |
761 | - LIST_MODALIASES_OUTPUT_NAME, |
762 | - LLDP_OUTPUT_NAME, |
763 | - ) |
764 | +<<<<<<< TREE |
765 | +from metadataserver.models.commissioningscript import ( |
766 | + LIST_MODALIASES_OUTPUT_NAME, |
767 | + LLDP_OUTPUT_NAME, |
768 | + ) |
769 | +======= |
770 | +from metadataserver.models.commissioningscript import ( |
771 | + LLDP_OUTPUT_NAME, |
772 | + LIST_MODALIASES_OUTPUT_NAME, |
773 | + ) |
774 | +>>>>>>> MERGE-SOURCE |
775 | from testtools.matchers import ContainsAll |
776 | |
777 | |
778 | |
779 | === modified file 'src/provisioningserver/custom_hardware/seamicro.py' |
780 | --- src/provisioningserver/custom_hardware/seamicro.py 2014-04-08 20:58:05 +0000 |
781 | +++ src/provisioningserver/custom_hardware/seamicro.py 2014-05-07 22:25:58 +0000 |
782 | @@ -24,8 +24,17 @@ |
783 | import urlparse |
784 | |
785 | import provisioningserver.custom_hardware.utils as utils |
786 | +<<<<<<< TREE |
787 | from seamicroclient import exceptions as seamicro_exceptions |
788 | from seamicroclient.v2 import client as seamicro_client |
789 | +======= |
790 | +from seamicroclient.v2 import ( |
791 | + client as seamicro_client, |
792 | + ) |
793 | +from seamicroclient import ( |
794 | + exceptions as seamicro_exceptions, |
795 | + ) |
796 | +>>>>>>> MERGE-SOURCE |
797 | |
798 | |
799 | logger = logging.getLogger(__name__) |
800 | |
801 | === modified file 'src/provisioningserver/import_images/boot_resources.py' |
802 | --- src/provisioningserver/import_images/boot_resources.py 2014-04-28 14:19:28 +0000 |
803 | +++ src/provisioningserver/import_images/boot_resources.py 2014-05-07 22:25:58 +0000 |
804 | @@ -79,6 +79,132 @@ |
805 | return entry |
806 | |
807 | |
808 | +<<<<<<< TREE |
809 | +======= |
810 | +def mirror_info_for_path(path, unsigned_policy=None, keyring=None): |
811 | + if unsigned_policy is None: |
812 | + unsigned_policy = lambda content, path, keyring: content |
813 | + (mirror, rpath) = path_from_mirror_url(path, None) |
814 | + policy = policy_read_signed |
815 | + if rpath.endswith(".json"): |
816 | + policy = unsigned_policy |
817 | + if keyring: |
818 | + policy = functools.partial(policy, keyring=keyring) |
819 | + return(mirror, rpath, policy) |
820 | + |
821 | + |
822 | +class RepoDumper(BasicMirrorWriter): |
823 | + |
824 | + def dump(self, path, keyring=None): |
825 | + self._boot = create_empty_hierarchy() |
826 | + (mirror, rpath, policy) = mirror_info_for_path(path, keyring=keyring) |
827 | + reader = UrlMirrorReader(mirror, policy=policy) |
828 | + super(RepoDumper, self).sync(reader, rpath) |
829 | + return self._boot |
830 | + |
831 | + def load_products(self, path=None, content_id=None): |
832 | + return |
833 | + |
834 | + def item_cleanup(self, item): |
835 | + keys_to_keep = ['content_id', 'product_name', 'version_name', 'path'] |
836 | + compact_item = {key: item[key] for key in keys_to_keep} |
837 | + return compact_item |
838 | + |
839 | + def insert_item(self, data, src, target, pedigree, contentsource): |
840 | + item = products_exdata(src, pedigree) |
841 | + arch, subarches = item['arch'], item['subarches'] |
842 | + release = item['release'] |
843 | + label = item['label'] |
844 | + compact_item = self.item_cleanup(item) |
845 | + for subarch in subarches.split(','): |
846 | + if not self._boot[arch][subarch][release][label]: |
847 | + self._boot[arch][subarch][release][label] = compact_item |
848 | + |
849 | + |
850 | +class RepoWriter(BasicMirrorWriter): |
851 | + |
852 | + def __init__(self, root_path, cache_path, info): |
853 | + self._root_path = os.path.abspath(root_path) |
854 | + self._info = info |
855 | + self._cache = FileStore(os.path.abspath(cache_path)) |
856 | + super(RepoWriter, self).__init__() |
857 | + |
858 | + def write(self, path, keyring=None): |
859 | + (mirror, rpath, policy) = mirror_info_for_path(path, keyring=keyring) |
860 | + reader = UrlMirrorReader(mirror, policy=policy) |
861 | + super(RepoWriter, self).sync(reader, rpath) |
862 | + |
863 | + def load_products(self, path=None, content_id=None): |
864 | + return |
865 | + |
866 | + def filter_version(self, data, src, target, pedigree): |
867 | + item = products_exdata(src, pedigree) |
868 | + content_id, product_name = item['content_id'], item['product_name'] |
869 | + version_name = item['version_name'] |
870 | + return ( |
871 | + content_id in self._info and |
872 | + product_name in self._info[content_id] and |
873 | + version_name in self._info[content_id][product_name] |
874 | + ) |
875 | + |
876 | + def insert_file(self, name, tag, checksums, size, contentsource): |
877 | + logger.info("Inserting file %s (tag=%s, size=%s).", name, tag, size) |
878 | + self._cache.insert( |
879 | + tag, contentsource, checksums, mutable=False, size=size) |
880 | + return [(self._cache._fullpath(tag), name)] |
881 | + |
882 | + def insert_root_image(self, tag, checksums, size, contentsource): |
883 | + root_image_tag = 'root-image-%s' % tag |
884 | + root_image_path = self._cache._fullpath(root_image_tag) |
885 | + root_tgz_tag = 'root-tgz-%s' % tag |
886 | + root_tgz_path = self._cache._fullpath(root_tgz_tag) |
887 | + if not os.path.isfile(root_image_path): |
888 | + logger.info("New root image: %s.", root_image_path) |
889 | + self._cache.insert( |
890 | + tag, contentsource, checksums, mutable=False, size=size) |
891 | + uncompressed = FdContentSource( |
892 | + GzipFile(self._cache._fullpath(tag))) |
893 | + self._cache.insert(root_image_tag, uncompressed, mutable=False) |
894 | + self._cache.remove(tag) |
895 | + if not os.path.isfile(root_tgz_path): |
896 | + logger.info("Converting root tarball: %s.", root_tgz_path) |
897 | + call_uec2roottar(root_image_path, root_tgz_path) |
898 | + return [(root_image_path, 'root-image'), (root_tgz_path, 'root-tgz')] |
899 | + |
900 | + def insert_item(self, data, src, target, pedigree, contentsource): |
901 | + item = products_exdata(src, pedigree) |
902 | + checksums = item_checksums(data) |
903 | + tag = checksums['sha256'] |
904 | + size = data['size'] |
905 | + ftype = item['ftype'] |
906 | + if ftype == 'root-image.gz': |
907 | + links = self.insert_root_image(tag, checksums, size, contentsource) |
908 | + else: |
909 | + links = self.insert_file( |
910 | + ftype, tag, checksums, size, contentsource) |
911 | + content_id = item['content_id'] |
912 | + prod_name = item['product_name'] |
913 | + version_name = item['version_name'] |
914 | + for subarch in self._info[content_id][prod_name][version_name]: |
915 | + dst_folder = os.path.join( |
916 | + self._root_path, item['arch'], subarch, item['release'], |
917 | + item['label']) |
918 | + if not os.path.exists(dst_folder): |
919 | + os.makedirs(dst_folder) |
920 | + for src, link_name in links: |
921 | + link_path = os.path.join(dst_folder, link_name) |
922 | + if os.path.isfile(link_path): |
923 | + os.remove(link_path) |
924 | + os.link(src, link_path) |
925 | + |
926 | + |
927 | +def available_boot_resources(root): |
928 | + for resource_path in glob.glob(os.path.join(root, '*/*/*/*')): |
929 | + arch, subarch, release, label = resource_path.split('/')[-4:] |
930 | + yield (arch, subarch, release, label) |
931 | + |
932 | + |
933 | +>>>>>>> MERGE-SOURCE |
934 | def install_boot_loaders(destination): |
935 | """Install the all the required file from each bootloader method. |
936 | :param destination: Directory where the loaders should be stored. |
937 | @@ -195,7 +321,19 @@ |
938 | return |
939 | |
940 | storage = config['boot']['storage'] |
941 | +<<<<<<< TREE |
942 | meta_file_content = image_descriptions.dump_json() |
943 | +======= |
944 | + |
945 | + boot = create_empty_hierarchy() |
946 | + dumper = RepoDumper() |
947 | + |
948 | + for source in config['boot']['sources']: |
949 | + repo_boot = dumper.dump(source['path'], keyring=source['keyring']) |
950 | + boot_merge(boot, repo_boot, source['selections']) |
951 | + |
952 | + meta_file_content = json.dumps(boot, sort_keys=True) |
953 | +>>>>>>> MERGE-SOURCE |
954 | if meta_contains(storage, meta_file_content): |
955 | # The current maas.meta already contains the new config. No need to |
956 | # rewrite anything. |
957 | |
958 | === modified file 'src/provisioningserver/import_images/tests/test_boot_resources.py' |
959 | --- src/provisioningserver/import_images/tests/test_boot_resources.py 2014-04-28 14:19:28 +0000 |
960 | +++ src/provisioningserver/import_images/tests/test_boot_resources.py 2014-05-07 22:25:58 +0000 |
961 | @@ -15,8 +15,6 @@ |
962 | __all__ = [] |
963 | |
964 | import errno |
965 | -import hashlib |
966 | -import json |
967 | import os |
968 | from random import randint |
969 | from subprocess import ( |
970 | @@ -25,12 +23,15 @@ |
971 | ) |
972 | |
973 | from maastesting.factory import factory |
974 | +<<<<<<< TREE |
975 | from maastesting.matchers import MockAnyCall |
976 | +======= |
977 | +>>>>>>> MERGE-SOURCE |
978 | from maastesting.testcase import MAASTestCase |
979 | -import mock |
980 | -from provisioningserver.boot.uefi import UEFIBootMethod |
981 | +from mock import MagicMock |
982 | from provisioningserver.config import BootConfig |
983 | from provisioningserver.import_images import boot_resources |
984 | +<<<<<<< TREE |
985 | from provisioningserver.import_images.boot_image_mapping import ( |
986 | BootImageMapping, |
987 | ) |
988 | @@ -44,8 +45,11 @@ |
989 | FileExists, |
990 | ) |
991 | import yaml |
992 | - |
993 | - |
994 | +======= |
995 | +>>>>>>> MERGE-SOURCE |
996 | + |
997 | + |
998 | +<<<<<<< TREE |
999 | class TestTgtEntry(MAASTestCase): |
1000 | """Tests for `tgt_entry`.""" |
1001 | |
1002 | @@ -95,8 +99,241 @@ |
1003 | return summer.hexdigest() |
1004 | |
1005 | |
1006 | +======= |
1007 | +def make_image_spec(): |
1008 | + """Return an `ImageSpec` with random values.""" |
1009 | + return boot_resources.ImageSpec( |
1010 | + factory.make_name('arch'), |
1011 | + factory.make_name('subarch'), |
1012 | + factory.make_name('release'), |
1013 | + factory.make_name('label'), |
1014 | + ) |
1015 | + |
1016 | + |
1017 | +class TestIterateBootResources(MAASTestCase): |
1018 | + """Tests for `iterate_boot_resources`.""" |
1019 | + |
1020 | + def test_empty_hierarchy_yields_nothing(self): |
1021 | + self.assertItemsEqual( |
1022 | + [], |
1023 | + boot_resources.iterate_boot_resources( |
1024 | + boot_resources.create_empty_hierarchy())) |
1025 | + |
1026 | + def test_finds_boot_resource(self): |
1027 | + image_spec = make_image_spec() |
1028 | + arch, subarch, release, label = image_spec |
1029 | + self.assertItemsEqual( |
1030 | + [image_spec], |
1031 | + boot_resources.iterate_boot_resources( |
1032 | + {arch: {subarch: {release: {label: factory.make_name()}}}})) |
1033 | + |
1034 | + |
1035 | +class TestValuePassesFilterList(MAASTestCase): |
1036 | + """Tests for `value_passes_filter_list`.""" |
1037 | + |
1038 | + def test_nothing_passes_empty_list(self): |
1039 | + self.assertFalse( |
1040 | + boot_resources.value_passes_filter_list( |
1041 | + [], factory.make_name('value'))) |
1042 | + |
1043 | + def test_unmatched_value_does_not_pass(self): |
1044 | + self.assertFalse( |
1045 | + boot_resources.value_passes_filter_list( |
1046 | + [factory.make_name('filter')], factory.make_name('value'))) |
1047 | + |
1048 | + def test_matched_value_passes(self): |
1049 | + value = factory.make_name('value') |
1050 | + self.assertTrue( |
1051 | + boot_resources.value_passes_filter_list([value], value)) |
1052 | + |
1053 | + def test_value_passes_if_matched_anywhere_in_filter(self): |
1054 | + value = factory.make_name('value') |
1055 | + self.assertTrue( |
1056 | + boot_resources.value_passes_filter_list( |
1057 | + [ |
1058 | + factory.make_name('filter'), |
1059 | + value, |
1060 | + factory.make_name('filter'), |
1061 | + ], |
1062 | + value)) |
1063 | + |
1064 | + def test_any_value_passes_asterisk(self): |
1065 | + self.assertTrue( |
1066 | + boot_resources.value_passes_filter_list( |
1067 | + ['*'], factory.make_name('value'))) |
1068 | + |
1069 | + |
1070 | +class TestValuePassesFilter(MAASTestCase): |
1071 | + """Tests for `value_passes_filter`.""" |
1072 | + |
1073 | + def test_unmatched_value_does_not_pass(self): |
1074 | + self.assertFalse( |
1075 | + boot_resources.value_passes_filter( |
1076 | + factory.make_name('filter'), factory.make_name('value'))) |
1077 | + |
1078 | + def test_matching_value_passes(self): |
1079 | + value = factory.make_name('value') |
1080 | + self.assertTrue(boot_resources.value_passes_filter(value, value)) |
1081 | + |
1082 | + def test_any_value_matches_asterisk(self): |
1083 | + self.assertTrue( |
1084 | + boot_resources.value_passes_filter( |
1085 | + '*', factory.make_name('value'))) |
1086 | + |
1087 | + |
1088 | +class TestImagePassesFilter(MAASTestCase): |
1089 | + """Tests for `image_passes_filter`.""" |
1090 | + |
1091 | + def make_filter_from_image(self, image_spec=None): |
1092 | + """Create a filter dict that matches the given `ImageSpec`. |
1093 | + |
1094 | + If `image_spec` is not given, creates a random value. |
1095 | + """ |
1096 | + if image_spec is None: |
1097 | + image_spec = make_image_spec() |
1098 | + return { |
1099 | + 'arches': [image_spec.arch], |
1100 | + 'subarches': [image_spec.subarch], |
1101 | + 'release': image_spec.release, |
1102 | + 'labels': [image_spec.label], |
1103 | + } |
1104 | + |
1105 | + def test_any_image_passes_none_filter(self): |
1106 | + arch, subarch, release, label = make_image_spec() |
1107 | + self.assertTrue( |
1108 | + boot_resources.image_passes_filter( |
1109 | + None, arch, subarch, release, label)) |
1110 | + |
1111 | + def test_any_image_passes_empty_filter(self): |
1112 | + arch, subarch, release, label = make_image_spec() |
1113 | + self.assertTrue( |
1114 | + boot_resources.image_passes_filter( |
1115 | + [], arch, subarch, release, label)) |
1116 | + |
1117 | + def test_image_passes_matching_filter(self): |
1118 | + image = make_image_spec() |
1119 | + self.assertTrue( |
1120 | + boot_resources.image_passes_filter( |
1121 | + [self.make_filter_from_image(image)], |
1122 | + image.arch, image.subarch, image.release, image.label)) |
1123 | + |
1124 | + def test_image_does_not_pass_nonmatching_filter(self): |
1125 | + image = make_image_spec() |
1126 | + self.assertFalse( |
1127 | + boot_resources.image_passes_filter( |
1128 | + [self.make_filter_from_image()], |
1129 | + image.arch, image.subarch, image.release, image.label)) |
1130 | + |
1131 | + def test_image_passes_if_one_filter_matches(self): |
1132 | + image = make_image_spec() |
1133 | + self.assertTrue( |
1134 | + boot_resources.image_passes_filter( |
1135 | + [ |
1136 | + self.make_filter_from_image(), |
1137 | + self.make_filter_from_image(image), |
1138 | + self.make_filter_from_image(), |
1139 | + ], image.arch, image.subarch, image.release, image.label)) |
1140 | + |
1141 | + def test_filter_checks_release(self): |
1142 | + image = make_image_spec() |
1143 | + self.assertFalse( |
1144 | + boot_resources.image_passes_filter( |
1145 | + [ |
1146 | + self.make_filter_from_image(image._replace( |
1147 | + release=factory.make_name('other-release'))) |
1148 | + ], image.arch, image.subarch, image.release, image.label)) |
1149 | + |
1150 | + def test_filter_checks_arches(self): |
1151 | + image = make_image_spec() |
1152 | + self.assertFalse( |
1153 | + boot_resources.image_passes_filter( |
1154 | + [ |
1155 | + self.make_filter_from_image(image._replace( |
1156 | + arch=factory.make_name('other-arch'))) |
1157 | + ], image.arch, image.subarch, image.release, image.label)) |
1158 | + |
1159 | + def test_filter_checks_subarches(self): |
1160 | + image = make_image_spec() |
1161 | + self.assertFalse( |
1162 | + boot_resources.image_passes_filter( |
1163 | + [ |
1164 | + self.make_filter_from_image(image._replace( |
1165 | + subarch=factory.make_name('other-subarch'))) |
1166 | + ], image.arch, image.subarch, image.release, image.label)) |
1167 | + |
1168 | + def test_filter_checks_labels(self): |
1169 | + image = make_image_spec() |
1170 | + self.assertFalse( |
1171 | + boot_resources.image_passes_filter( |
1172 | + [ |
1173 | + self.make_filter_from_image(image._replace( |
1174 | + label=factory.make_name('other-label'))) |
1175 | + ], image.arch, image.subarch, image.release, image.label)) |
1176 | + |
1177 | + |
1178 | +class TestBootMerge(MAASTestCase): |
1179 | + """Tests for `boot_merge`.""" |
1180 | + |
1181 | + def make_resource(self, boot_dict=None, image_spec=None, resource=None): |
1182 | + """Add a boot resource to `boot_dict`, creating it if necessary.""" |
1183 | + if boot_dict is None: |
1184 | + boot_dict = {} |
1185 | + if image_spec is None: |
1186 | + image_spec = make_image_spec() |
1187 | + if resource is None: |
1188 | + resource = factory.make_name('boot-resource') |
1189 | + arch, subarch, release, label = image_spec |
1190 | + # Drill down into the dict; along the way, create any missing levels of |
1191 | + # nested dicts. |
1192 | + nested_dict = boot_dict |
1193 | + for level in (arch, subarch, release): |
1194 | + nested_dict.setdefault(level, {}) |
1195 | + nested_dict = nested_dict[level] |
1196 | + # At the bottom level, indexed by "label," insert "resource" as the |
1197 | + # value. |
1198 | + nested_dict[label] = resource |
1199 | + return boot_dict |
1200 | + |
1201 | + def test_integrates(self): |
1202 | + # End-to-end scenario for boot_merge: start with an empty boot |
1203 | + # resources dict, and receive one resource from Simplestreams. |
1204 | + total_resources = boot_resources.create_empty_hierarchy() |
1205 | + resources_from_repo = self.make_resource() |
1206 | + boot_resources.boot_merge(total_resources, resources_from_repo.copy()) |
1207 | + # Since we started with an empty dict, the result contains the same |
1208 | + # item that we got from Simplestreams, and nothing else. |
1209 | + self.assertEqual(resources_from_repo, total_resources) |
1210 | + |
1211 | + def test_obeys_filters(self): |
1212 | + filters = [ |
1213 | + { |
1214 | + 'arches': [factory.make_name('other-arch')], |
1215 | + 'subarches': [factory.make_name('other-subarch')], |
1216 | + 'release': factory.make_name('other-release'), |
1217 | + 'label': [factory.make_name('other-label')], |
1218 | + }, |
1219 | + ] |
1220 | + total_resources = boot_resources.create_empty_hierarchy() |
1221 | + resources_from_repo = self.make_resource() |
1222 | + boot_resources.boot_merge( |
1223 | + total_resources, resources_from_repo, filters=filters) |
1224 | + self.assertEqual({}, total_resources) |
1225 | + |
1226 | + def test_does_not_overwrite_existing_entry(self): |
1227 | + image = make_image_spec() |
1228 | + original_resources = self.make_resource( |
1229 | + resource="Original resource", image_spec=image) |
1230 | + total_resources = original_resources.copy() |
1231 | + resources_from_repo = self.make_resource( |
1232 | + resource="New resource", image_spec=image) |
1233 | + boot_resources.boot_merge(total_resources, resources_from_repo.copy()) |
1234 | + self.assertEqual(original_resources, total_resources) |
1235 | + |
1236 | + |
1237 | +>>>>>>> MERGE-SOURCE |
1238 | class TestMain(MAASTestCase): |
1239 | |
1240 | +<<<<<<< TREE |
1241 | def patch_logger(self): |
1242 | """Suppress log output from the import code.""" |
1243 | self.patch(boot_resources, 'logger') |
1244 | @@ -343,23 +580,28 @@ |
1245 | "No boot resources found. " |
1246 | "Check configuration and connectivity.")) |
1247 | |
1248 | +======= |
1249 | +>>>>>>> MERGE-SOURCE |
1250 | def test_raises_ioerror_when_no_config_file_found(self): |
1251 | - self.patch_logger() |
1252 | - no_config = os.path.join( |
1253 | - self.make_dir(), '%s.yaml' % factory.make_name('no-config')) |
1254 | + # Suppress log output. |
1255 | + self.logger = self.patch(boot_resources, 'logger') |
1256 | + filename = "/tmp/%s" % factory.make_name("config") |
1257 | + self.assertFalse(os.path.exists(filename)) |
1258 | + args = MagicMock() |
1259 | + args.config_file = filename |
1260 | self.assertRaises( |
1261 | boot_resources.NoConfigFile, |
1262 | - boot_resources.main, self.make_args(config_file=no_config)) |
1263 | + boot_resources.main, args) |
1264 | |
1265 | def test_raises_non_ENOENT_IOErrors(self): |
1266 | # main() will raise a NoConfigFile error when it encounters an |
1267 | # ENOENT IOError, but will otherwise just re-raise the original |
1268 | # IOError. |
1269 | + args = MagicMock() |
1270 | mock_load_from_cache = self.patch(BootConfig, 'load_from_cache') |
1271 | other_error = IOError(randint(errno.ENOENT + 1, 1000)) |
1272 | mock_load_from_cache.side_effect = other_error |
1273 | - self.patch_logger() |
1274 | - raised_error = self.assertRaises( |
1275 | - IOError, |
1276 | - boot_resources.main, self.make_args()) |
1277 | + # Suppress log output. |
1278 | + self.logger = self.patch(boot_resources, 'logger') |
1279 | + raised_error = self.assertRaises(IOError, boot_resources.main, args) |
1280 | self.assertEqual(other_error, raised_error) |
1281 | |
1282 | === modified file 'src/provisioningserver/power_schema.py' |
1283 | === modified file 'src/provisioningserver/tasks.py' |
1284 | === modified file 'src/provisioningserver/tests/test_tasks.py' |
1285 | --- src/provisioningserver/tests/test_tasks.py 2014-05-05 18:51:41 +0000 |
1286 | +++ src/provisioningserver/tests/test_tasks.py 2014-05-07 22:25:58 +0000 |
1287 | @@ -676,6 +676,7 @@ |
1288 | self.patch_boot_resources_function() |
1289 | mock_callback = Mock() |
1290 | import_boot_images(callback=mock_callback) |
1291 | +<<<<<<< TREE |
1292 | self.assertThat(mock_callback.delay, MockCalledOnceWith()) |
1293 | |
1294 | |
1295 | @@ -688,3 +689,17 @@ |
1296 | mock = self.patch(tasks, 'probe_and_enlist_ucsm') |
1297 | enlist_nodes_from_ucsm(url, username, password) |
1298 | self.assertThat(mock, MockCalledOnceWith(url, username, password)) |
1299 | +======= |
1300 | + self.assertEqual([call()], mock_callback.delay.mock_calls) |
1301 | + |
1302 | + |
1303 | +class TestAddUCSM(PservTestCase): |
1304 | + |
1305 | + def test_enlist_nodes_from_ucsm(self): |
1306 | + url = 'url' |
1307 | + username = 'username' |
1308 | + password = 'password' |
1309 | + mock = self.patch(tasks, 'probe_and_enlist_ucsm') |
1310 | + enlist_nodes_from_ucsm(url, username, password) |
1311 | + self.assertThat(mock, MockCalledOnceWith(url, username, password)) |
1312 | +>>>>>>> MERGE-SOURCE |
1313 | |
1314 | === modified file 'src/provisioningserver/utils/__init__.py' |
1315 | === modified file 'src/provisioningserver/utils/tests/test_utils.py' |