Merge ~chad.smith/cloud-init:ubuntu/devel into cloud-init:ubuntu/devel
- Git
- lp:~chad.smith/cloud-init
- ubuntu/devel
- Merge into ubuntu/devel
Proposed by
Chad Smith
Status: Merged
Merged at revision: 405e9518755df26f492252c143ae370429c1d6c8
Proposed branch: ~chad.smith/cloud-init:ubuntu/devel
Merge into: cloud-init:ubuntu/devel
Diff against target:
2321 lines (+1535/-239) 26 files modified
cloudinit/config/cc_snap.py (+2/-45) cloudinit/config/cc_ubuntu_advantage.py (+173/-0) cloudinit/config/tests/test_snap.py (+6/-51) cloudinit/config/tests/test_ubuntu_advantage.py (+269/-0) cloudinit/ec2_utils.py (+2/-4) cloudinit/net/netplan.py (+9/-16) cloudinit/sources/DataSourceAzure.py (+9/-22) cloudinit/sources/DataSourceConfigDrive.py (+10/-0) cloudinit/sources/DataSourceIBMCloud.py (+325/-0) cloudinit/sources/DataSourceScaleway.py (+3/-3) cloudinit/subp.py (+57/-0) cloudinit/tests/test_subp.py (+61/-0) cloudinit/tests/test_util.py (+72/-0) cloudinit/url_helper.py (+12/-8) cloudinit/util.py (+31/-0) config/cloud.cfg.tmpl (+3/-0) debian/changelog (+19/-0) doc/rtd/topics/modules.rst (+1/-0) tests/unittests/test_datasource/test_azure.py (+7/-15) tests/unittests/test_datasource/test_ibmcloud.py (+262/-0) tests/unittests/test_ds_identify.py (+112/-3) tests/unittests/test_handler/test_schema.py (+1/-0) tests/unittests/test_net.py (+12/-63) tools/ds-identify (+60/-9) tools/pipremove (+14/-0) tox.ini (+3/-0)
Related bugs:
Reviewer | Review Type | Date Requested | Status
---|---|---|---
Scott Moser | | | Approve
Server Team CI bot | continuous-integration | | Approve

Review via email: mp+342011@code.launchpad.net
Commit message
Description of the change
Sync tip of master for release in Bionic with new IBMCloud datasource, Azure timeout fixes and ubuntu-advantage cloud-config module.
To post a comment you must log in.
Revision history for this message
Server Team CI bot (server-team-bot) wrote:
review:
Approve
(continuous-integration)
Revision history for this message
Scott Moser (smoser) wrote:
I merged this at 405e9518755df26
and then added the IBMCloud datasource and released.
thanks Chad.
review:
Approve
There was an error fetching revisions from git servers. Please try again in a few minutes. If the problem persists, contact Launchpad support.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/cloudinit/config/cc_snap.py b/cloudinit/config/cc_snap.py |
2 | index db96529..34a53fd 100644 |
3 | --- a/cloudinit/config/cc_snap.py |
4 | +++ b/cloudinit/config/cc_snap.py |
5 | @@ -11,6 +11,7 @@ from cloudinit import log as logging |
6 | from cloudinit.config.schema import ( |
7 | get_schema_doc, validate_cloudconfig_schema) |
8 | from cloudinit.settings import PER_INSTANCE |
9 | +from cloudinit.subp import prepend_base_command |
10 | from cloudinit import util |
11 | |
12 | |
13 | @@ -160,50 +161,6 @@ def add_assertions(assertions): |
14 | util.subp(snap_cmd + [ASSERTIONS_FILE], capture=True) |
15 | |
16 | |
17 | -def prepend_snap_commands(commands): |
18 | - """Ensure user-provided commands start with SNAP_CMD, warn otherwise. |
19 | - |
20 | - Each command is either a list or string. Perform the following: |
21 | - - When the command is a list, pop the first element if it is None |
22 | - - When the command is a list, insert SNAP_CMD as the first element if |
23 | - not present. |
24 | - - When the command is a string containing a non-snap command, warn. |
25 | - |
26 | - Support cut-n-paste snap command sets from public snappy documentation. |
27 | - Allow flexibility to provide non-snap environment/config setup if needed. |
28 | - |
29 | - @commands: List of commands. Each command element is a list or string. |
30 | - |
31 | - @return: List of 'fixed up' snap commands. |
32 | - @raise: TypeError on invalid config item type. |
33 | - """ |
34 | - warnings = [] |
35 | - errors = [] |
36 | - fixed_commands = [] |
37 | - for command in commands: |
38 | - if isinstance(command, list): |
39 | - if command[0] is None: # Avoid warnings by specifying None |
40 | - command = command[1:] |
41 | - elif command[0] != SNAP_CMD: # Automatically prepend SNAP_CMD |
42 | - command.insert(0, SNAP_CMD) |
43 | - elif isinstance(command, str): |
44 | - if not command.startswith('%s ' % SNAP_CMD): |
45 | - warnings.append(command) |
46 | - else: |
47 | - errors.append(str(command)) |
48 | - continue |
49 | - fixed_commands.append(command) |
50 | - |
51 | - if warnings: |
52 | - LOG.warning( |
53 | - 'Non-snap commands in snap config:\n%s', '\n'.join(warnings)) |
54 | - if errors: |
55 | - raise TypeError( |
56 | - 'Invalid snap config.' |
57 | - ' These commands are not a string or list:\n' + '\n'.join(errors)) |
58 | - return fixed_commands |
59 | - |
60 | - |
61 | def run_commands(commands): |
62 | """Run the provided commands provided in snap:commands configuration. |
63 | |
64 | @@ -224,7 +181,7 @@ def run_commands(commands): |
65 | 'commands parameter was not a list or dict: {commands}'.format( |
66 | commands=commands)) |
67 | |
68 | - fixed_snap_commands = prepend_snap_commands(commands) |
69 | + fixed_snap_commands = prepend_base_command('snap', commands) |
70 | |
71 | cmd_failures = [] |
72 | for command in fixed_snap_commands: |
73 | diff --git a/cloudinit/config/cc_ubuntu_advantage.py b/cloudinit/config/cc_ubuntu_advantage.py |
74 | new file mode 100644 |
75 | index 0000000..16b1868 |
76 | --- /dev/null |
77 | +++ b/cloudinit/config/cc_ubuntu_advantage.py |
78 | @@ -0,0 +1,173 @@ |
79 | +# Copyright (C) 2018 Canonical Ltd. |
80 | +# |
81 | +# This file is part of cloud-init. See LICENSE file for license information. |
82 | + |
83 | +"""Ubuntu advantage: manage ubuntu-advantage offerings from Canonical.""" |
84 | + |
85 | +import sys |
86 | +from textwrap import dedent |
87 | + |
88 | +from cloudinit import log as logging |
89 | +from cloudinit.config.schema import ( |
90 | + get_schema_doc, validate_cloudconfig_schema) |
91 | +from cloudinit.settings import PER_INSTANCE |
92 | +from cloudinit.subp import prepend_base_command |
93 | +from cloudinit import util |
94 | + |
95 | + |
96 | +distros = ['ubuntu'] |
97 | +frequency = PER_INSTANCE |
98 | + |
99 | +LOG = logging.getLogger(__name__) |
100 | + |
101 | +schema = { |
102 | + 'id': 'cc_ubuntu_advantage', |
103 | + 'name': 'Ubuntu Advantage', |
104 | + 'title': 'Install, configure and manage ubuntu-advantage offerings', |
105 | + 'description': dedent("""\ |
106 | + This module provides configuration options to setup ubuntu-advantage |
107 | + subscriptions. |
108 | + |
109 | + .. note:: |
110 | + Both ``commands`` value can be either a dictionary or a list. If |
111 | + the configuration provided is a dictionary, the keys are only used |
112 | + to order the execution of the commands and the dictionary is |
113 | + merged with any vendor-data ubuntu-advantage configuration |
114 | + provided. If a ``commands`` is provided as a list, any vendor-data |
115 | + ubuntu-advantage ``commands`` are ignored. |
116 | + |
117 | + Ubuntu-advantage ``commands`` is a dictionary or list of |
118 | + ubuntu-advantage commands to run on the deployed machine. |
119 | + These commands can be used to enable or disable subscriptions to |
120 | + various ubuntu-advantage products. See 'man ubuntu-advantage' for more |
121 | + information on supported subcommands. |
122 | + |
123 | + .. note:: |
124 | + Each command item can be a string or list. If the item is a list, |
125 | + 'ubuntu-advantage' can be omitted and it will automatically be |
126 | + inserted as part of the command. |
127 | + """), |
128 | + 'distros': distros, |
129 | + 'examples': [dedent("""\ |
130 | + # Enable Extended Security Maintenance using your service auth token |
131 | + ubuntu-advantage: |
132 | + commands: |
133 | + 00: ubuntu-advantage enable-esm <token> |
134 | + """), dedent("""\ |
135 | + # Enable livepatch by providing your livepatch token |
136 | + ubuntu-advantage: |
137 | + commands: |
138 | + 00: ubuntu-advantage enable-livepatch <livepatch-token> |
139 | + |
140 | + """), dedent("""\ |
141 | + # Convenience: the ubuntu-advantage command can be omitted when |
142 | + # specifying commands as a list and 'ubuntu-advantage' will |
143 | + # automatically be prepended. |
144 | + # The following commands are equivalent |
145 | + ubuntu-advantage: |
146 | + commands: |
147 | + 00: ['enable-livepatch', 'my-token'] |
148 | + 01: ['ubuntu-advantage', 'enable-livepatch', 'my-token'] |
149 | + 02: ubuntu-advantage enable-livepatch my-token |
150 | + 03: 'ubuntu-advantage enable-livepatch my-token' |
151 | + """)], |
152 | + 'frequency': PER_INSTANCE, |
153 | + 'type': 'object', |
154 | + 'properties': { |
155 | + 'ubuntu-advantage': { |
156 | + 'type': 'object', |
157 | + 'properties': { |
158 | + 'commands': { |
159 | + 'type': ['object', 'array'], # Array of strings or dict |
160 | + 'items': { |
161 | + 'oneOf': [ |
162 | + {'type': 'array', 'items': {'type': 'string'}}, |
163 | + {'type': 'string'}] |
164 | + }, |
165 | + 'additionalItems': False, # Reject non-string & non-list |
166 | + 'minItems': 1, |
167 | + 'minProperties': 1, |
168 | + 'uniqueItems': True |
169 | + } |
170 | + }, |
171 | + 'additionalProperties': False, # Reject keys not in schema |
172 | + 'required': ['commands'] |
173 | + } |
174 | + } |
175 | +} |
176 | + |
177 | +# TODO schema for 'assertions' and 'commands' are too permissive at the moment. |
178 | +# Once python-jsonschema supports schema draft 6 add support for arbitrary |
179 | +# object keys with 'patternProperties' constraint to validate string values. |
180 | + |
181 | +__doc__ = get_schema_doc(schema) # Supplement python help() |
182 | + |
183 | +UA_CMD = "ubuntu-advantage" |
184 | + |
185 | + |
186 | +def run_commands(commands): |
187 | + """Run the commands provided in ubuntu-advantage:commands config. |
188 | + |
189 | + Commands are run individually. Any errors are collected and reported |
190 | + after attempting all commands. |
191 | + |
192 | + @param commands: A list or dict containing commands to run. Keys of a |
193 | + dict will be used to order the commands provided as dict values. |
194 | + """ |
195 | + if not commands: |
196 | + return |
197 | + LOG.debug('Running user-provided ubuntu-advantage commands') |
198 | + if isinstance(commands, dict): |
199 | + # Sort commands based on dictionary key |
200 | + commands = [v for _, v in sorted(commands.items())] |
201 | + elif not isinstance(commands, list): |
202 | + raise TypeError( |
203 | + 'commands parameter was not a list or dict: {commands}'.format( |
204 | + commands=commands)) |
205 | + |
206 | + fixed_ua_commands = prepend_base_command('ubuntu-advantage', commands) |
207 | + |
208 | + cmd_failures = [] |
209 | + for command in fixed_ua_commands: |
210 | + shell = isinstance(command, str) |
211 | + try: |
212 | + util.subp(command, shell=shell, status_cb=sys.stderr.write) |
213 | + except util.ProcessExecutionError as e: |
214 | + cmd_failures.append(str(e)) |
215 | + if cmd_failures: |
216 | + msg = ( |
217 | + 'Failures running ubuntu-advantage commands:\n' |
218 | + '{cmd_failures}'.format( |
219 | + cmd_failures=cmd_failures)) |
220 | + util.logexc(LOG, msg) |
221 | + raise RuntimeError(msg) |
222 | + |
223 | + |
224 | +def maybe_install_ua_tools(cloud): |
225 | + """Install ubuntu-advantage-tools if not present.""" |
226 | + if util.which('ubuntu-advantage'): |
227 | + return |
228 | + try: |
229 | + cloud.distro.update_package_sources() |
230 | + except Exception as e: |
231 | + util.logexc(LOG, "Package update failed") |
232 | + raise |
233 | + try: |
234 | + cloud.distro.install_packages(['ubuntu-advantage-tools']) |
235 | + except Exception as e: |
236 | + util.logexc(LOG, "Failed to install ubuntu-advantage-tools") |
237 | + raise |
238 | + |
239 | + |
240 | +def handle(name, cfg, cloud, log, args): |
241 | + cfgin = cfg.get('ubuntu-advantage') |
242 | + if cfgin is None: |
243 | + LOG.debug(("Skipping module named %s," |
244 | + " no 'ubuntu-advantage' key in configuration"), name) |
245 | + return |
246 | + |
247 | + validate_cloudconfig_schema(cfg, schema) |
248 | + maybe_install_ua_tools(cloud) |
249 | + run_commands(cfgin.get('commands', [])) |
250 | + |
251 | +# vi: ts=4 expandtab |
252 | diff --git a/cloudinit/config/tests/test_snap.py b/cloudinit/config/tests/test_snap.py |
253 | index cb1205e..c5b4a9d 100644 |
254 | --- a/cloudinit/config/tests/test_snap.py |
255 | +++ b/cloudinit/config/tests/test_snap.py |
256 | @@ -4,11 +4,12 @@ import re |
257 | from six import StringIO |
258 | |
259 | from cloudinit.config.cc_snap import ( |
260 | - ASSERTIONS_FILE, add_assertions, handle, prepend_snap_commands, |
261 | - maybe_install_squashfuse, run_commands, schema) |
262 | + ASSERTIONS_FILE, add_assertions, handle, maybe_install_squashfuse, |
263 | + run_commands, schema) |
264 | from cloudinit.config.schema import validate_cloudconfig_schema |
265 | from cloudinit import util |
266 | -from cloudinit.tests.helpers import CiTestCase, mock, wrap_and_call |
267 | +from cloudinit.tests.helpers import ( |
268 | + CiTestCase, mock, wrap_and_call, skipUnlessJsonSchema) |
269 | |
270 | |
271 | SYSTEM_USER_ASSERTION = """\ |
272 | @@ -158,54 +159,6 @@ class TestAddAssertions(CiTestCase): |
273 | util.load_file(compare_file), util.load_file(assert_file)) |
274 | |
275 | |
276 | -class TestPrepentSnapCommands(CiTestCase): |
277 | - |
278 | - with_logs = True |
279 | - |
280 | - def test_prepend_snap_commands_errors_on_neither_string_nor_list(self): |
281 | - """Raise an error for each command which is not a string or list.""" |
282 | - orig_commands = ['ls', 1, {'not': 'gonna work'}, ['snap', 'list']] |
283 | - with self.assertRaises(TypeError) as context_manager: |
284 | - prepend_snap_commands(orig_commands) |
285 | - self.assertEqual( |
286 | - "Invalid snap config. These commands are not a string or list:\n" |
287 | - "1\n{'not': 'gonna work'}", |
288 | - str(context_manager.exception)) |
289 | - |
290 | - def test_prepend_snap_commands_warns_on_non_snap_string_commands(self): |
291 | - """Warn on each non-snap for commands of type string.""" |
292 | - orig_commands = ['ls', 'snap list', 'touch /blah', 'snap install x'] |
293 | - fixed_commands = prepend_snap_commands(orig_commands) |
294 | - self.assertEqual( |
295 | - 'WARNING: Non-snap commands in snap config:\n' |
296 | - 'ls\ntouch /blah\n', |
297 | - self.logs.getvalue()) |
298 | - self.assertEqual(orig_commands, fixed_commands) |
299 | - |
300 | - def test_prepend_snap_commands_prepends_on_non_snap_list_commands(self): |
301 | - """Prepend 'snap' for each non-snap command of type list.""" |
302 | - orig_commands = [['ls'], ['snap', 'list'], ['snapa', '/blah'], |
303 | - ['snap', 'install', 'x']] |
304 | - expected = [['snap', 'ls'], ['snap', 'list'], |
305 | - ['snap', 'snapa', '/blah'], |
306 | - ['snap', 'install', 'x']] |
307 | - fixed_commands = prepend_snap_commands(orig_commands) |
308 | - self.assertEqual('', self.logs.getvalue()) |
309 | - self.assertEqual(expected, fixed_commands) |
310 | - |
311 | - def test_prepend_snap_commands_removes_first_item_when_none(self): |
312 | - """Remove the first element of a non-snap command when it is None.""" |
313 | - orig_commands = [[None, 'ls'], ['snap', 'list'], |
314 | - [None, 'touch', '/blah'], |
315 | - ['snap', 'install', 'x']] |
316 | - expected = [['ls'], ['snap', 'list'], |
317 | - ['touch', '/blah'], |
318 | - ['snap', 'install', 'x']] |
319 | - fixed_commands = prepend_snap_commands(orig_commands) |
320 | - self.assertEqual('', self.logs.getvalue()) |
321 | - self.assertEqual(expected, fixed_commands) |
322 | - |
323 | - |
324 | class TestRunCommands(CiTestCase): |
325 | |
326 | with_logs = True |
327 | @@ -291,6 +244,7 @@ class TestRunCommands(CiTestCase): |
328 | self.assertEqual('MOM\nHI\n', util.load_file(outfile)) |
329 | |
330 | |
331 | +@skipUnlessJsonSchema() |
332 | class TestSchema(CiTestCase): |
333 | |
334 | with_logs = True |
335 | @@ -466,6 +420,7 @@ class TestHandle(CiTestCase): |
336 | util.load_file(compare_file), util.load_file(assert_file)) |
337 | |
338 | @mock.patch('cloudinit.config.cc_snap.util.subp') |
339 | + @skipUnlessJsonSchema() |
340 | def test_handle_validates_schema(self, m_subp): |
341 | """Any provided configuration is runs validate_cloudconfig_schema.""" |
342 | assert_file = self.tmp_path('snapd.assertions', dir=self.tmp) |
343 | diff --git a/cloudinit/config/tests/test_ubuntu_advantage.py b/cloudinit/config/tests/test_ubuntu_advantage.py |
344 | new file mode 100644 |
345 | index 0000000..f2a59fa |
346 | --- /dev/null |
347 | +++ b/cloudinit/config/tests/test_ubuntu_advantage.py |
348 | @@ -0,0 +1,269 @@ |
349 | +# This file is part of cloud-init. See LICENSE file for license information. |
350 | + |
351 | +import re |
352 | +from six import StringIO |
353 | + |
354 | +from cloudinit.config.cc_ubuntu_advantage import ( |
355 | + handle, maybe_install_ua_tools, run_commands, schema) |
356 | +from cloudinit.config.schema import validate_cloudconfig_schema |
357 | +from cloudinit import util |
358 | +from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJsonSchema |
359 | + |
360 | + |
361 | +# Module path used in mocks |
362 | +MPATH = 'cloudinit.config.cc_ubuntu_advantage' |
363 | + |
364 | + |
365 | +class FakeCloud(object): |
366 | + def __init__(self, distro): |
367 | + self.distro = distro |
368 | + |
369 | + |
370 | +class TestRunCommands(CiTestCase): |
371 | + |
372 | + with_logs = True |
373 | + |
374 | + def setUp(self): |
375 | + super(TestRunCommands, self).setUp() |
376 | + self.tmp = self.tmp_dir() |
377 | + |
378 | + @mock.patch('%s.util.subp' % MPATH) |
379 | + def test_run_commands_on_empty_list(self, m_subp): |
380 | + """When provided with an empty list, run_commands does nothing.""" |
381 | + run_commands([]) |
382 | + self.assertEqual('', self.logs.getvalue()) |
383 | + m_subp.assert_not_called() |
384 | + |
385 | + def test_run_commands_on_non_list_or_dict(self): |
386 | + """When provided an invalid type, run_commands raises an error.""" |
387 | + with self.assertRaises(TypeError) as context_manager: |
388 | + run_commands(commands="I'm Not Valid") |
389 | + self.assertEqual( |
390 | + "commands parameter was not a list or dict: I'm Not Valid", |
391 | + str(context_manager.exception)) |
392 | + |
393 | + def test_run_command_logs_commands_and_exit_codes_to_stderr(self): |
394 | + """All exit codes are logged to stderr.""" |
395 | + outfile = self.tmp_path('output.log', dir=self.tmp) |
396 | + |
397 | + cmd1 = 'echo "HI" >> %s' % outfile |
398 | + cmd2 = 'bogus command' |
399 | + cmd3 = 'echo "MOM" >> %s' % outfile |
400 | + commands = [cmd1, cmd2, cmd3] |
401 | + |
402 | + mock_path = '%s.sys.stderr' % MPATH |
403 | + with mock.patch(mock_path, new_callable=StringIO) as m_stderr: |
404 | + with self.assertRaises(RuntimeError) as context_manager: |
405 | + run_commands(commands=commands) |
406 | + |
407 | + self.assertIsNotNone( |
408 | + re.search(r'bogus: (command )?not found', |
409 | + str(context_manager.exception)), |
410 | + msg='Expected bogus command not found') |
411 | + expected_stderr_log = '\n'.join([ |
412 | + 'Begin run command: {cmd}'.format(cmd=cmd1), |
413 | + 'End run command: exit(0)', |
414 | + 'Begin run command: {cmd}'.format(cmd=cmd2), |
415 | + 'ERROR: End run command: exit(127)', |
416 | + 'Begin run command: {cmd}'.format(cmd=cmd3), |
417 | + 'End run command: exit(0)\n']) |
418 | + self.assertEqual(expected_stderr_log, m_stderr.getvalue()) |
419 | + |
420 | + def test_run_command_as_lists(self): |
421 | + """When commands are specified as a list, run them in order.""" |
422 | + outfile = self.tmp_path('output.log', dir=self.tmp) |
423 | + |
424 | + cmd1 = 'echo "HI" >> %s' % outfile |
425 | + cmd2 = 'echo "MOM" >> %s' % outfile |
426 | + commands = [cmd1, cmd2] |
427 | + with mock.patch('%s.sys.stderr' % MPATH, new_callable=StringIO): |
428 | + run_commands(commands=commands) |
429 | + |
430 | + self.assertIn( |
431 | + 'DEBUG: Running user-provided ubuntu-advantage commands', |
432 | + self.logs.getvalue()) |
433 | + self.assertEqual('HI\nMOM\n', util.load_file(outfile)) |
434 | + self.assertIn( |
435 | + 'WARNING: Non-ubuntu-advantage commands in ubuntu-advantage' |
436 | + ' config:', |
437 | + self.logs.getvalue()) |
438 | + |
439 | + def test_run_command_dict_sorted_as_command_script(self): |
440 | + """When commands are a dict, sort them and run.""" |
441 | + outfile = self.tmp_path('output.log', dir=self.tmp) |
442 | + cmd1 = 'echo "HI" >> %s' % outfile |
443 | + cmd2 = 'echo "MOM" >> %s' % outfile |
444 | + commands = {'02': cmd1, '01': cmd2} |
445 | + with mock.patch('%s.sys.stderr' % MPATH, new_callable=StringIO): |
446 | + run_commands(commands=commands) |
447 | + |
448 | + expected_messages = [ |
449 | + 'DEBUG: Running user-provided ubuntu-advantage commands'] |
450 | + for message in expected_messages: |
451 | + self.assertIn(message, self.logs.getvalue()) |
452 | + self.assertEqual('MOM\nHI\n', util.load_file(outfile)) |
453 | + |
454 | + |
455 | +@skipUnlessJsonSchema() |
456 | +class TestSchema(CiTestCase): |
457 | + |
458 | + with_logs = True |
459 | + |
460 | + def test_schema_warns_on_ubuntu_advantage_not_as_dict(self): |
461 | + """If ubuntu-advantage configuration is not a dict, emit a warning.""" |
462 | + validate_cloudconfig_schema({'ubuntu-advantage': 'wrong type'}, schema) |
463 | + self.assertEqual( |
464 | + "WARNING: Invalid config:\nubuntu-advantage: 'wrong type' is not" |
465 | + " of type 'object'\n", |
466 | + self.logs.getvalue()) |
467 | + |
468 | + @mock.patch('%s.run_commands' % MPATH) |
469 | + def test_schema_disallows_unknown_keys(self, _): |
470 | + """Unknown keys in ubuntu-advantage configuration emit warnings.""" |
471 | + validate_cloudconfig_schema( |
472 | + {'ubuntu-advantage': {'commands': ['ls'], 'invalid-key': ''}}, |
473 | + schema) |
474 | + self.assertIn( |
475 | + 'WARNING: Invalid config:\nubuntu-advantage: Additional properties' |
476 | + " are not allowed ('invalid-key' was unexpected)", |
477 | + self.logs.getvalue()) |
478 | + |
479 | + def test_warn_schema_requires_commands(self): |
480 | + """Warn when ubuntu-advantage configuration lacks commands.""" |
481 | + validate_cloudconfig_schema( |
482 | + {'ubuntu-advantage': {}}, schema) |
483 | + self.assertEqual( |
484 | + "WARNING: Invalid config:\nubuntu-advantage: 'commands' is a" |
485 | + " required property\n", |
486 | + self.logs.getvalue()) |
487 | + |
488 | + @mock.patch('%s.run_commands' % MPATH) |
489 | + def test_warn_schema_commands_is_not_list_or_dict(self, _): |
490 | + """Warn when ubuntu-advantage:commands config is not a list or dict.""" |
491 | + validate_cloudconfig_schema( |
492 | + {'ubuntu-advantage': {'commands': 'broken'}}, schema) |
493 | + self.assertEqual( |
494 | + "WARNING: Invalid config:\nubuntu-advantage.commands: 'broken' is" |
495 | + " not of type 'object', 'array'\n", |
496 | + self.logs.getvalue()) |
497 | + |
498 | + @mock.patch('%s.run_commands' % MPATH) |
499 | + def test_warn_schema_when_commands_is_empty(self, _): |
500 | + """Emit warnings when ubuntu-advantage:commands is empty.""" |
501 | + validate_cloudconfig_schema( |
502 | + {'ubuntu-advantage': {'commands': []}}, schema) |
503 | + validate_cloudconfig_schema( |
504 | + {'ubuntu-advantage': {'commands': {}}}, schema) |
505 | + self.assertEqual( |
506 | + "WARNING: Invalid config:\nubuntu-advantage.commands: [] is too" |
507 | + " short\nWARNING: Invalid config:\nubuntu-advantage.commands: {}" |
508 | + " does not have enough properties\n", |
509 | + self.logs.getvalue()) |
510 | + |
511 | + @mock.patch('%s.run_commands' % MPATH) |
512 | + def test_schema_when_commands_are_list_or_dict(self, _): |
513 | + """No warnings when ubuntu-advantage:commands are a list or dict.""" |
514 | + validate_cloudconfig_schema( |
515 | + {'ubuntu-advantage': {'commands': ['valid']}}, schema) |
516 | + validate_cloudconfig_schema( |
517 | + {'ubuntu-advantage': {'commands': {'01': 'also valid'}}}, schema) |
518 | + self.assertEqual('', self.logs.getvalue()) |
519 | + |
520 | + |
521 | +class TestHandle(CiTestCase): |
522 | + |
523 | + with_logs = True |
524 | + |
525 | + def setUp(self): |
526 | + super(TestHandle, self).setUp() |
527 | + self.tmp = self.tmp_dir() |
528 | + |
529 | + @mock.patch('%s.run_commands' % MPATH) |
530 | + @mock.patch('%s.validate_cloudconfig_schema' % MPATH) |
531 | + def test_handle_no_config(self, m_schema, m_run): |
532 | + """When no ua-related configuration is provided, nothing happens.""" |
533 | + cfg = {} |
534 | + handle('ua-test', cfg=cfg, cloud=None, log=self.logger, args=None) |
535 | + self.assertIn( |
536 | + "DEBUG: Skipping module named ua-test, no 'ubuntu-advantage' key" |
537 | + " in config", |
538 | + self.logs.getvalue()) |
539 | + m_schema.assert_not_called() |
540 | + m_run.assert_not_called() |
541 | + |
542 | + @mock.patch('%s.maybe_install_ua_tools' % MPATH) |
543 | + def test_handle_tries_to_install_ubuntu_advantage_tools(self, m_install): |
544 | + """If ubuntu_advantage is provided, try installing ua-tools package.""" |
545 | + cfg = {'ubuntu-advantage': {}} |
546 | + mycloud = FakeCloud(None) |
547 | + handle('nomatter', cfg=cfg, cloud=mycloud, log=self.logger, args=None) |
548 | + m_install.assert_called_once_with(mycloud) |
549 | + |
550 | + @mock.patch('%s.maybe_install_ua_tools' % MPATH) |
551 | + def test_handle_runs_commands_provided(self, m_install): |
552 | + """When commands are specified as a list, run them.""" |
553 | + outfile = self.tmp_path('output.log', dir=self.tmp) |
554 | + |
555 | + cfg = { |
556 | + 'ubuntu-advantage': {'commands': ['echo "HI" >> %s' % outfile, |
557 | + 'echo "MOM" >> %s' % outfile]}} |
558 | + mock_path = '%s.sys.stderr' % MPATH |
559 | + with mock.patch(mock_path, new_callable=StringIO): |
560 | + handle('nomatter', cfg=cfg, cloud=None, log=self.logger, args=None) |
561 | + self.assertEqual('HI\nMOM\n', util.load_file(outfile)) |
562 | + |
563 | + |
564 | +class TestMaybeInstallUATools(CiTestCase): |
565 | + |
566 | + with_logs = True |
567 | + |
568 | + def setUp(self): |
569 | + super(TestMaybeInstallUATools, self).setUp() |
570 | + self.tmp = self.tmp_dir() |
571 | + |
572 | + @mock.patch('%s.util.which' % MPATH) |
573 | + def test_maybe_install_ua_tools_noop_when_ua_tools_present(self, m_which): |
574 | + """Do nothing if ubuntu-advantage-tools already exists.""" |
575 | + m_which.return_value = '/usr/bin/ubuntu-advantage' # already installed |
576 | + distro = mock.MagicMock() |
577 | + distro.update_package_sources.side_effect = RuntimeError( |
578 | + 'Some apt error') |
579 | + maybe_install_ua_tools(cloud=FakeCloud(distro)) # No RuntimeError |
580 | + |
581 | + @mock.patch('%s.util.which' % MPATH) |
582 | + def test_maybe_install_ua_tools_raises_update_errors(self, m_which): |
583 | + """maybe_install_ua_tools logs and raises apt update errors.""" |
584 | + m_which.return_value = None |
585 | + distro = mock.MagicMock() |
586 | + distro.update_package_sources.side_effect = RuntimeError( |
587 | + 'Some apt error') |
588 | + with self.assertRaises(RuntimeError) as context_manager: |
589 | + maybe_install_ua_tools(cloud=FakeCloud(distro)) |
590 | + self.assertEqual('Some apt error', str(context_manager.exception)) |
591 | + self.assertIn('Package update failed\nTraceback', self.logs.getvalue()) |
592 | + |
593 | + @mock.patch('%s.util.which' % MPATH) |
594 | + def test_maybe_install_ua_raises_install_errors(self, m_which): |
595 | + """maybe_install_ua_tools logs and raises package install errors.""" |
596 | + m_which.return_value = None |
597 | + distro = mock.MagicMock() |
598 | + distro.update_package_sources.return_value = None |
599 | + distro.install_packages.side_effect = RuntimeError( |
600 | + 'Some install error') |
601 | + with self.assertRaises(RuntimeError) as context_manager: |
602 | + maybe_install_ua_tools(cloud=FakeCloud(distro)) |
603 | + self.assertEqual('Some install error', str(context_manager.exception)) |
604 | + self.assertIn( |
605 | + 'Failed to install ubuntu-advantage-tools\n', self.logs.getvalue()) |
606 | + |
607 | + @mock.patch('%s.util.which' % MPATH) |
608 | + def test_maybe_install_ua_tools_happy_path(self, m_which): |
609 | + """maybe_install_ua_tools installs ubuntu-advantage-tools.""" |
610 | + m_which.return_value = None |
611 | + distro = mock.MagicMock() # No errors raised |
612 | + maybe_install_ua_tools(cloud=FakeCloud(distro)) |
613 | + distro.update_package_sources.assert_called_once_with() |
614 | + distro.install_packages.assert_called_once_with( |
615 | + ['ubuntu-advantage-tools']) |
616 | + |
617 | +# vi: ts=4 expandtab |
618 | diff --git a/cloudinit/ec2_utils.py b/cloudinit/ec2_utils.py |
619 | index d6c61e4..dc3f0fc 100644 |
620 | --- a/cloudinit/ec2_utils.py |
621 | +++ b/cloudinit/ec2_utils.py |
622 | @@ -135,10 +135,8 @@ class MetadataMaterializer(object): |
623 | |
624 | |
625 | def _skip_retry_on_codes(status_codes, _request_args, cause): |
626 | - """Returns if a request should retry based on a given set of codes that |
627 | - case retrying to be stopped/skipped. |
628 | - """ |
629 | - return cause.code in status_codes |
630 | + """Returns False if cause.code is in status_codes.""" |
631 | + return cause.code not in status_codes |
632 | |
633 | |
634 | def get_instance_userdata(api_version='latest', |
635 | diff --git a/cloudinit/net/netplan.py b/cloudinit/net/netplan.py |
636 | index 6bee3d3..6344348 100644 |
637 | --- a/cloudinit/net/netplan.py |
638 | +++ b/cloudinit/net/netplan.py |
639 | @@ -336,22 +336,15 @@ class Renderer(renderer.Renderer): |
640 | _extract_addresses(ifcfg, vlan) |
641 | vlans.update({ifname: vlan}) |
642 | |
643 | - # inject global nameserver values under each physical interface |
644 | - if nameservers: |
645 | - for _eth, cfg in ethernets.items(): |
646 | - nscfg = cfg.get('nameservers', {}) |
647 | - addresses = nscfg.get('addresses', []) |
648 | - addresses += nameservers |
649 | - nscfg.update({'addresses': addresses}) |
650 | - cfg.update({'nameservers': nscfg}) |
651 | - |
652 | - if searchdomains: |
653 | - for _eth, cfg in ethernets.items(): |
654 | - nscfg = cfg.get('nameservers', {}) |
655 | - search = nscfg.get('search', []) |
656 | - search += searchdomains |
657 | - nscfg.update({'search': search}) |
658 | - cfg.update({'nameservers': nscfg}) |
659 | + # inject global nameserver values under each all interface which |
660 | + # has addresses and do not already have a DNS configuration |
661 | + if nameservers or searchdomains: |
662 | + nscfg = {'addresses': nameservers, 'search': searchdomains} |
663 | + for section in [ethernets, wifis, bonds, bridges, vlans]: |
664 | + for _name, cfg in section.items(): |
665 | + if 'nameservers' in cfg or 'addresses' not in cfg: |
666 | + continue |
667 | + cfg.update({'nameservers': nscfg}) |
668 | |
669 | # workaround yaml dictionary key sorting when dumping |
670 | def _render_section(name, section): |
671 | diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py |
672 | index 0bb7fad..0ee622e 100644 |
673 | --- a/cloudinit/sources/DataSourceAzure.py |
674 | +++ b/cloudinit/sources/DataSourceAzure.py |
675 | @@ -20,7 +20,7 @@ from cloudinit import net |
676 | from cloudinit.net.dhcp import EphemeralDHCPv4 |
677 | from cloudinit import sources |
678 | from cloudinit.sources.helpers.azure import get_metadata_from_fabric |
679 | -from cloudinit.url_helper import readurl, wait_for_url, UrlError |
680 | +from cloudinit.url_helper import readurl, UrlError |
681 | from cloudinit import util |
682 | |
683 | LOG = logging.getLogger(__name__) |
684 | @@ -49,7 +49,6 @@ DEFAULT_FS = 'ext4' |
685 | AZURE_CHASSIS_ASSET_TAG = '7783-7084-3265-9085-8269-3286-77' |
686 | REPROVISION_MARKER_FILE = "/var/lib/cloud/data/poll_imds" |
687 | IMDS_URL = "http://169.254.169.254/metadata/reprovisiondata" |
688 | -IMDS_RETRIES = 5 |
689 | |
690 | |
691 | def find_storvscid_from_sysctl_pnpinfo(sysctl_out, deviceid): |
692 | @@ -451,36 +450,24 @@ class DataSourceAzure(sources.DataSource): |
693 | headers = {"Metadata": "true"} |
694 | LOG.debug("Start polling IMDS") |
695 | |
696 | - def sleep_cb(response, loop_n): |
697 | - return 1 |
698 | - |
699 | - def exception_cb(msg, exception): |
700 | + def exc_cb(msg, exception): |
701 | if isinstance(exception, UrlError) and exception.code == 404: |
702 | - return |
703 | - LOG.warning("Exception during polling. Will try DHCP.", |
704 | - exc_info=True) |
705 | - |
706 | + return True |
707 | # If we get an exception while trying to call IMDS, we |
708 | # call DHCP and setup the ephemeral network to acquire the new IP. |
709 | - raise exception |
710 | + return False |
711 | |
712 | need_report = report_ready |
713 | - for i in range(IMDS_RETRIES): |
714 | + while True: |
715 | try: |
716 | with EphemeralDHCPv4() as lease: |
717 | if need_report: |
718 | self._report_ready(lease=lease) |
719 | need_report = False |
720 | - wait_for_url([url], max_wait=None, timeout=60, |
721 | - status_cb=LOG.info, |
722 | - headers_cb=lambda url: headers, sleep_time=1, |
723 | - exception_cb=exception_cb, |
724 | - sleep_time_cb=sleep_cb) |
725 | - return str(readurl(url, headers=headers)) |
726 | - except Exception: |
727 | - LOG.debug("Exception during polling-retrying dhcp" + |
728 | - " %d more time(s).", (IMDS_RETRIES - i), |
729 | - exc_info=True) |
730 | + return readurl(url, timeout=1, headers=headers, |
731 | + exception_cb=exc_cb, infinite=True).contents |
732 | + except UrlError: |
733 | + pass |
734 | |
735 | def _report_ready(self, lease): |
736 | """Tells the fabric provisioning has completed |
737 | diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py |
738 | index b8db626..c7b5fe5 100644 |
739 | --- a/cloudinit/sources/DataSourceConfigDrive.py |
740 | +++ b/cloudinit/sources/DataSourceConfigDrive.py |
741 | @@ -14,6 +14,7 @@ from cloudinit import util |
742 | |
743 | from cloudinit.net import eni |
744 | |
745 | +from cloudinit.sources.DataSourceIBMCloud import get_ibm_platform |
746 | from cloudinit.sources.helpers import openstack |
747 | |
748 | LOG = logging.getLogger(__name__) |
749 | @@ -255,6 +256,15 @@ def find_candidate_devs(probe_optical=True): |
750 | # an unpartitioned block device (ex sda, not sda1) |
751 | devices = [d for d in candidates |
752 | if d in by_label or not util.is_partition(d)] |
753 | + |
754 | + if devices: |
755 | + # IBMCloud uses config-2 label, but limited to a single UUID. |
756 | + ibm_platform, ibm_path = get_ibm_platform() |
757 | + if ibm_path in devices: |
758 | + devices.remove(ibm_path) |
759 | + LOG.debug("IBMCloud device '%s' (%s) removed from candidate list", |
760 | + ibm_path, ibm_platform) |
761 | + |
762 | return devices |
763 | |
764 | |
765 | diff --git a/cloudinit/sources/DataSourceIBMCloud.py b/cloudinit/sources/DataSourceIBMCloud.py |
766 | new file mode 100644 |
767 | index 0000000..02b3d56 |
768 | --- /dev/null |
769 | +++ b/cloudinit/sources/DataSourceIBMCloud.py |
770 | @@ -0,0 +1,325 @@ |
771 | +# This file is part of cloud-init. See LICENSE file for license information. |
772 | +"""Datasource for IBMCloud. |
773 | + |
774 | +IBMCloud is also known as SoftLayer or BlueMix. |
775 | +IBMCloud hypervisor is xen (2018-03-10). |
776 | + |
777 | +There are 2 different api exposed launch methods. |
778 | + * template: This is the legacy method of launching instances. |
779 | + When booting from an image template, the system boots first into |
780 | + a "provisioning" mode. There, host <-> guest mechanisms are utilized |
781 | + to execute code in the guest and provision it. |
782 | + |
783 | + Cloud-init will disable itself when it detects that it is in the |
784 | + provisioning mode. It detects this by the presence of |
785 | + a file '/root/provisioningConfiguration.cfg'. |
786 | + |
787 | + When provided with user-data, the "first boot" will contain a |
788 | + ConfigDrive-like disk labeled with 'METADATA'. If there is no user-data |
789 | + provided, then there is no data-source. |
790 | + |
791 | + Cloud-init never does any network configuration in this mode. |
792 | + |
793 | + * os_code: Essentially "launch by OS Code" (Operating System Code). |
794 | + This is a more modern approach. There is no specific "provisioning" boot. |
795 | + Instead, cloud-init does all the customization. With or without |
796 | + user-data provided, an OpenStack ConfigDrive like disk is attached. |
797 | + |
798 | + Only disks with label 'config-2' and UUID '9796-932E' are considered. |
799 | + This is to avoid this datasource claiming ConfigDrive. This does |
800 | + mean that 1 in 16^8 (~4 billion) Xen ConfigDrive systems will be |
801 | + incorrectly identified as IBMCloud. |
802 | + |
803 | +TODO: |
804 | + * is uuid (/sys/hypervisor/uuid) stable for life of an instance? |
805 | + it seems it is not the same as data's uuid in the os_code case |
806 | + but is in the template case. |
807 | + |
808 | +""" |
809 | +import base64 |
810 | +import json |
811 | +import os |
812 | + |
813 | +from cloudinit import log as logging |
814 | +from cloudinit import sources |
815 | +from cloudinit.sources.helpers import openstack |
816 | +from cloudinit import util |
817 | + |
818 | +LOG = logging.getLogger(__name__) |
819 | + |
820 | +IBM_CONFIG_UUID = "9796-932E" |
821 | + |
822 | + |
823 | +class Platforms(object): |
824 | + TEMPLATE_LIVE_METADATA = "Template/Live/Metadata" |
825 | + TEMPLATE_LIVE_NODATA = "UNABLE TO BE IDENTIFIED." |
826 | + TEMPLATE_PROVISIONING_METADATA = "Template/Provisioning/Metadata" |
827 | + TEMPLATE_PROVISIONING_NODATA = "Template/Provisioning/No-Metadata" |
828 | + OS_CODE = "OS-Code/Live" |
829 | + |
830 | + |
831 | +PROVISIONING = ( |
832 | + Platforms.TEMPLATE_PROVISIONING_METADATA, |
833 | + Platforms.TEMPLATE_PROVISIONING_NODATA) |
834 | + |
835 | + |
836 | +class DataSourceIBMCloud(sources.DataSource): |
837 | + |
838 | + dsname = 'IBMCloud' |
839 | + system_uuid = None |
840 | + |
841 | + def __init__(self, sys_cfg, distro, paths): |
842 | + super(DataSourceIBMCloud, self).__init__(sys_cfg, distro, paths) |
843 | + self.source = None |
844 | + self._network_config = None |
845 | + self.network_json = None |
846 | + self.platform = None |
847 | + |
848 | + def __str__(self): |
849 | + root = super(DataSourceIBMCloud, self).__str__() |
850 | + mstr = "%s [%s %s]" % (root, self.platform, self.source) |
851 | + return mstr |
852 | + |
853 | + def _get_data(self): |
854 | + results = read_md() |
855 | + if results is None: |
856 | + return False |
857 | + |
858 | + self.source = results['source'] |
859 | + self.platform = results['platform'] |
860 | + self.metadata = results['metadata'] |
861 | + self.userdata_raw = results.get('userdata') |
862 | + self.network_json = results.get('networkdata') |
863 | + vd = results.get('vendordata') |
864 | + self.vendordata_pure = vd |
865 | + self.system_uuid = results['system-uuid'] |
866 | + try: |
867 | + self.vendordata_raw = sources.convert_vendordata(vd) |
868 | + except ValueError as e: |
869 | + LOG.warning("Invalid content in vendor-data: %s", e) |
870 | + self.vendordata_raw = None |
871 | + |
872 | + return True |
873 | + |
874 | + def check_instance_id(self, sys_cfg): |
875 | + """quickly check (local check only) if self.instance_id is still valid |
876 | + |
877 | + in Template mode, the system uuid (/sys/hypervisor/uuid) is the |
878 | + same as found in the METADATA disk. But that is not true in OS_CODE |
879 | + mode. So we read the system_uuid and keep that for later compare.""" |
880 | + if self.system_uuid is None: |
881 | + return False |
882 | + return self.system_uuid == _read_system_uuid() |
883 | + |
884 | + @property |
885 | + def network_config(self): |
886 | + if self.platform != Platforms.OS_CODE: |
887 | + # If deployed from template, an agent in the provisioning |
888 | + # environment handles networking configuration. Not cloud-init. |
889 | + return {'config': 'disabled', 'version': 1} |
890 | + if self._network_config is None: |
891 | + if self.network_json is not None: |
892 | + LOG.debug("network config provided via network_json") |
893 | + self._network_config = openstack.convert_net_json( |
894 | + self.network_json, known_macs=None) |
895 | + else: |
896 | + LOG.debug("no network configuration available.") |
897 | + return self._network_config |
898 | + |
899 | + |
900 | +def _read_system_uuid(): |
901 | + uuid_path = "/sys/hypervisor/uuid" |
902 | + if not os.path.isfile(uuid_path): |
903 | + return None |
904 | + return util.load_file(uuid_path).strip().lower() |
905 | + |
906 | + |
907 | +def _is_xen(): |
908 | + return os.path.exists("/proc/xen") |
909 | + |
910 | + |
911 | +def _is_ibm_provisioning(): |
912 | + return os.path.exists("/root/provisioningConfiguration.cfg") |
913 | + |
914 | + |
915 | +def get_ibm_platform(): |
916 | + """Return a tuple (Platform, path) |
917 | + |
918 | + If this is not IBM Cloud, then the return value is (None, None). |
919 | + An instance in provisioning mode is considered running on IBM cloud.""" |
920 | + label_mdata = "METADATA" |
921 | + label_cfg2 = "CONFIG-2" |
922 | + not_found = (None, None) |
923 | + |
924 | + if not _is_xen(): |
925 | + return not_found |
926 | + |
927 | + # fslabels contains only the first entry with a given label. |
928 | + fslabels = {} |
929 | + try: |
930 | + devs = util.blkid() |
931 | + except util.ProcessExecutionError as e: |
932 | + LOG.warning("Failed to run blkid: %s", e) |
933 | + return (None, None) |
934 | + |
935 | + for dev in sorted(devs.keys()): |
936 | + data = devs[dev] |
937 | + label = data.get("LABEL", "").upper() |
938 | + uuid = data.get("UUID", "").upper() |
939 | + if label not in (label_mdata, label_cfg2): |
940 | + continue |
941 | + if label in fslabels: |
942 | + LOG.warning("Duplicate fslabel '%s'. existing=%s current=%s", |
943 | + label, fslabels[label], data) |
944 | + continue |
945 | + if label == label_cfg2 and uuid != IBM_CONFIG_UUID: |
946 | + LOG.debug("Skipping %s with LABEL=%s due to uuid != %s: %s", |
947 | + dev, label, uuid, data) |
948 | + continue |
949 | + fslabels[label] = data |
950 | + |
951 | + metadata_path = fslabels.get(label_mdata, {}).get('DEVNAME') |
952 | + cfg2_path = fslabels.get(label_cfg2, {}).get('DEVNAME') |
953 | + |
954 | + if cfg2_path: |
955 | + return (Platforms.OS_CODE, cfg2_path) |
956 | + elif metadata_path: |
957 | + if _is_ibm_provisioning(): |
958 | + return (Platforms.TEMPLATE_PROVISIONING_METADATA, metadata_path) |
959 | + else: |
960 | + return (Platforms.TEMPLATE_LIVE_METADATA, metadata_path) |
961 | + elif _is_ibm_provisioning(): |
962 | + return (Platforms.TEMPLATE_PROVISIONING_NODATA, None) |
963 | + return not_found |
964 | + |
965 | + |
966 | +def read_md(): |
967 | + """Read data from IBM Cloud. |
968 | + |
969 | + @return: None if not running on IBM Cloud. |
970 | + dictionary with guaranteed fields: metadata, version |
971 | + and optional fields: userdata, vendordata, networkdata. |
972 | + Also includes the system uuid from /sys/hypervisor/uuid.""" |
973 | + platform, path = get_ibm_platform() |
974 | + if platform is None: |
975 | + LOG.debug("This is not an IBMCloud platform.") |
976 | + return None |
977 | + elif platform in PROVISIONING: |
978 | + LOG.debug("Cloud-init is disabled during provisioning: %s.", |
979 | + platform) |
980 | + return None |
981 | + |
982 | + ret = {'platform': platform, 'source': path, |
983 | + 'system-uuid': _read_system_uuid()} |
984 | + |
985 | + try: |
986 | + if os.path.isdir(path): |
987 | + results = metadata_from_dir(path) |
988 | + else: |
989 | + results = util.mount_cb(path, metadata_from_dir) |
990 | + except BrokenMetadata as e: |
991 | + raise RuntimeError( |
992 | + "Failed reading IBM config disk (platform=%s path=%s): %s" % |
993 | + (platform, path, e)) |
994 | + |
995 | + ret.update(results) |
996 | + return ret |
997 | + |
998 | + |
999 | +class BrokenMetadata(IOError): |
1000 | + pass |
1001 | + |
1002 | + |
1003 | +def metadata_from_dir(source_dir): |
1004 | + """Walk source_dir extracting standardized metadata. |
1005 | + |
1006 | + Certain metadata keys are renamed to present a standardized set of metadata |
1007 | + keys. |
1008 | + |
1009 | + This function has a lot in common with ConfigDriveReader.read_v2 but |
1010 | + there are a number of inconsistencies, such as key renames and only |
1011 | + presenting a 'latest' version which make it an unlikely candidate to share |
1012 | + code. |
1013 | + |
1014 | + @return: Dict containing translated metadata, userdata, vendordata, |
1015 | + networkdata as present. |
1016 | + """ |
1017 | + |
1018 | + def opath(fname): |
1019 | + return os.path.join("openstack", "latest", fname) |
1020 | + |
1021 | + def load_json_bytes(blob): |
1022 | + return json.loads(blob.decode('utf-8')) |
1023 | + |
1024 | + files = [ |
1025 | + # tuples of (results_name, path, translator) |
1026 | + ('metadata_raw', opath('meta_data.json'), load_json_bytes), |
1027 | + ('userdata', opath('user_data'), None), |
1028 | + ('vendordata', opath('vendor_data.json'), load_json_bytes), |
1029 | + ('networkdata', opath('network_data.json'), load_json_bytes), |
1030 | + ] |
1031 | + |
1032 | + results = {} |
1033 | + for (name, path, transl) in files: |
1034 | + fpath = os.path.join(source_dir, path) |
1035 | + raw = None |
1036 | + try: |
1037 | + raw = util.load_file(fpath, decode=False) |
1038 | + except IOError as e: |
1039 | + LOG.debug("Failed reading path '%s': %s", fpath, e) |
1040 | + |
1041 | + if raw is None or transl is None: |
1042 | + data = raw |
1043 | + else: |
1044 | + try: |
1045 | + data = transl(raw) |
1046 | + except Exception as e: |
1047 | + raise BrokenMetadata("Failed decoding %s: %s" % (path, e)) |
1048 | + |
1049 | + results[name] = data |
1050 | + |
1051 | + if results.get('metadata_raw') is None: |
1052 | + raise BrokenMetadata( |
1053 | + "%s missing required file 'meta_data.json'" % source_dir) |
1054 | + |
1055 | + results['metadata'] = {} |
1056 | + |
1057 | + md_raw = results['metadata_raw'] |
1058 | + md = results['metadata'] |
1059 | + if 'random_seed' in md_raw: |
1060 | + try: |
1061 | + md['random_seed'] = base64.b64decode(md_raw['random_seed']) |
1062 | + except (ValueError, TypeError) as e: |
1063 | + raise BrokenMetadata( |
1064 | + "Badly formatted metadata random_seed entry: %s" % e) |
1065 | + |
1066 | + renames = ( |
1067 | + ('public_keys', 'public-keys'), ('hostname', 'local-hostname'), |
1068 | + ('uuid', 'instance-id')) |
1069 | + for mdname, newname in renames: |
1070 | + if mdname in md_raw: |
1071 | + md[newname] = md_raw[mdname] |
1072 | + |
1073 | + return results |
1074 | + |
1075 | + |
1076 | +# Used to match classes to dependencies |
1077 | +datasources = [ |
1078 | + (DataSourceIBMCloud, (sources.DEP_FILESYSTEM,)), |
1079 | +] |
1080 | + |
1081 | + |
1082 | +# Return a list of data sources that match this set of dependencies |
1083 | +def get_datasource_list(depends): |
1084 | + return sources.list_from_depends(depends, datasources) |
1085 | + |
1086 | + |
1087 | +if __name__ == "__main__": |
1088 | + import argparse |
1089 | + |
1090 | + parser = argparse.ArgumentParser(description='Query IBM Cloud Metadata') |
1091 | + args = parser.parse_args() |
1092 | + data = read_md() |
1093 | + print(util.json_dumps(data)) |
1094 | + |
1095 | +# vi: ts=4 expandtab |
1096 | diff --git a/cloudinit/sources/DataSourceScaleway.py b/cloudinit/sources/DataSourceScaleway.py |
1097 | index 9005624..e2502b0 100644 |
1098 | --- a/cloudinit/sources/DataSourceScaleway.py |
1099 | +++ b/cloudinit/sources/DataSourceScaleway.py |
1100 | @@ -113,9 +113,9 @@ def query_data_api_once(api_address, timeout, requests_session): |
1101 | retries=0, |
1102 | session=requests_session, |
1103 | # If the error is a HTTP/404 or a ConnectionError, go into raise |
1104 | - # block below. |
1105 | - exception_cb=lambda _, exc: exc.code == 404 or ( |
1106 | - isinstance(exc.cause, requests.exceptions.ConnectionError) |
1107 | + # block below and don't bother retrying. |
1108 | + exception_cb=lambda _, exc: exc.code != 404 and ( |
1109 | + not isinstance(exc.cause, requests.exceptions.ConnectionError) |
1110 | ) |
1111 | ) |
1112 | return util.decode_binary(resp.contents) |
1113 | diff --git a/cloudinit/subp.py b/cloudinit/subp.py |
1114 | new file mode 100644 |
1115 | index 0000000..0ad0930 |
1116 | --- /dev/null |
1117 | +++ b/cloudinit/subp.py |
1118 | @@ -0,0 +1,57 @@ |
1119 | +# This file is part of cloud-init. See LICENSE file for license information. |
1120 | +"""Common utility functions for interacting with subprocess.""" |
1121 | + |
1122 | +# TODO move subp shellify and runparts related functions out of util.py |
1123 | + |
1124 | +import logging |
1125 | + |
1126 | +LOG = logging.getLogger(__name__) |
1127 | + |
1128 | + |
1129 | +def prepend_base_command(base_command, commands): |
1130 | + """Ensure user-provided commands start with base_command; warn otherwise. |
1131 | + |
1132 | + Each command is either a list or string. Perform the following: |
1133 | + - If the command is a list, pop the first element if it is None |
1134 | + - If the command is a list, insert base_command as the first element if |
1135 | + not present. |
1136 | + - When the command is a string not starting with 'base-command', warn. |
1137 | + |
1138 | + Allow flexibility to provide non-base-command environment/config setup if |
1139 | + needed. |
1140 | + |
1141 | + @commands: List of commands. Each command element is a list or string. |
1142 | + |
1143 | + @return: List of 'fixed up' commands. |
1144 | + @raise: TypeError on invalid config item type. |
1145 | + """ |
1146 | + warnings = [] |
1147 | + errors = [] |
1148 | + fixed_commands = [] |
1149 | + for command in commands: |
1150 | + if isinstance(command, list): |
1151 | + if command[0] is None: # Avoid warnings by specifying None |
1152 | + command = command[1:] |
1153 | + elif command[0] != base_command: # Automatically prepend |
1154 | + command.insert(0, base_command) |
1155 | + elif isinstance(command, str): |
1156 | + if not command.startswith('%s ' % base_command): |
1157 | + warnings.append(command) |
1158 | + else: |
1159 | + errors.append(str(command)) |
1160 | + continue |
1161 | + fixed_commands.append(command) |
1162 | + |
1163 | + if warnings: |
1164 | + LOG.warning( |
1165 | + 'Non-%s commands in %s config:\n%s', |
1166 | + base_command, base_command, '\n'.join(warnings)) |
1167 | + if errors: |
1168 | + raise TypeError( |
1169 | + 'Invalid {name} config.' |
1170 | + ' These commands are not a string or list:\n{errors}'.format( |
1171 | + name=base_command, errors='\n'.join(errors))) |
1172 | + return fixed_commands |
1173 | + |
1174 | + |
1175 | +# vi: ts=4 expandtab |
1176 | diff --git a/cloudinit/tests/test_subp.py b/cloudinit/tests/test_subp.py |
1177 | new file mode 100644 |
1178 | index 0000000..448097d |
1179 | --- /dev/null |
1180 | +++ b/cloudinit/tests/test_subp.py |
1181 | @@ -0,0 +1,61 @@ |
1182 | +# This file is part of cloud-init. See LICENSE file for license information. |
1183 | + |
1184 | +"""Tests for cloudinit.subp utility functions""" |
1185 | + |
1186 | +from cloudinit import subp |
1187 | +from cloudinit.tests.helpers import CiTestCase |
1188 | + |
1189 | + |
1190 | +class TestPrependBaseCommands(CiTestCase): |
1191 | + |
1192 | + with_logs = True |
1193 | + |
1194 | + def test_prepend_base_command_errors_on_neither_string_nor_list(self): |
1195 | + """Raise an error for each command which is not a string or list.""" |
1196 | + orig_commands = ['ls', 1, {'not': 'gonna work'}, ['basecmd', 'list']] |
1197 | + with self.assertRaises(TypeError) as context_manager: |
1198 | + subp.prepend_base_command( |
1199 | + base_command='basecmd', commands=orig_commands) |
1200 | + self.assertEqual( |
1201 | + "Invalid basecmd config. These commands are not a string or" |
1202 | + " list:\n1\n{'not': 'gonna work'}", |
1203 | + str(context_manager.exception)) |
1204 | + |
1205 | + def test_prepend_base_command_warns_on_non_base_string_commands(self): |
1206 | + """Warn on each non-base for commands of type string.""" |
1207 | + orig_commands = [ |
1208 | + 'ls', 'basecmd list', 'touch /blah', 'basecmd install x'] |
1209 | + fixed_commands = subp.prepend_base_command( |
1210 | + base_command='basecmd', commands=orig_commands) |
1211 | + self.assertEqual( |
1212 | + 'WARNING: Non-basecmd commands in basecmd config:\n' |
1213 | + 'ls\ntouch /blah\n', |
1214 | + self.logs.getvalue()) |
1215 | + self.assertEqual(orig_commands, fixed_commands) |
1216 | + |
1217 | + def test_prepend_base_command_prepends_on_non_base_list_commands(self): |
1218 | + """Prepend 'basecmd' for each non-basecmd command of type list.""" |
1219 | + orig_commands = [['ls'], ['basecmd', 'list'], ['basecmda', '/blah'], |
1220 | + ['basecmd', 'install', 'x']] |
1221 | + expected = [['basecmd', 'ls'], ['basecmd', 'list'], |
1222 | + ['basecmd', 'basecmda', '/blah'], |
1223 | + ['basecmd', 'install', 'x']] |
1224 | + fixed_commands = subp.prepend_base_command( |
1225 | + base_command='basecmd', commands=orig_commands) |
1226 | + self.assertEqual('', self.logs.getvalue()) |
1227 | + self.assertEqual(expected, fixed_commands) |
1228 | + |
1229 | + def test_prepend_base_command_removes_first_item_when_none(self): |
1230 | + """Remove the first element of a non-basecmd when it is None.""" |
1231 | + orig_commands = [[None, 'ls'], ['basecmd', 'list'], |
1232 | + [None, 'touch', '/blah'], |
1233 | + ['basecmd', 'install', 'x']] |
1234 | + expected = [['ls'], ['basecmd', 'list'], |
1235 | + ['touch', '/blah'], |
1236 | + ['basecmd', 'install', 'x']] |
1237 | + fixed_commands = subp.prepend_base_command( |
1238 | + base_command='basecmd', commands=orig_commands) |
1239 | + self.assertEqual('', self.logs.getvalue()) |
1240 | + self.assertEqual(expected, fixed_commands) |
1241 | + |
1242 | +# vi: ts=4 expandtab |
1243 | diff --git a/cloudinit/tests/test_util.py b/cloudinit/tests/test_util.py |
1244 | index d30643d..3f37dbb 100644 |
1245 | --- a/cloudinit/tests/test_util.py |
1246 | +++ b/cloudinit/tests/test_util.py |
1247 | @@ -3,6 +3,7 @@ |
1248 | """Tests for cloudinit.util""" |
1249 | |
1250 | import logging |
1251 | +from textwrap import dedent |
1252 | |
1253 | import cloudinit.util as util |
1254 | |
1255 | @@ -140,4 +141,75 @@ class TestGetHostnameFqdn(CiTestCase): |
1256 | [{'fqdn': True, 'metadata_only': True}, |
1257 | {'metadata_only': True}], mycloud.calls) |
1258 | |
1259 | + |
1260 | +class TestBlkid(CiTestCase): |
1261 | + ids = { |
1262 | + "id01": "1111-1111", |
1263 | + "id02": "22222222-2222", |
1264 | + "id03": "33333333-3333", |
1265 | + "id04": "44444444-4444", |
1266 | + "id05": "55555555-5555-5555-5555-555555555555", |
1267 | + "id06": "66666666-6666-6666-6666-666666666666", |
1268 | + "id07": "52894610484658920398", |
1269 | + "id08": "86753098675309867530", |
1270 | + "id09": "99999999-9999-9999-9999-999999999999", |
1271 | + } |
1272 | + |
1273 | + blkid_out = dedent("""\ |
1274 | + /dev/loop0: TYPE="squashfs" |
1275 | + /dev/loop1: TYPE="squashfs" |
1276 | + /dev/loop2: TYPE="squashfs" |
1277 | + /dev/loop3: TYPE="squashfs" |
1278 | + /dev/sda1: UUID="{id01}" TYPE="vfat" PARTUUID="{id02}" |
1279 | + /dev/sda2: UUID="{id03}" TYPE="ext4" PARTUUID="{id04}" |
1280 | + /dev/sda3: UUID="{id05}" TYPE="ext4" PARTUUID="{id06}" |
1281 | + /dev/sda4: LABEL="default" UUID="{id07}" UUID_SUB="{id08}" """ |
1282 | + """TYPE="zfs_member" PARTUUID="{id09}" |
1283 | + /dev/loop4: TYPE="squashfs" |
1284 | + """) |
1285 | + |
1286 | + maxDiff = None |
1287 | + |
1288 | + def _get_expected(self): |
1289 | + return ({ |
1290 | + "/dev/loop0": {"DEVNAME": "/dev/loop0", "TYPE": "squashfs"}, |
1291 | + "/dev/loop1": {"DEVNAME": "/dev/loop1", "TYPE": "squashfs"}, |
1292 | + "/dev/loop2": {"DEVNAME": "/dev/loop2", "TYPE": "squashfs"}, |
1293 | + "/dev/loop3": {"DEVNAME": "/dev/loop3", "TYPE": "squashfs"}, |
1294 | + "/dev/loop4": {"DEVNAME": "/dev/loop4", "TYPE": "squashfs"}, |
1295 | + "/dev/sda1": {"DEVNAME": "/dev/sda1", "TYPE": "vfat", |
1296 | + "UUID": self.ids["id01"], |
1297 | + "PARTUUID": self.ids["id02"]}, |
1298 | + "/dev/sda2": {"DEVNAME": "/dev/sda2", "TYPE": "ext4", |
1299 | + "UUID": self.ids["id03"], |
1300 | + "PARTUUID": self.ids["id04"]}, |
1301 | + "/dev/sda3": {"DEVNAME": "/dev/sda3", "TYPE": "ext4", |
1302 | + "UUID": self.ids["id05"], |
1303 | + "PARTUUID": self.ids["id06"]}, |
1304 | + "/dev/sda4": {"DEVNAME": "/dev/sda4", "TYPE": "zfs_member", |
1305 | + "LABEL": "default", |
1306 | + "UUID": self.ids["id07"], |
1307 | + "UUID_SUB": self.ids["id08"], |
1308 | + "PARTUUID": self.ids["id09"]}, |
1309 | + }) |
1310 | + |
1311 | + @mock.patch("cloudinit.util.subp") |
1312 | + def test_functional_blkid(self, m_subp): |
1313 | + m_subp.return_value = ( |
1314 | + self.blkid_out.format(**self.ids), "") |
1315 | + self.assertEqual(self._get_expected(), util.blkid()) |
1316 | + m_subp.assert_called_with(["blkid", "-o", "full"], capture=True, |
1317 | + decode="replace") |
1318 | + |
1319 | + @mock.patch("cloudinit.util.subp") |
1320 | + def test_blkid_no_cache_uses_no_cache(self, m_subp): |
1321 | + """blkid should turn off cache if disable_cache is true.""" |
1322 | + m_subp.return_value = ( |
1323 | + self.blkid_out.format(**self.ids), "") |
1324 | + self.assertEqual(self._get_expected(), |
1325 | + util.blkid(disable_cache=True)) |
1326 | + m_subp.assert_called_with(["blkid", "-o", "full", "-c", "/dev/null"], |
1327 | + capture=True, decode="replace") |
1328 | + |
1329 | + |
1330 | # vi: ts=4 expandtab |
1331 | diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py |
1332 | index 4e814a5..03a573a 100644 |
1333 | --- a/cloudinit/url_helper.py |
1334 | +++ b/cloudinit/url_helper.py |
1335 | @@ -16,7 +16,7 @@ import time |
1336 | |
1337 | from email.utils import parsedate |
1338 | from functools import partial |
1339 | - |
1340 | +from itertools import count |
1341 | from requests import exceptions |
1342 | |
1343 | from six.moves.urllib.parse import ( |
1344 | @@ -172,7 +172,7 @@ def _get_ssl_args(url, ssl_details): |
1345 | def readurl(url, data=None, timeout=None, retries=0, sec_between=1, |
1346 | headers=None, headers_cb=None, ssl_details=None, |
1347 | check_status=True, allow_redirects=True, exception_cb=None, |
1348 | - session=None): |
1349 | + session=None, infinite=False): |
1350 | url = _cleanurl(url) |
1351 | req_args = { |
1352 | 'url': url, |
1353 | @@ -220,7 +220,8 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, |
1354 | excps = [] |
1355 | # Handle retrying ourselves since the built-in support |
1356 | # doesn't handle sleeping between tries... |
1357 | - for i in range(0, manual_tries): |
1358 | + # Infinitely retry if infinite is True |
1359 | + for i in count() if infinite else range(0, manual_tries): |
1360 | req_args['headers'] = headers_cb(url) |
1361 | filtered_req_args = {} |
1362 | for (k, v) in req_args.items(): |
1363 | @@ -229,7 +230,8 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, |
1364 | filtered_req_args[k] = v |
1365 | try: |
1366 | LOG.debug("[%s/%s] open '%s' with %s configuration", i, |
1367 | - manual_tries, url, filtered_req_args) |
1368 | + "infinite" if infinite else manual_tries, url, |
1369 | + filtered_req_args) |
1370 | |
1371 | if session is None: |
1372 | session = requests.Session() |
1373 | @@ -258,11 +260,13 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1, |
1374 | # ssl exceptions are not going to get fixed by waiting a |
1375 | # few seconds |
1376 | break |
1377 | - if exception_cb and exception_cb(req_args.copy(), excps[-1]): |
1378 | - # if an exception callback was given it should return None |
1379 | - # a true-ish value means to break and re-raise the exception |
1380 | + if exception_cb and not exception_cb(req_args.copy(), excps[-1]): |
1381 | + # if an exception callback was given, it should return True |
1382 | + # to continue retrying and False to break and re-raise the |
1383 | + # exception |
1384 | break |
1385 | - if i + 1 < manual_tries and sec_between > 0: |
1386 | + if (infinite and sec_between > 0) or \ |
1387 | + (i + 1 < manual_tries and sec_between > 0): |
1388 | LOG.debug("Please wait %s seconds while we wait to try again", |
1389 | sec_between) |
1390 | time.sleep(sec_between) |
1391 | diff --git a/cloudinit/util.py b/cloudinit/util.py |
1392 | index cae8b19..fb4ee5f 100644 |
1393 | --- a/cloudinit/util.py |
1394 | +++ b/cloudinit/util.py |
1395 | @@ -1237,6 +1237,37 @@ def find_devs_with(criteria=None, oformat='device', |
1396 | return entries |
1397 | |
1398 | |
1399 | +def blkid(devs=None, disable_cache=False): |
1400 | + """Get all device tags details from blkid. |
1401 | + |
1402 | + @param devs: Optional list of device paths you wish to query. |
1403 | + @param disable_cache: Bool, set True to start with clean cache. |
1404 | + |
1405 | + @return: Dict of key value pairs of info for the device. |
1406 | + """ |
1407 | + if devs is None: |
1408 | + devs = [] |
1409 | + else: |
1410 | + devs = list(devs) |
1411 | + |
1412 | + cmd = ['blkid', '-o', 'full'] |
1413 | + if disable_cache: |
1414 | + cmd.extend(['-c', '/dev/null']) |
1415 | + cmd.extend(devs) |
1416 | + |
1417 | + # we have to decode with 'replace' as shelx.split (called by |
1418 | + # load_shell_content) can't take bytes. So this is potentially |
1419 | + # lossy of non-utf-8 chars in blkid output. |
1420 | + out, _ = subp(cmd, capture=True, decode="replace") |
1421 | + ret = {} |
1422 | + for line in out.splitlines(): |
1423 | + dev, _, data = line.partition(":") |
1424 | + ret[dev] = load_shell_content(data) |
1425 | + ret[dev]["DEVNAME"] = dev |
1426 | + |
1427 | + return ret |
1428 | + |
1429 | + |
1430 | def peek_file(fname, max_bytes): |
1431 | LOG.debug("Peeking at %s (max_bytes=%s)", fname, max_bytes) |
1432 | with open(fname, 'rb') as ifh: |
1433 | diff --git a/config/cloud.cfg.tmpl b/config/cloud.cfg.tmpl |
1434 | index 56a34fa..3129d4e 100644 |
1435 | --- a/config/cloud.cfg.tmpl |
1436 | +++ b/config/cloud.cfg.tmpl |
1437 | @@ -87,6 +87,9 @@ cloud_config_modules: |
1438 | - apt-pipelining |
1439 | - apt-configure |
1440 | {% endif %} |
1441 | +{% if variant in ["ubuntu"] %} |
1442 | + - ubuntu-advantage |
1443 | +{% endif %} |
1444 | {% if variant in ["suse"] %} |
1445 | - zypper-add-repo |
1446 | {% endif %} |
1447 | diff --git a/debian/changelog b/debian/changelog |
1448 | index c333306..9da09db 100644 |
1449 | --- a/debian/changelog |
1450 | +++ b/debian/changelog |
1451 | @@ -1,3 +1,22 @@ |
1452 | +cloud-init (18.1-35-ge0f644b7-0ubuntu1) bionic; urgency=medium |
1453 | + |
1454 | + * New upstream snapshot. |
1455 | + - IBMCloud: Initial IBM Cloud datasource. |
1456 | + - tests: remove jsonschema from xenial tox environment. |
1457 | + - tests: Fix newly added schema unit tests to skip if no jsonschema. |
1458 | + - ec2: Adjust ec2 datasource after exception_cb change. |
1459 | + - Reduce AzurePreprovisioning HTTP timeouts. |
1460 | + [Douglas Jordan] (LP: #1752977) |
1461 | + - Revert the logic of exception_cb in read_url. |
1462 | + [Kurt Garloff] (LP: #1702160, #1298921) |
1463 | + - ubuntu-advantage: Add new config module to support |
1464 | + ubuntu-advantage-tools |
1465 | + - Handle global dns entries in netplan (LP: #1750884) |
1466 | + - Identify OpenTelekomCloud Xen as OpenStack DS. |
1467 | + [Kurt Garloff] (LP: #1756471) |
1468 | + |
1469 | + -- Chad Smith <chad.smith@canonical.com> Fri, 23 Mar 2018 17:21:37 -0600 |
1470 | + |
1471 | cloud-init (18.1-26-g685f9901-0ubuntu1) bionic; urgency=medium |
1472 | |
1473 | * debian/cloud-init.templates: Enable Hetzner Cloud datasource. |
1474 | diff --git a/doc/rtd/topics/modules.rst b/doc/rtd/topics/modules.rst |
1475 | index a0f6812..d9720f6 100644 |
1476 | --- a/doc/rtd/topics/modules.rst |
1477 | +++ b/doc/rtd/topics/modules.rst |
1478 | @@ -53,6 +53,7 @@ Modules |
1479 | .. automodule:: cloudinit.config.cc_ssh_authkey_fingerprints |
1480 | .. automodule:: cloudinit.config.cc_ssh_import_id |
1481 | .. automodule:: cloudinit.config.cc_timezone |
1482 | +.. automodule:: cloudinit.config.cc_ubuntu_advantage |
1483 | .. automodule:: cloudinit.config.cc_update_etc_hosts |
1484 | .. automodule:: cloudinit.config.cc_update_hostname |
1485 | .. automodule:: cloudinit.config.cc_users_groups |
1486 | diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py |
1487 | index da7da0c..3e8b791 100644 |
1488 | --- a/tests/unittests/test_datasource/test_azure.py |
1489 | +++ b/tests/unittests/test_datasource/test_azure.py |
1490 | @@ -1177,7 +1177,8 @@ class TestAzureDataSourcePreprovisioning(CiTestCase): |
1491 | url = 'http://{0}/metadata/reprovisiondata?api-version=2017-04-02' |
1492 | host = "169.254.169.254" |
1493 | full_url = url.format(host) |
1494 | - fake_resp.return_value = mock.MagicMock(status_code=200, text="ovf") |
1495 | + fake_resp.return_value = mock.MagicMock(status_code=200, text="ovf", |
1496 | + content="ovf") |
1497 | dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths) |
1498 | self.assertTrue(len(dsa._poll_imds()) > 0) |
1499 | self.assertEqual(fake_resp.call_args_list, |
1500 | @@ -1185,13 +1186,8 @@ class TestAzureDataSourcePreprovisioning(CiTestCase): |
1501 | headers={'Metadata': 'true', |
1502 | 'User-Agent': |
1503 | 'Cloud-Init/%s' % vs() |
1504 | - }, method='GET', timeout=60.0, |
1505 | - url=full_url), |
1506 | - mock.call(allow_redirects=True, |
1507 | - headers={'Metadata': 'true', |
1508 | - 'User-Agent': |
1509 | - 'Cloud-Init/%s' % vs() |
1510 | - }, method='GET', url=full_url)]) |
1511 | + }, method='GET', timeout=1, |
1512 | + url=full_url)]) |
1513 | self.assertEqual(m_dhcp.call_count, 1) |
1514 | m_net.assert_any_call( |
1515 | broadcast='192.168.2.255', interface='eth9', ip='192.168.2.9', |
1516 | @@ -1217,7 +1213,8 @@ class TestAzureDataSourcePreprovisioning(CiTestCase): |
1517 | username = "myuser" |
1518 | odata = {'HostName': hostname, 'UserName': username} |
1519 | content = construct_valid_ovf_env(data=odata) |
1520 | - fake_resp.return_value = mock.MagicMock(status_code=200, text=content) |
1521 | + fake_resp.return_value = mock.MagicMock(status_code=200, text=content, |
1522 | + content=content) |
1523 | dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths) |
1524 | md, ud, cfg, d = dsa._reprovision() |
1525 | self.assertEqual(md['local-hostname'], hostname) |
1526 | @@ -1227,12 +1224,7 @@ class TestAzureDataSourcePreprovisioning(CiTestCase): |
1527 | headers={'Metadata': 'true', |
1528 | 'User-Agent': |
1529 | 'Cloud-Init/%s' % vs()}, |
1530 | - method='GET', timeout=60.0, url=full_url), |
1531 | - mock.call(allow_redirects=True, |
1532 | - headers={'Metadata': 'true', |
1533 | - 'User-Agent': |
1534 | - 'Cloud-Init/%s' % vs()}, |
1535 | - method='GET', url=full_url)]) |
1536 | + method='GET', timeout=1, url=full_url)]) |
1537 | self.assertEqual(m_dhcp.call_count, 1) |
1538 | m_net.assert_any_call( |
1539 | broadcast='192.168.2.255', interface='eth9', ip='192.168.2.9', |
1540 | diff --git a/tests/unittests/test_datasource/test_ibmcloud.py b/tests/unittests/test_datasource/test_ibmcloud.py |
1541 | new file mode 100644 |
1542 | index 0000000..621cfe4 |
1543 | --- /dev/null |
1544 | +++ b/tests/unittests/test_datasource/test_ibmcloud.py |
1545 | @@ -0,0 +1,262 @@ |
1546 | +# This file is part of cloud-init. See LICENSE file for license information. |
1547 | + |
1548 | +from cloudinit.sources import DataSourceIBMCloud as ibm |
1549 | +from cloudinit.tests import helpers as test_helpers |
1550 | + |
1551 | +import base64 |
1552 | +import copy |
1553 | +import json |
1554 | +import mock |
1555 | +from textwrap import dedent |
1556 | + |
1557 | +D_PATH = "cloudinit.sources.DataSourceIBMCloud." |
1558 | + |
1559 | + |
1560 | +class TestIBMCloud(test_helpers.CiTestCase): |
1561 | + """Test the datasource.""" |
1562 | + def setUp(self): |
1563 | + super(TestIBMCloud, self).setUp() |
1564 | + pass |
1565 | + |
1566 | + |
1567 | +@mock.patch(D_PATH + "_is_xen", return_value=True) |
1568 | +@mock.patch(D_PATH + "_is_ibm_provisioning") |
1569 | +@mock.patch(D_PATH + "util.blkid") |
1570 | +class TestGetIBMPlatform(test_helpers.CiTestCase): |
1571 | + """Test the get_ibm_platform helper.""" |
1572 | + |
1573 | + blkid_base = { |
1574 | + "/dev/xvda1": { |
1575 | + "DEVNAME": "/dev/xvda1", "LABEL": "cloudimg-bootfs", |
1576 | + "TYPE": "ext3"}, |
1577 | + "/dev/xvda2": { |
1578 | + "DEVNAME": "/dev/xvda2", "LABEL": "cloudimg-rootfs", |
1579 | + "TYPE": "ext4"}, |
1580 | + } |
1581 | + |
1582 | + blkid_metadata_disk = { |
1583 | + "/dev/xvdh1": { |
1584 | + "DEVNAME": "/dev/xvdh1", "LABEL": "METADATA", "TYPE": "vfat", |
1585 | + "SEC_TYPE": "msdos", "UUID": "681B-8C5D", |
1586 | + "PARTUUID": "3d631e09-01"}, |
1587 | + } |
1588 | + |
1589 | + blkid_oscode_disk = { |
1590 | + "/dev/xvdh": { |
1591 | + "DEVNAME": "/dev/xvdh", "LABEL": "config-2", "TYPE": "vfat", |
1592 | + "SEC_TYPE": "msdos", "UUID": ibm.IBM_CONFIG_UUID} |
1593 | + } |
1594 | + |
1595 | + def setUp(self): |
1596 | + self.blkid_metadata = copy.deepcopy(self.blkid_base) |
1597 | + self.blkid_metadata.update(copy.deepcopy(self.blkid_metadata_disk)) |
1598 | + |
1599 | + self.blkid_oscode = copy.deepcopy(self.blkid_base) |
1600 | + self.blkid_oscode.update(copy.deepcopy(self.blkid_oscode_disk)) |
1601 | + |
1602 | + def test_id_template_live_metadata(self, m_blkid, m_is_prov, _m_xen): |
1603 | + """identify TEMPLATE_LIVE_METADATA.""" |
1604 | + m_blkid.return_value = self.blkid_metadata |
1605 | + m_is_prov.return_value = False |
1606 | + self.assertEqual( |
1607 | + (ibm.Platforms.TEMPLATE_LIVE_METADATA, "/dev/xvdh1"), |
1608 | + ibm.get_ibm_platform()) |
1609 | + |
1610 | + def test_id_template_prov_metadata(self, m_blkid, m_is_prov, _m_xen): |
1611 | + """identify TEMPLATE_PROVISIONING_METADATA.""" |
1612 | + m_blkid.return_value = self.blkid_metadata |
1613 | + m_is_prov.return_value = True |
1614 | + self.assertEqual( |
1615 | + (ibm.Platforms.TEMPLATE_PROVISIONING_METADATA, "/dev/xvdh1"), |
1616 | + ibm.get_ibm_platform()) |
1617 | + |
1618 | + def test_id_template_prov_nodata(self, m_blkid, m_is_prov, _m_xen): |
1619 | + """identify TEMPLATE_PROVISIONING_NODATA.""" |
1620 | + m_blkid.return_value = self.blkid_base |
1621 | + m_is_prov.return_value = True |
1622 | + self.assertEqual( |
1623 | + (ibm.Platforms.TEMPLATE_PROVISIONING_NODATA, None), |
1624 | + ibm.get_ibm_platform()) |
1625 | + |
1626 | + def test_id_os_code(self, m_blkid, m_is_prov, _m_xen): |
1627 | + """Identify OS_CODE.""" |
1628 | + m_blkid.return_value = self.blkid_oscode |
1629 | + m_is_prov.return_value = False |
1630 | + self.assertEqual((ibm.Platforms.OS_CODE, "/dev/xvdh"), |
1631 | + ibm.get_ibm_platform()) |
1632 | + |
1633 | + def test_id_os_code_must_match_uuid(self, m_blkid, m_is_prov, _m_xen): |
1634 | + """Test against false positive on openstack with non-ibm UUID.""" |
1635 | + blkid = self.blkid_oscode |
1636 | + blkid["/dev/xvdh"]["UUID"] = "9999-9999" |
1637 | + m_blkid.return_value = blkid |
1638 | + m_is_prov.return_value = False |
1639 | + self.assertEqual((None, None), ibm.get_ibm_platform()) |
1640 | + |
1641 | + |
1642 | +@mock.patch(D_PATH + "_read_system_uuid", return_value=None) |
1643 | +@mock.patch(D_PATH + "get_ibm_platform") |
1644 | +class TestReadMD(test_helpers.CiTestCase): |
1645 | + """Test the read_datasource helper.""" |
1646 | + |
1647 | + template_md = { |
1648 | + "files": [], |
1649 | + "network_config": {"content_path": "/content/interfaces"}, |
1650 | + "hostname": "ci-fond-ram", |
1651 | + "name": "ci-fond-ram", |
1652 | + "domain": "testing.ci.cloud-init.org", |
1653 | + "meta": {"dsmode": "net"}, |
1654 | + "uuid": "8e636730-9f5d-c4a5-327c-d7123c46e82f", |
1655 | + "public_keys": {"1091307": "ssh-rsa AAAAB3NzaC1...Hw== ci-pubkey"}, |
1656 | + } |
1657 | + |
1658 | + oscode_md = { |
1659 | + "hostname": "ci-grand-gannet.testing.ci.cloud-init.org", |
1660 | + "name": "ci-grand-gannet", |
1661 | + "uuid": "2f266908-8e6c-4818-9b5c-42e9cc66a785", |
1662 | + "random_seed": "bm90LXJhbmRvbQo=", |
1663 | + "crypt_key": "ssh-rsa AAAAB3NzaC1yc2..n6z/", |
1664 | + "configuration_token": "eyJhbGciOi..M3ZA", |
1665 | + "public_keys": {"1091307": "ssh-rsa AAAAB3N..Hw== ci-pubkey"}, |
1666 | + } |
1667 | + |
1668 | + content_interfaces = dedent("""\ |
1669 | + auto lo |
1670 | + iface lo inet loopback |
1671 | + |
1672 | + auto eth0 |
1673 | + allow-hotplug eth0 |
1674 | + iface eth0 inet static |
1675 | + address 10.82.43.5 |
1676 | + netmask 255.255.255.192 |
1677 | + """) |
1678 | + |
1679 | + userdata = b"#!/bin/sh\necho hi mom\n" |
1680 | + # meta.js file gets json encoded userdata as a list. |
1681 | + meta_js = '["#!/bin/sh\necho hi mom\n"]' |
1682 | + vendor_data = { |
1683 | + "cloud-init": "#!/bin/bash\necho 'root:$6$5ab01p1m1' | chpasswd -e"} |
1684 | + |
1685 | + network_data = { |
1686 | + "links": [ |
1687 | + {"id": "interface_29402281", "name": "eth0", "mtu": None, |
1688 | + "type": "phy", "ethernet_mac_address": "06:00:f1:bd:da:25"}, |
1689 | + {"id": "interface_29402279", "name": "eth1", "mtu": None, |
1690 | + "type": "phy", "ethernet_mac_address": "06:98:5e:d0:7f:86"} |
1691 | + ], |
1692 | + "networks": [ |
1693 | + {"id": "network_109887563", "link": "interface_29402281", |
1694 | + "type": "ipv4", "ip_address": "10.82.43.2", |
1695 | + "netmask": "255.255.255.192", |
1696 | + "routes": [ |
1697 | + {"network": "10.0.0.0", "netmask": "255.0.0.0", |
1698 | + "gateway": "10.82.43.1"}, |
1699 | + {"network": "161.26.0.0", "netmask": "255.255.0.0", |
1700 | + "gateway": "10.82.43.1"}]}, |
1701 | + {"id": "network_109887551", "link": "interface_29402279", |
1702 | + "type": "ipv4", "ip_address": "108.168.194.252", |
1703 | + "netmask": "255.255.255.248", |
1704 | + "routes": [ |
1705 | + {"network": "0.0.0.0", "netmask": "0.0.0.0", |
1706 | + "gateway": "108.168.194.249"}]} |
1707 | + ], |
1708 | + "services": [ |
1709 | + {"type": "dns", "address": "10.0.80.11"}, |
1710 | + {"type": "dns", "address": "10.0.80.12"} |
1711 | + ], |
1712 | + } |
1713 | + |
1714 | + sysuuid = '7f79ebf5-d791-43c3-a723-854e8389d59f' |
1715 | + |
1716 | + def _get_expected_metadata(self, os_md): |
1717 | + """return expected 'metadata' for data loaded from meta_data.json.""" |
1718 | + os_md = copy.deepcopy(os_md) |
1719 | + renames = ( |
1720 | + ('hostname', 'local-hostname'), |
1721 | + ('uuid', 'instance-id'), |
1722 | + ('public_keys', 'public-keys')) |
1723 | + ret = {} |
1724 | + for osname, mdname in renames: |
1725 | + if osname in os_md: |
1726 | + ret[mdname] = os_md[osname] |
1727 | + if 'random_seed' in os_md: |
1728 | + ret['random_seed'] = base64.b64decode(os_md['random_seed']) |
1729 | + |
1730 | + return ret |
1731 | + |
1732 | + def test_provisioning_md(self, m_platform, m_sysuuid): |
1733 | + """Provisioning env with a metadata disk should return None.""" |
1734 | + m_platform.return_value = ( |
1735 | + ibm.Platforms.TEMPLATE_PROVISIONING_METADATA, "/dev/xvdh") |
1736 | + self.assertIsNone(ibm.read_md()) |
1737 | + |
1738 | + def test_provisioning_no_metadata(self, m_platform, m_sysuuid): |
1739 | + """Provisioning env with no metadata disk should return None.""" |
1740 | + m_platform.return_value = ( |
1741 | + ibm.Platforms.TEMPLATE_PROVISIONING_NODATA, None) |
1742 | + self.assertIsNone(ibm.read_md()) |
1743 | + |
1744 | + def test_provisioning_not_ibm(self, m_platform, m_sysuuid): |
1745 | + """Provisioning env but not identified as IBM should return None.""" |
1746 | + m_platform.return_value = (None, None) |
1747 | + self.assertIsNone(ibm.read_md()) |
1748 | + |
1749 | + def test_template_live(self, m_platform, m_sysuuid): |
1750 | + """Template live environment should be identified.""" |
1751 | + tmpdir = self.tmp_dir() |
1752 | + m_platform.return_value = ( |
1753 | + ibm.Platforms.TEMPLATE_LIVE_METADATA, tmpdir) |
1754 | + m_sysuuid.return_value = self.sysuuid |
1755 | + |
1756 | + test_helpers.populate_dir(tmpdir, { |
1757 | + 'openstack/latest/meta_data.json': json.dumps(self.template_md), |
1758 | + 'openstack/latest/user_data': self.userdata, |
1759 | + 'openstack/content/interfaces': self.content_interfaces, |
1760 | + 'meta.js': self.meta_js}) |
1761 | + |
1762 | + ret = ibm.read_md() |
1763 | + self.assertEqual(ibm.Platforms.TEMPLATE_LIVE_METADATA, |
1764 | + ret['platform']) |
1765 | + self.assertEqual(tmpdir, ret['source']) |
1766 | + self.assertEqual(self.userdata, ret['userdata']) |
1767 | + self.assertEqual(self._get_expected_metadata(self.template_md), |
1768 | + ret['metadata']) |
1769 | + self.assertEqual(self.sysuuid, ret['system-uuid']) |
1770 | + |
1771 | + def test_os_code_live(self, m_platform, m_sysuuid): |
1772 | + """Verify an os_code metadata path.""" |
1773 | + tmpdir = self.tmp_dir() |
1774 | + m_platform.return_value = (ibm.Platforms.OS_CODE, tmpdir) |
1775 | + netdata = json.dumps(self.network_data) |
1776 | + test_helpers.populate_dir(tmpdir, { |
1777 | + 'openstack/latest/meta_data.json': json.dumps(self.oscode_md), |
1778 | + 'openstack/latest/user_data': self.userdata, |
1779 | + 'openstack/latest/vendor_data.json': json.dumps(self.vendor_data), |
1780 | + 'openstack/latest/network_data.json': netdata, |
1781 | + }) |
1782 | + |
1783 | + ret = ibm.read_md() |
1784 | + self.assertEqual(ibm.Platforms.OS_CODE, ret['platform']) |
1785 | + self.assertEqual(tmpdir, ret['source']) |
1786 | + self.assertEqual(self.userdata, ret['userdata']) |
1787 | + self.assertEqual(self._get_expected_metadata(self.oscode_md), |
1788 | + ret['metadata']) |
1789 | + |
1790 | + def test_os_code_live_no_userdata(self, m_platform, m_sysuuid): |
1791 | + """Verify os_code without user-data.""" |
1792 | + tmpdir = self.tmp_dir() |
1793 | + m_platform.return_value = (ibm.Platforms.OS_CODE, tmpdir) |
1794 | + test_helpers.populate_dir(tmpdir, { |
1795 | + 'openstack/latest/meta_data.json': json.dumps(self.oscode_md), |
1796 | + 'openstack/latest/vendor_data.json': json.dumps(self.vendor_data), |
1797 | + }) |
1798 | + |
1799 | + ret = ibm.read_md() |
1800 | + self.assertEqual(ibm.Platforms.OS_CODE, ret['platform']) |
1801 | + self.assertEqual(tmpdir, ret['source']) |
1802 | + self.assertIsNone(ret['userdata']) |
1803 | + self.assertEqual(self._get_expected_metadata(self.oscode_md), |
1804 | + ret['metadata']) |
1805 | + |
1806 | + |
1807 | +# vi: ts=4 expandtab |
1808 | diff --git a/tests/unittests/test_ds_identify.py b/tests/unittests/test_ds_identify.py |
1809 | index 9c5628e..5364398 100644 |
1810 | --- a/tests/unittests/test_ds_identify.py |
1811 | +++ b/tests/unittests/test_ds_identify.py |
1812 | @@ -9,6 +9,8 @@ from cloudinit import util |
1813 | from cloudinit.tests.helpers import ( |
1814 | CiTestCase, dir2dict, populate_dir) |
1815 | |
1816 | +from cloudinit.sources import DataSourceIBMCloud as dsibm |
1817 | + |
1818 | UNAME_MYSYS = ("Linux bart 4.4.0-62-generic #83-Ubuntu " |
1819 | "SMP Wed Jan 18 14:10:15 UTC 2017 x86_64 GNU/Linux") |
1820 | UNAME_PPC64EL = ("Linux diamond 4.4.0-83-generic #106-Ubuntu SMP " |
1821 | @@ -37,8 +39,8 @@ BLKID_UEFI_UBUNTU = [ |
1822 | |
1823 | POLICY_FOUND_ONLY = "search,found=all,maybe=none,notfound=disabled" |
1824 | POLICY_FOUND_OR_MAYBE = "search,found=all,maybe=all,notfound=disabled" |
1825 | -DI_DEFAULT_POLICY = "search,found=all,maybe=all,notfound=enabled" |
1826 | -DI_DEFAULT_POLICY_NO_DMI = "search,found=all,maybe=all,notfound=disabled" |
1827 | +DI_DEFAULT_POLICY = "search,found=all,maybe=all,notfound=disabled" |
1828 | +DI_DEFAULT_POLICY_NO_DMI = "search,found=all,maybe=all,notfound=enabled" |
1829 | DI_EC2_STRICT_ID_DEFAULT = "true" |
1830 | OVF_MATCH_STRING = 'http://schemas.dmtf.org/ovf/environment/1' |
1831 | |
1832 | @@ -64,8 +66,12 @@ P_SYS_VENDOR = "sys/class/dmi/id/sys_vendor" |
1833 | P_SEED_DIR = "var/lib/cloud/seed" |
1834 | P_DSID_CFG = "etc/cloud/ds-identify.cfg" |
1835 | |
1836 | +IBM_PROVISIONING_CHECK_PATH = "/root/provisioningConfiguration.cfg" |
1837 | +IBM_CONFIG_UUID = "9796-932E" |
1838 | + |
1839 | MOCK_VIRT_IS_KVM = {'name': 'detect_virt', 'RET': 'kvm', 'ret': 0} |
1840 | MOCK_VIRT_IS_VMWARE = {'name': 'detect_virt', 'RET': 'vmware', 'ret': 0} |
1841 | +MOCK_VIRT_IS_XEN = {'name': 'detect_virt', 'RET': 'xen', 'ret': 0} |
1842 | MOCK_UNAME_IS_PPC64 = {'name': 'uname', 'out': UNAME_PPC64EL, 'ret': 0} |
1843 | |
1844 | |
1845 | @@ -238,6 +244,57 @@ class TestDsIdentify(CiTestCase): |
1846 | self._test_ds_found('ConfigDriveUpper') |
1847 | return |
1848 | |
1849 | + def test_ibmcloud_template_userdata_in_provisioning(self): |
1850 | + """Template provisioned with user-data during provisioning stage. |
1851 | + |
1852 | + Template provisioning with user-data has METADATA disk, |
1853 | + datasource should return not found.""" |
1854 | + data = copy.deepcopy(VALID_CFG['IBMCloud-metadata']) |
1855 | + data['files'] = {IBM_PROVISIONING_CHECK_PATH: 'xxx'} |
1856 | + return self._check_via_dict(data, RC_NOT_FOUND) |
1857 | + |
1858 | + def test_ibmcloud_template_userdata(self): |
1859 | + """Template provisioned with user-data first boot. |
1860 | + |
1861 | + Template provisioning with user-data has METADATA disk. |
1862 | + datasource should return found.""" |
1863 | + self._test_ds_found('IBMCloud-metadata') |
1864 | + |
1865 | + def test_ibmcloud_template_no_userdata_in_provisioning(self): |
1866 | + """Template provisioned with no user-data during provisioning. |
1867 | + |
1868 | + no disks attached. Datasource should return not found.""" |
1869 | + data = copy.deepcopy(VALID_CFG['IBMCloud-nodisks']) |
1870 | + data['files'] = {IBM_PROVISIONING_CHECK_PATH: 'xxx'} |
1871 | + return self._check_via_dict(data, RC_NOT_FOUND) |
1872 | + |
1873 | + def test_ibmcloud_template_no_userdata(self): |
1874 | + """Template provisioned with no user-data first boot. |
1875 | + |
1876 | + no disks attached. Datasource should return found.""" |
1877 | + self._check_via_dict(VALID_CFG['IBMCloud-nodisks'], RC_NOT_FOUND) |
1878 | + |
1879 | + def test_ibmcloud_os_code(self): |
1880 | + """Launched by os code always has config-2 disk.""" |
1881 | + self._test_ds_found('IBMCloud-config-2') |
1882 | + |
1883 | + def test_ibmcloud_os_code_different_uuid(self): |
1884 | + """IBM cloud config-2 disks must be explicit match on UUID. |
1885 | + |
1886 | + If the UUID is not 9796-932E then we actually expect ConfigDrive.""" |
1887 | + data = copy.deepcopy(VALID_CFG['IBMCloud-config-2']) |
1888 | + offset = None |
1889 | + for m, d in enumerate(data['mocks']): |
1890 | + if d.get('name') == "blkid": |
1891 | + offset = m |
1892 | + break |
1893 | + if not offset: |
1894 | + raise ValueError("Expected to find 'blkid' mock, but did not.") |
1895 | + data['mocks'][offset]['out'] = d['out'].replace(dsibm.IBM_CONFIG_UUID, |
1896 | + "DEAD-BEEF") |
1897 | + self._check_via_dict( |
1898 | + data, rc=RC_FOUND, dslist=['ConfigDrive', DS_NONE]) |
1899 | + |
1900 | def test_policy_disabled(self): |
1901 | """A Builtin policy of 'disabled' should return not found. |
1902 | |
1903 | @@ -291,6 +348,10 @@ class TestDsIdentify(CiTestCase): |
1904 | """On Intel, openstack must be identified.""" |
1905 | self._test_ds_found('OpenStack') |
1906 | |
1907 | + def test_openstack_open_telekom_cloud(self): |
1908 | + """Open Telecom identification.""" |
1909 | + self._test_ds_found('OpenStack-OpenTelekom') |
1910 | + |
1911 | def test_openstack_on_non_intel_is_maybe(self): |
1912 | """On non-Intel, openstack without dmi info is maybe. |
1913 | |
1914 | @@ -447,7 +508,7 @@ VALID_CFG = { |
1915 | }, |
1916 | 'Ec2-xen': { |
1917 | 'ds': 'Ec2', |
1918 | - 'mocks': [{'name': 'detect_virt', 'RET': 'xen', 'ret': 0}], |
1919 | + 'mocks': [MOCK_VIRT_IS_XEN], |
1920 | 'files': { |
1921 | 'sys/hypervisor/uuid': 'ec2c6e2f-5fac-4fc7-9c82-74127ec14bbb\n' |
1922 | }, |
1923 | @@ -502,6 +563,12 @@ VALID_CFG = { |
1924 | 'policy_dmi': POLICY_FOUND_ONLY, |
1925 | 'policy_no_dmi': POLICY_FOUND_ONLY, |
1926 | }, |
1927 | + 'OpenStack-OpenTelekom': { |
1928 | + # OTC gen1 (Xen) hosts use OpenStack datasource, LP: #1756471 |
1929 | + 'ds': 'OpenStack', |
1930 | + 'files': {P_CHASSIS_ASSET_TAG: 'OpenTelekomCloud\n'}, |
1931 | + 'mocks': [MOCK_VIRT_IS_XEN], |
1932 | + }, |
1933 | 'OVF-seed': { |
1934 | 'ds': 'OVF', |
1935 | 'files': { |
1936 | @@ -568,6 +635,48 @@ VALID_CFG = { |
1937 | 'ds': 'Hetzner', |
1938 | 'files': {P_SYS_VENDOR: 'Hetzner\n'}, |
1939 | }, |
1940 | + 'IBMCloud-metadata': { |
1941 | + 'ds': 'IBMCloud', |
1942 | + 'mocks': [ |
1943 | + MOCK_VIRT_IS_XEN, |
1944 | + {'name': 'blkid', 'ret': 0, |
1945 | + 'out': blkid_out( |
1946 | + [{'DEVNAME': 'xvda1', 'TYPE': 'vfat', 'PARTUUID': uuid4()}, |
1947 | + {'DEVNAME': 'xvda2', 'TYPE': 'ext4', |
1948 | + 'LABEL': 'cloudimg-rootfs', 'PARTUUID': uuid4()}, |
1949 | + {'DEVNAME': 'xvdb', 'TYPE': 'vfat', 'LABEL': 'METADATA'}]), |
1950 | + }, |
1951 | + ], |
1952 | + }, |
1953 | + 'IBMCloud-config-2': { |
1954 | + 'ds': 'IBMCloud', |
1955 | + 'mocks': [ |
1956 | + MOCK_VIRT_IS_XEN, |
1957 | + {'name': 'blkid', 'ret': 0, |
1958 | + 'out': blkid_out( |
1959 | + [{'DEVNAME': 'xvda1', 'TYPE': 'ext3', 'PARTUUID': uuid4(), |
1960 | + 'UUID': uuid4(), 'LABEL': 'cloudimg-bootfs'}, |
1961 | + {'DEVNAME': 'xvdb', 'TYPE': 'vfat', 'LABEL': 'config-2', |
1962 | + 'UUID': dsibm.IBM_CONFIG_UUID}, |
1963 | + {'DEVNAME': 'xvda2', 'TYPE': 'ext4', |
1964 | + 'LABEL': 'cloudimg-rootfs', 'PARTUUID': uuid4(), |
1965 | + 'UUID': uuid4()}, |
1966 | + ]), |
1967 | + }, |
1968 | + ], |
1969 | + }, |
1970 | + 'IBMCloud-nodisks': { |
1971 | + 'ds': 'IBMCloud', |
1972 | + 'mocks': [ |
1973 | + MOCK_VIRT_IS_XEN, |
1974 | + {'name': 'blkid', 'ret': 0, |
1975 | + 'out': blkid_out( |
1976 | + [{'DEVNAME': 'xvda1', 'TYPE': 'vfat', 'PARTUUID': uuid4()}, |
1977 | + {'DEVNAME': 'xvda2', 'TYPE': 'ext4', |
1978 | + 'LABEL': 'cloudimg-rootfs', 'PARTUUID': uuid4()}]), |
1979 | + }, |
1980 | + ], |
1981 | + }, |
1982 | } |
1983 | |
1984 | # vi: ts=4 expandtab |
1985 | diff --git a/tests/unittests/test_handler/test_schema.py b/tests/unittests/test_handler/test_schema.py |
1986 | index 9b50ee7..ac41f12 100644 |
1987 | --- a/tests/unittests/test_handler/test_schema.py |
1988 | +++ b/tests/unittests/test_handler/test_schema.py |
1989 | @@ -27,6 +27,7 @@ class GetSchemaTest(CiTestCase): |
1990 | 'cc_resizefs', |
1991 | 'cc_runcmd', |
1992 | 'cc_snap', |
1993 | + 'cc_ubuntu_advantage', |
1994 | 'cc_zypper_add_repo' |
1995 | ], |
1996 | [subschema['id'] for subschema in schema['allOf']]) |
1997 | diff --git a/tests/unittests/test_net.py b/tests/unittests/test_net.py |
1998 | index 84a0eab..c12a487 100644 |
1999 | --- a/tests/unittests/test_net.py |
2000 | +++ b/tests/unittests/test_net.py |
2001 | @@ -393,12 +393,6 @@ NETWORK_CONFIGS = { |
2002 | eth1: |
2003 | match: |
2004 | macaddress: cf:d6:af:48:e8:80 |
2005 | - nameservers: |
2006 | - addresses: |
2007 | - - 1.2.3.4 |
2008 | - - 5.6.7.8 |
2009 | - search: |
2010 | - - wark.maas |
2011 | set-name: eth1 |
2012 | eth99: |
2013 | addresses: |
2014 | @@ -410,12 +404,9 @@ NETWORK_CONFIGS = { |
2015 | addresses: |
2016 | - 8.8.8.8 |
2017 | - 8.8.4.4 |
2018 | - - 1.2.3.4 |
2019 | - - 5.6.7.8 |
2020 | search: |
2021 | - barley.maas |
2022 | - sach.maas |
2023 | - - wark.maas |
2024 | routes: |
2025 | - to: 0.0.0.0/0 |
2026 | via: 65.61.151.37 |
2027 | @@ -654,81 +645,27 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true |
2028 | eth0: |
2029 | match: |
2030 | macaddress: c0:d6:9f:2c:e8:80 |
2031 | - nameservers: |
2032 | - addresses: |
2033 | - - 8.8.8.8 |
2034 | - - 4.4.4.4 |
2035 | - - 8.8.4.4 |
2036 | - search: |
2037 | - - barley.maas |
2038 | - - wark.maas |
2039 | - - foobar.maas |
2040 | set-name: eth0 |
2041 | eth1: |
2042 | match: |
2043 | macaddress: aa:d6:9f:2c:e8:80 |
2044 | - nameservers: |
2045 | - addresses: |
2046 | - - 8.8.8.8 |
2047 | - - 4.4.4.4 |
2048 | - - 8.8.4.4 |
2049 | - search: |
2050 | - - barley.maas |
2051 | - - wark.maas |
2052 | - - foobar.maas |
2053 | set-name: eth1 |
2054 | eth2: |
2055 | match: |
2056 | macaddress: c0:bb:9f:2c:e8:80 |
2057 | - nameservers: |
2058 | - addresses: |
2059 | - - 8.8.8.8 |
2060 | - - 4.4.4.4 |
2061 | - - 8.8.4.4 |
2062 | - search: |
2063 | - - barley.maas |
2064 | - - wark.maas |
2065 | - - foobar.maas |
2066 | set-name: eth2 |
2067 | eth3: |
2068 | match: |
2069 | macaddress: 66:bb:9f:2c:e8:80 |
2070 | - nameservers: |
2071 | - addresses: |
2072 | - - 8.8.8.8 |
2073 | - - 4.4.4.4 |
2074 | - - 8.8.4.4 |
2075 | - search: |
2076 | - - barley.maas |
2077 | - - wark.maas |
2078 | - - foobar.maas |
2079 | set-name: eth3 |
2080 | eth4: |
2081 | match: |
2082 | macaddress: 98:bb:9f:2c:e8:80 |
2083 | - nameservers: |
2084 | - addresses: |
2085 | - - 8.8.8.8 |
2086 | - - 4.4.4.4 |
2087 | - - 8.8.4.4 |
2088 | - search: |
2089 | - - barley.maas |
2090 | - - wark.maas |
2091 | - - foobar.maas |
2092 | set-name: eth4 |
2093 | eth5: |
2094 | dhcp4: true |
2095 | match: |
2096 | macaddress: 98:bb:9f:2c:e8:8a |
2097 | - nameservers: |
2098 | - addresses: |
2099 | - - 8.8.8.8 |
2100 | - - 4.4.4.4 |
2101 | - - 8.8.4.4 |
2102 | - search: |
2103 | - - barley.maas |
2104 | - - wark.maas |
2105 | - - foobar.maas |
2106 | set-name: eth5 |
2107 | bonds: |
2108 | bond0: |
2109 | @@ -748,6 +685,15 @@ pre-down route del -net 10.0.0.0 netmask 255.0.0.0 gw 11.0.0.1 metric 3 || true |
2110 | interfaces: |
2111 | - eth3 |
2112 | - eth4 |
2113 | + nameservers: |
2114 | + addresses: |
2115 | + - 8.8.8.8 |
2116 | + - 4.4.4.4 |
2117 | + - 8.8.4.4 |
2118 | + search: |
2119 | + - barley.maas |
2120 | + - wark.maas |
2121 | + - foobar.maas |
2122 | parameters: |
2123 | ageing-time: 250 |
2124 | forward-delay: 1 |
2125 | @@ -2334,6 +2280,9 @@ class TestNetplanRoundTrip(CiTestCase): |
2126 | def testsimple_render_all(self): |
2127 | entry = NETWORK_CONFIGS['all'] |
2128 | files = self._render_and_read(network_config=yaml.load(entry['yaml'])) |
2129 | + print(entry['expected_netplan']) |
2130 | + print('-- expected ^ | v rendered --') |
2131 | + print(files['/etc/netplan/50-cloud-init.yaml']) |
2132 | self.assertEqual( |
2133 | entry['expected_netplan'].splitlines(), |
2134 | files['/etc/netplan/50-cloud-init.yaml'].splitlines()) |
2135 | diff --git a/tools/ds-identify b/tools/ds-identify |
2136 | index e3f93c9..9a2db5c 100755 |
2137 | --- a/tools/ds-identify |
2138 | +++ b/tools/ds-identify |
2139 | @@ -92,6 +92,7 @@ DI_DMI_SYS_VENDOR="" |
2140 | DI_DMI_PRODUCT_SERIAL="" |
2141 | DI_DMI_PRODUCT_UUID="" |
2142 | DI_FS_LABELS="" |
2143 | +DI_FS_UUIDS="" |
2144 | DI_ISO9660_DEVS="" |
2145 | DI_KERNEL_CMDLINE="" |
2146 | DI_VIRT="" |
2147 | @@ -114,7 +115,7 @@ DI_DSNAME="" |
2148 | # be searched if there is no setting found in config. |
2149 | DI_DSLIST_DEFAULT="MAAS ConfigDrive NoCloud AltCloud Azure Bigstep \ |
2150 | CloudSigma CloudStack DigitalOcean AliYun Ec2 GCE OpenNebula OpenStack \ |
2151 | -OVF SmartOS Scaleway Hetzner" |
2152 | +OVF SmartOS Scaleway Hetzner IBMCloud" |
2153 | DI_DSLIST="" |
2154 | DI_MODE="" |
2155 | DI_ON_FOUND="" |
2156 | @@ -123,6 +124,8 @@ DI_ON_NOTFOUND="" |
2157 | |
2158 | DI_EC2_STRICT_ID_DEFAULT="true" |
2159 | |
2160 | +_IS_IBM_CLOUD="" |
2161 | + |
2162 | error() { |
2163 | set -- "ERROR:" "$@"; |
2164 | debug 0 "$@" |
2165 | @@ -196,7 +199,7 @@ read_fs_info() { |
2166 | return |
2167 | fi |
2168 | local oifs="$IFS" line="" delim="," |
2169 | - local ret=0 out="" labels="" dev="" label="" ftype="" isodevs="" |
2170 | + local ret=0 out="" labels="" dev="" label="" ftype="" isodevs="" uuids="" |
2171 | out=$(blkid -c /dev/null -o export) || { |
2172 | ret=$? |
2173 | error "failed running [$ret]: blkid -c /dev/null -o export" |
2174 | @@ -219,12 +222,14 @@ read_fs_info() { |
2175 | LABEL=*) label="${line#LABEL=}"; |
2176 | labels="${labels}${line#LABEL=}${delim}";; |
2177 | TYPE=*) ftype=${line#TYPE=};; |
2178 | + UUID=*) uuids="${uuids}${line#UUID=}$delim";; |
2179 | esac |
2180 | done |
2181 | [ -n "$dev" -a "$ftype" = "iso9660" ] && |
2182 | isodevs="${isodevs} ${dev}=$label" |
2183 | |
2184 | DI_FS_LABELS="${labels%${delim}}" |
2185 | + DI_FS_UUIDS="${uuids%${delim}}" |
2186 | DI_ISO9660_DEVS="${isodevs# }" |
2187 | } |
2188 | |
2189 | @@ -437,14 +442,25 @@ dmi_sys_vendor_is() { |
2190 | [ "${DI_DMI_SYS_VENDOR}" = "$1" ] |
2191 | } |
2192 | |
2193 | -has_fs_with_label() { |
2194 | - local label="$1" |
2195 | - case ",${DI_FS_LABELS}," in |
2196 | - *,$label,*) return 0;; |
2197 | +has_fs_with_uuid() { |
2198 | + case ",${DI_FS_UUIDS}," in |
2199 | + *,$1,*) return 0;; |
2200 | esac |
2201 | return 1 |
2202 | } |
2203 | |
2204 | +has_fs_with_label() { |
2205 | + # has_fs_with_label(label1[ ,label2 ..]) |
2206 | + # return 0 if a there is a filesystem that matches any of the labels. |
2207 | + local label="" |
2208 | + for label in "$@"; do |
2209 | + case ",${DI_FS_LABELS}," in |
2210 | + *,$label,*) return 0;; |
2211 | + esac |
2212 | + done |
2213 | + return 1 |
2214 | +} |
2215 | + |
2216 | nocase_equal() { |
2217 | # nocase_equal(a, b) |
2218 | # return 0 if case insenstive comparision a.lower() == b.lower() |
2219 | @@ -583,6 +599,8 @@ dscheck_NoCloud() { |
2220 | case " ${DI_DMI_PRODUCT_SERIAL} " in |
2221 | *\ ds=nocloud*) return ${DS_FOUND};; |
2222 | esac |
2223 | + |
2224 | + is_ibm_cloud && return ${DS_NOT_FOUND} |
2225 | for d in nocloud nocloud-net; do |
2226 | check_seed_dir "$d" meta-data user-data && return ${DS_FOUND} |
2227 | check_writable_seed_dir "$d" meta-data user-data && return ${DS_FOUND} |
2228 | @@ -594,9 +612,8 @@ dscheck_NoCloud() { |
2229 | } |
2230 | |
2231 | check_configdrive_v2() { |
2232 | - if has_fs_with_label "config-2"; then |
2233 | - return ${DS_FOUND} |
2234 | - elif has_fs_with_label "CONFIG-2"; then |
2235 | + is_ibm_cloud && return ${DS_NOT_FOUND} |
2236 | + if has_fs_with_label CONFIG-2 config-2; then |
2237 | return ${DS_FOUND} |
2238 | fi |
2239 | # look in /config-drive <vlc>/seed/config_drive for a directory |
2240 | @@ -894,6 +911,10 @@ dscheck_OpenStack() { |
2241 | return ${DS_FOUND} |
2242 | fi |
2243 | |
2244 | + if dmi_chassis_asset_tag_matches "OpenTelekomCloud"; then |
2245 | + return ${DS_FOUND} |
2246 | + fi |
2247 | + |
2248 | # LP: #1715241 : arch other than intel are not identified properly. |
2249 | case "$DI_UNAME_MACHINE" in |
2250 | i?86|x86_64) :;; |
2251 | @@ -984,6 +1005,36 @@ dscheck_Hetzner() { |
2252 | return ${DS_NOT_FOUND} |
2253 | } |
2254 | |
2255 | +is_ibm_provisioning() { |
2256 | + [ -f "${PATH_ROOT}/root/provisioningConfiguration.cfg" ] |
2257 | +} |
2258 | + |
2259 | +is_ibm_cloud() { |
2260 | + cached "${_IS_IBM_CLOUD}" && return ${_IS_IBM_CLOUD} |
2261 | + local ret=1 |
2262 | + if [ "$DI_VIRT" = "xen" ]; then |
2263 | + if is_ibm_provisioning; then |
2264 | + ret=0 |
2265 | + elif has_fs_with_label METADATA metadata; then |
2266 | + ret=0 |
2267 | + elif has_fs_with_uuid 9796-932E && |
2268 | + has_fs_with_label CONFIG-2 config-2; then |
2269 | + ret=0 |
2270 | + fi |
2271 | + fi |
2272 | + _IS_IBM_CLOUD=$ret |
2273 | + return $ret |
2274 | +} |
2275 | + |
2276 | +dscheck_IBMCloud() { |
2277 | + if is_ibm_provisioning; then |
2278 | + debug 1 "cloud-init disabled during provisioning on IBMCloud" |
2279 | + return ${DS_NOT_FOUND} |
2280 | + fi |
2281 | + is_ibm_cloud && return ${DS_FOUND} |
2282 | + return ${DS_NOT_FOUND} |
2283 | +} |
2284 | + |
2285 | collect_info() { |
2286 | read_virt |
2287 | read_pid1_product_name |
2288 | diff --git a/tools/pipremove b/tools/pipremove |
2289 | new file mode 100755 |
2290 | index 0000000..f8f4ff1 |
2291 | --- /dev/null |
2292 | +++ b/tools/pipremove |
2293 | @@ -0,0 +1,14 @@ |
2294 | +#!/usr/bin/python3 |
2295 | +import subprocess |
2296 | +import sys |
2297 | + |
2298 | +for pkg in sys.argv[1:]: |
2299 | + try: |
2300 | + exec('import %s' % pkg) # pylint: disable=W0122 |
2301 | + except ImportError: |
2302 | + continue |
2303 | + sys.stderr.write("%s removing package %s\n" % (sys.argv[0], pkg)) |
2304 | + ret = subprocess.Popen(['pip', 'uninstall', '--yes', pkg]).wait() |
2305 | + if ret != 0: |
2306 | + sys.stderr.write("Failed to uninstall %s (%d)\n" % (pkg, ret)) |
2307 | + sys.exit(ret) |
2308 | diff --git a/tox.ini b/tox.ini |
2309 | index 1f990af..818ade3 100644 |
2310 | --- a/tox.ini |
2311 | +++ b/tox.ini |
2312 | @@ -60,6 +60,9 @@ deps = sphinx |
2313 | commands = {envpython} -m sphinx {posargs:doc/rtd doc/rtd_html} |
2314 | |
2315 | [testenv:xenial] |
2316 | +commands = |
2317 | + python ./tools/pipremove jsonschema |
2318 | + python -m nose {posargs:tests/unittests cloudinit} |
2319 | basepython = python3 |
2320 | deps = |
2321 | # requirements |
PASSED: Continuous integration, rev:b6f9fc66c73915f81b51fdf24a77dec6df274d21
https://jenkins.ubuntu.com/server/job/cloud-init-ci/924/
Executed test runs:
SUCCESS: Checkout
SUCCESS: Unit & Style Tests
SUCCESS: Ubuntu LTS: Build
SUCCESS: Ubuntu LTS: Integration
SUCCESS: MAAS Compatability Testing
IN_PROGRESS: Declarative: Post Actions
Click here to trigger a rebuild:
https://jenkins.ubuntu.com/server/job/cloud-init-ci/924/rebuild