Merge ~chad.smith/cloud-init:feature/command-cloud-id into cloud-init:master
- Git
- lp:~chad.smith/cloud-init
- feature/command-cloud-id
- Merge into master
Status: | Superseded |
---|---|
Proposed branch: | ~chad.smith/cloud-init:feature/command-cloud-id |
Merge into: | cloud-init:master |
Diff against target: |
2342 lines (+954/-275) 36 files modified
cloudinit/cmd/cloud_id.py (+88/-0) cloudinit/cmd/tests/test_cloud_id.py (+112/-0) cloudinit/sources/DataSourceAliYun.py (+5/-15) cloudinit/sources/DataSourceAltCloud.py (+22/-11) cloudinit/sources/DataSourceAzure.py (+8/-0) cloudinit/sources/DataSourceBigstep.py (+4/-0) cloudinit/sources/DataSourceCloudSigma.py (+5/-1) cloudinit/sources/DataSourceConfigDrive.py (+12/-0) cloudinit/sources/DataSourceEc2.py (+59/-56) cloudinit/sources/DataSourceIBMCloud.py (+4/-0) cloudinit/sources/DataSourceMAAS.py (+4/-0) cloudinit/sources/DataSourceNoCloud.py (+21/-0) cloudinit/sources/DataSourceNone.py (+4/-0) cloudinit/sources/DataSourceOVF.py (+6/-0) cloudinit/sources/DataSourceOpenNebula.py (+8/-0) cloudinit/sources/DataSourceOracle.py (+4/-0) cloudinit/sources/DataSourceSmartOS.py (+3/-0) cloudinit/sources/__init__.py (+107/-21) cloudinit/sources/tests/test_init.py (+10/-2) cloudinit/sources/tests/test_oracle.py (+8/-0) cloudinit/tests/test_util.py (+16/-0) cloudinit/util.py (+5/-0) doc/rtd/topics/instancedata.rst (+137/-46) setup.py (+2/-1) tests/cloud_tests/testcases/base.py (+11/-2) tests/unittests/test_datasource/test_aliyun.py (+4/-0) tests/unittests/test_datasource/test_altcloud.py (+67/-51) tests/unittests/test_datasource/test_azure.py (+63/-47) tests/unittests/test_datasource/test_cloudsigma.py (+6/-0) tests/unittests/test_datasource/test_configdrive.py (+3/-0) tests/unittests/test_datasource/test_ec2.py (+13/-7) tests/unittests/test_datasource/test_ibmcloud.py (+39/-1) tests/unittests/test_datasource/test_nocloud.py (+35/-10) tests/unittests/test_datasource/test_opennebula.py (+4/-0) tests/unittests/test_datasource/test_ovf.py (+48/-4) tests/unittests/test_datasource/test_smartos.py (+7/-0) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Server Team CI bot | continuous-integration | Needs Fixing | |
cloud-init Committers | Pending | ||
Review via email: mp+356361@code.launchpad.net |
Commit message
tools: Add cloud-id command line utility
Add a quick cloud lookup utility in order to more easily determine
the cloud on which an instance is running.
The utility parses standardized attributes from
/run/cloud-init/instance-data.json
for the instance. It uses known region maps if necessary to determine
on which specific cloud the instance is running.
Examples:
aws, aws-gov, aws-china, rackspace, azure-china, lxd, openstack, unknown
Description of the change
Server Team CI bot (server-team-bot) wrote : | # |
Unmerged commits
- abc1200... by Chad Smith
-
return platform from canonical_cloud_id if cloud_name is unknown
- 0af64de... by Chad Smith
-
canonical_cloud_id handles None and unknown regions
- ae16154... by Chad Smith
-
move cloud_id out of tools and under cloudinit.cmd
- 8bc2628... by Chad Smith
-
revert tox changes
- c3a9e4c... by Chad Smith
-
oops
- 655bb65... by Chad Smith
-
update docs, wrap timer around ec2 crawl_metadata/get_data
- 4981999... by Chad Smith
-
add tools dir to test tox envs for testing
- a1d0620... by Chad Smith
-
tests: add test_cloud_id. fix setup.py to source cloud_id
- 5fef4ae... by Chad Smith
-
tools: add cloud-id utility to report canonical cloud info
- 94e3f12... by Chad Smith
-
tests: nocloud_kvm integration check for config-disk subplatform
Preview Diff
1 | diff --git a/cloudinit/cmd/cloud_id.py b/cloudinit/cmd/cloud_id.py |
2 | new file mode 100755 |
3 | index 0000000..8312d08 |
4 | --- /dev/null |
5 | +++ b/cloudinit/cmd/cloud_id.py |
6 | @@ -0,0 +1,88 @@ |
7 | +# This file is part of cloud-init. See LICENSE file for license information. |
8 | + |
9 | +"""Commandline utility to list the canonical cloud-id for an instance.""" |
10 | + |
11 | +import argparse |
12 | +import os |
13 | +import six |
14 | +import sys |
15 | + |
16 | +from cloudinit import util |
17 | +from cloudinit.sources import ( |
18 | + INSTANCE_JSON_FILE, METADATA_UNKNOWN, canonical_cloud_id) |
19 | + |
20 | +DEFAULT_INSTANCE_JSON = '/run/cloud-init/%s' % INSTANCE_JSON_FILE |
21 | + |
22 | +NAME = 'cloud-id' |
23 | + |
24 | + |
25 | +def get_parser(parser=None): |
26 | + """Build or extend an arg parser for the cloud-id utility. |
27 | + |
28 | + @param parser: Optional existing ArgumentParser instance representing the |
29 | + query subcommand which will be extended to support the args of |
30 | + this utility. |
31 | + |
32 | + @returns: ArgumentParser with proper argument configuration. |
33 | + """ |
34 | + if not parser: |
35 | + parser = argparse.ArgumentParser( |
36 | + prog=NAME, |
37 | + description='Report the canonical cloud-id for this instance') |
38 | + parser.add_argument( |
39 | + '-j', '--json', action='store_true', default=False, |
40 | + help='Report all standardized cloud-id information as json.') |
41 | + parser.add_argument( |
42 | + '-l', '--long', action='store_true', default=False, |
43 | + help='Report extended cloud-id information as hyphenated string.') |
44 | + parser.add_argument( |
45 | + '-i', '--instance-data', type=str, default=DEFAULT_INSTANCE_JSON, |
46 | + help=('Path to instance-data.json file. Default is %s' % |
47 | + DEFAULT_INSTANCE_JSON)) |
48 | + return parser |
49 | + |
50 | +def error(msg): |
51 | + sys.stderr.write('ERROR: %s\n' % msg) |
52 | + return 1 |
53 | + |
54 | + |
55 | +def handle_args(name, args): |
56 | + """Handle calls to 'cloud-id' cli. |
57 | + |
58 | + Print the canonical cloud-id on which the instance is running. |
59 | + |
60 | + @return: 0 on success, 1 otherwise. |
61 | + """ |
62 | + try: |
63 | + instance_json = util.load_file(args.instance_data) |
64 | + except IOError: |
65 | + return error( |
66 | + "File not found '%s'. Provide a path to instance data json file" |
67 | + ' using --instance-data' % args.instance_data) |
68 | + instance_data = util.load_json(instance_json) |
69 | + v1 = instance_data.get('v1', {}) |
70 | + cloud_id = canonical_cloud_id( |
71 | + v1.get('cloud_name', METADATA_UNKNOWN), |
72 | + v1.get('region', METADATA_UNKNOWN), |
73 | + v1.get('platform', METADATA_UNKNOWN)) |
74 | + if args.json: |
75 | + v1['cloud_id'] = cloud_id |
76 | + response = util.json_dumps(v1) |
77 | + elif args.long: |
78 | + response = '%s-%s' % (cloud_id, v1.get('region', METADATA_UNKNOWN)) |
79 | + else: |
80 | + response = cloud_id |
81 | + sys.stdout.write('%s\n' % response) |
82 | + return 0 |
83 | + |
84 | + |
85 | +def main(): |
86 | + """Tool to query specific instance-data values.""" |
87 | + parser = get_parser() |
88 | + sys.exit(handle_args(NAME, parser.parse_args())) |
89 | + |
90 | + |
91 | +if __name__ == '__main__': |
92 | + main() |
93 | + |
94 | +# vi: ts=4 expandtab |
95 | diff --git a/cloudinit/cmd/tests/test_cloud_id.py b/cloudinit/cmd/tests/test_cloud_id.py |
96 | new file mode 100644 |
97 | index 0000000..a42cfa8 |
98 | --- /dev/null |
99 | +++ b/cloudinit/cmd/tests/test_cloud_id.py |
100 | @@ -0,0 +1,112 @@ |
101 | +# This file is part of cloud-init. See LICENSE file for license information. |
102 | + |
103 | +"""Tests for cloud-id command line utility.""" |
104 | + |
105 | +from cloudinit import util |
106 | +from collections import namedtuple |
107 | +from six import StringIO |
108 | + |
109 | +from cloudinit.cmd import cloud_id |
110 | + |
111 | +from cloudinit.tests.helpers import CiTestCase, mock |
112 | + |
113 | + |
114 | +class TestCloudId(CiTestCase): |
115 | + |
116 | + args = namedtuple('cloudidargs', ('instance_data json long')) |
117 | + |
118 | + def setUp(self): |
119 | + super(TestCloudId, self).setUp() |
120 | + self.tmp = self.tmp_dir() |
121 | + self.instance_data = self.tmp_path('instance-data.json', dir=self.tmp) |
122 | + |
123 | + def test_cloud_id_arg_parser_defaults(self): |
124 | + """Validate the argument defaults when not provided by the end-user.""" |
125 | + cmd = ['cloud-id'] |
126 | + with mock.patch('sys.argv', cmd): |
127 | + args = cloud_id.get_parser().parse_args() |
128 | + self.assertEqual('/run/cloud-init/instance-data.json', args.instance_data) |
129 | + self.assertEqual(False, args.long) |
130 | + self.assertEqual(False, args.json) |
131 | + |
132 | + def test_cloud_id_arg_parse_overrides(self): |
133 | + """Override argument defaults by specifying values for each param.""" |
134 | + util.write_file(self.instance_data, '{}') |
135 | + cmd = ['cloud-id', '--instance-data', self.instance_data, '--long', |
136 | + '--json'] |
137 | + with mock.patch('sys.argv', cmd): |
138 | + args = cloud_id.get_parser().parse_args() |
139 | + self.assertEqual(self.instance_data, args.instance_data) |
140 | + self.assertEqual(True, args.long) |
141 | + self.assertEqual(True, args.json) |
142 | + |
143 | + def test_cloud_id_missing_instance_data_json(self): |
144 | + """Exit error when the provided instance-data.json does not exist.""" |
145 | + cmd = ['cloud-id', '--instance-data', self.instance_data] |
146 | + with mock.patch('sys.argv', cmd): |
147 | + with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr: |
148 | + with self.assertRaises(SystemExit) as context_manager: |
149 | + cloud_id.main() |
150 | + self.assertEqual(1, context_manager.exception.code) |
151 | + self.assertIn( |
152 | + "ERROR: File not found '%s'" % self.instance_data, |
153 | + m_stderr.getvalue()) |
154 | + |
155 | + def test_cloud_id_from_cloud_name_in_instance_data(self): |
156 | + """Report canonical cloud-id from cloud_name in instance-data.""" |
157 | + util.write_file( |
158 | + self.instance_data, |
159 | + '{"v1": {"cloud_name": "mycloud", "region": "somereg"}}') |
160 | + cmd = ['cloud-id', '--instance-data', self.instance_data] |
161 | + with mock.patch('sys.argv', cmd): |
162 | + with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: |
163 | + with self.assertRaises(SystemExit) as context_manager: |
164 | + cloud_id.main() |
165 | + self.assertEqual(0, context_manager.exception.code) |
166 | + self.assertEqual("mycloud\n", m_stdout.getvalue()) |
167 | + |
168 | + def test_cloud_id_long_name_from_instance_data(self): |
169 | + """Report long cloud-id format from cloud_name and region.""" |
170 | + util.write_file( |
171 | + self.instance_data, |
172 | + '{"v1": {"cloud_name": "mycloud", "region": "somereg"}}') |
173 | + cmd = ['cloud-id', '--instance-data', self.instance_data, '--long'] |
174 | + with mock.patch('sys.argv', cmd): |
175 | + with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: |
176 | + with self.assertRaises(SystemExit) as context_manager: |
177 | + cloud_id.main() |
178 | + self.assertEqual(0, context_manager.exception.code) |
179 | + self.assertEqual("mycloud-somereg\n", m_stdout.getvalue()) |
180 | + |
181 | + def test_cloud_id_lookup_from_instance_data_region(self): |
182 | + """Report discovered canonical cloud_id when region lookup matches.""" |
183 | + util.write_file( |
184 | + self.instance_data, |
185 | + '{"v1": {"cloud_name": "aws", "region": "cn-north-1",' |
186 | + ' "platform": "ec2"}}') |
187 | + cmd = ['cloud-id', '--instance-data', self.instance_data, '--long'] |
188 | + with mock.patch('sys.argv', cmd): |
189 | + with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: |
190 | + with self.assertRaises(SystemExit) as context_manager: |
191 | + cloud_id.main() |
192 | + self.assertEqual(0, context_manager.exception.code) |
193 | + self.assertEqual("aws-china-cn-north-1\n", m_stdout.getvalue()) |
194 | + |
195 | + def test_cloud_id_lookup_json_instance_data_adds_cloud_id_to_json(self): |
196 | + """Report v1 instance-data content with cloud_id when --json set.""" |
197 | + util.write_file( |
198 | + self.instance_data, |
199 | + '{"v1": {"cloud_name": "unknown", "region": "dfw",' |
200 | + ' "platform": "openstack", "public_ssh_keys": []}}') |
201 | + expected = util.json_dumps({ |
202 | + 'cloud_id': 'rackspace', 'cloud_name': 'unknown', |
203 | + 'platform': 'openstack', 'public_ssh_keys': [], 'region': 'dfw'}) |
204 | + cmd = ['cloud-id', '--instance-data', self.instance_data, '--json'] |
205 | + with mock.patch('sys.argv', cmd): |
206 | + with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout: |
207 | + with self.assertRaises(SystemExit) as context_manager: |
208 | + cloud_id.main() |
209 | + self.assertEqual(0, context_manager.exception.code) |
210 | + self.assertEqual(expected + '\n', m_stdout.getvalue()) |
211 | + |
212 | +# vi: ts=4 expandtab |
213 | diff --git a/cloudinit/sources/DataSourceAliYun.py b/cloudinit/sources/DataSourceAliYun.py |
214 | index 858e082..45cc9f0 100644 |
215 | --- a/cloudinit/sources/DataSourceAliYun.py |
216 | +++ b/cloudinit/sources/DataSourceAliYun.py |
217 | @@ -1,7 +1,5 @@ |
218 | # This file is part of cloud-init. See LICENSE file for license information. |
219 | |
220 | -import os |
221 | - |
222 | from cloudinit import sources |
223 | from cloudinit.sources import DataSourceEc2 as EC2 |
224 | from cloudinit import util |
225 | @@ -18,25 +16,17 @@ class DataSourceAliYun(EC2.DataSourceEc2): |
226 | min_metadata_version = '2016-01-01' |
227 | extended_metadata_versions = [] |
228 | |
229 | - def __init__(self, sys_cfg, distro, paths): |
230 | - super(DataSourceAliYun, self).__init__(sys_cfg, distro, paths) |
231 | - self.seed_dir = os.path.join(paths.seed_dir, "AliYun") |
232 | - |
233 | def get_hostname(self, fqdn=False, resolve_ip=False, metadata_only=False): |
234 | return self.metadata.get('hostname', 'localhost.localdomain') |
235 | |
236 | def get_public_ssh_keys(self): |
237 | return parse_public_keys(self.metadata.get('public-keys', {})) |
238 | |
239 | - @property |
240 | - def cloud_platform(self): |
241 | - if self._cloud_platform is None: |
242 | - if _is_aliyun(): |
243 | - self._cloud_platform = EC2.Platforms.ALIYUN |
244 | - else: |
245 | - self._cloud_platform = EC2.Platforms.NO_EC2_METADATA |
246 | - |
247 | - return self._cloud_platform |
248 | + def _get_cloud_name(self): |
249 | + if _is_aliyun(): |
250 | + return EC2.CloudNames.ALIYUN |
251 | + else: |
252 | + return EC2.CloudNames.NO_EC2_METADATA |
253 | |
254 | |
255 | def _is_aliyun(): |
256 | diff --git a/cloudinit/sources/DataSourceAltCloud.py b/cloudinit/sources/DataSourceAltCloud.py |
257 | index 8cd312d..5270fda 100644 |
258 | --- a/cloudinit/sources/DataSourceAltCloud.py |
259 | +++ b/cloudinit/sources/DataSourceAltCloud.py |
260 | @@ -89,7 +89,9 @@ class DataSourceAltCloud(sources.DataSource): |
261 | ''' |
262 | Description: |
263 | Get the type for the cloud back end this instance is running on |
264 | - by examining the string returned by reading the dmi data. |
265 | + by examining the string returned by reading either: |
266 | + CLOUD_INFO_FILE or |
267 | + the dmi data. |
268 | |
269 | Input: |
270 | None |
271 | @@ -99,7 +101,14 @@ class DataSourceAltCloud(sources.DataSource): |
272 | 'RHEV', 'VSPHERE' or 'UNKNOWN' |
273 | |
274 | ''' |
275 | - |
276 | + if os.path.exists(CLOUD_INFO_FILE): |
277 | + try: |
278 | + cloud_type = util.load_file(CLOUD_INFO_FILE).strip().upper() |
279 | + except IOError: |
280 | + util.logexc(LOG, 'Unable to access cloud info file at %s.', |
281 | + CLOUD_INFO_FILE) |
282 | + return 'UNKNOWN' |
283 | + return cloud_type |
284 | system_name = util.read_dmi_data("system-product-name") |
285 | if not system_name: |
286 | return 'UNKNOWN' |
287 | @@ -134,15 +143,7 @@ class DataSourceAltCloud(sources.DataSource): |
288 | |
289 | LOG.debug('Invoked get_data()') |
290 | |
291 | - if os.path.exists(CLOUD_INFO_FILE): |
292 | - try: |
293 | - cloud_type = util.load_file(CLOUD_INFO_FILE).strip().upper() |
294 | - except IOError: |
295 | - util.logexc(LOG, 'Unable to access cloud info file at %s.', |
296 | - CLOUD_INFO_FILE) |
297 | - return False |
298 | - else: |
299 | - cloud_type = self.get_cloud_type() |
300 | + cloud_type = self.get_cloud_type() |
301 | |
302 | LOG.debug('cloud_type: %s', str(cloud_type)) |
303 | |
304 | @@ -161,6 +162,15 @@ class DataSourceAltCloud(sources.DataSource): |
305 | util.logexc(LOG, 'Failed accessing user data.') |
306 | return False |
307 | |
308 | + def _get_subplatform(self): |
309 | + """Return the subplatform metadata details.""" |
310 | + cloud_type = self.get_cloud_type() |
311 | + if not hasattr(self, 'source'): |
312 | + self.source = sources.METADATA_UNKNOWN |
313 | + if cloud_type == 'RHEV': |
314 | + self.source = '/dev/fd0' |
315 | + return '%s (%s)' % (cloud_type.lower(), self.source) |
316 | + |
317 | def user_data_rhevm(self): |
318 | ''' |
319 | RHEVM specific userdata read |
320 | @@ -232,6 +242,7 @@ class DataSourceAltCloud(sources.DataSource): |
321 | try: |
322 | return_str = util.mount_cb(cdrom_dev, read_user_data_callback) |
323 | if return_str: |
324 | + self.source = cdrom_dev |
325 | break |
326 | except OSError as err: |
327 | if err.errno != errno.ENOENT: |
328 | diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py |
329 | index 783445e..39391d0 100644 |
330 | --- a/cloudinit/sources/DataSourceAzure.py |
331 | +++ b/cloudinit/sources/DataSourceAzure.py |
332 | @@ -351,6 +351,14 @@ class DataSourceAzure(sources.DataSource): |
333 | metadata['public-keys'] = key_value or pubkeys_from_crt_files(fp_files) |
334 | return metadata |
335 | |
336 | + def _get_subplatform(self): |
337 | + """Return the subplatform metadata source details.""" |
338 | + if self.seed.startswith('/dev'): |
339 | + subplatform_type = 'config-disk' |
340 | + else: |
341 | + subplatform_type = 'seed-dir' |
342 | + return '%s (%s)' % (subplatform_type, self.seed) |
343 | + |
344 | def crawl_metadata(self): |
345 | """Walk all instance metadata sources returning a dict on success. |
346 | |
347 | diff --git a/cloudinit/sources/DataSourceBigstep.py b/cloudinit/sources/DataSourceBigstep.py |
348 | index 699a85b..52fff20 100644 |
349 | --- a/cloudinit/sources/DataSourceBigstep.py |
350 | +++ b/cloudinit/sources/DataSourceBigstep.py |
351 | @@ -36,6 +36,10 @@ class DataSourceBigstep(sources.DataSource): |
352 | self.userdata_raw = decoded["userdata_raw"] |
353 | return True |
354 | |
355 | + def _get_subplatform(self): |
356 | + """Return the subplatform metadata source details.""" |
357 | + return 'metadata (%s)' % get_url_from_file() |
358 | + |
359 | |
360 | def get_url_from_file(): |
361 | try: |
362 | diff --git a/cloudinit/sources/DataSourceCloudSigma.py b/cloudinit/sources/DataSourceCloudSigma.py |
363 | index c816f34..2955d3f 100644 |
364 | --- a/cloudinit/sources/DataSourceCloudSigma.py |
365 | +++ b/cloudinit/sources/DataSourceCloudSigma.py |
366 | @@ -7,7 +7,7 @@ |
367 | from base64 import b64decode |
368 | import re |
369 | |
370 | -from cloudinit.cs_utils import Cepko |
371 | +from cloudinit.cs_utils import Cepko, SERIAL_PORT |
372 | |
373 | from cloudinit import log as logging |
374 | from cloudinit import sources |
375 | @@ -84,6 +84,10 @@ class DataSourceCloudSigma(sources.DataSource): |
376 | |
377 | return True |
378 | |
379 | + def _get_subplatform(self): |
380 | + """Return the subplatform metadata source details.""" |
381 | + return 'cepko (%s)' % SERIAL_PORT |
382 | + |
383 | def get_hostname(self, fqdn=False, resolve_ip=False, metadata_only=False): |
384 | """ |
385 | Cleans up and uses the server's name if the latter is set. Otherwise |
386 | diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py |
387 | index 664dc4b..564e3eb 100644 |
388 | --- a/cloudinit/sources/DataSourceConfigDrive.py |
389 | +++ b/cloudinit/sources/DataSourceConfigDrive.py |
390 | @@ -160,6 +160,18 @@ class DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource): |
391 | LOG.debug("no network configuration available") |
392 | return self._network_config |
393 | |
394 | + @property |
395 | + def platform(self): |
396 | + return 'openstack' |
397 | + |
398 | + def _get_subplatform(self): |
399 | + """Return the subplatform metadata source details.""" |
400 | + if self.seed_dir in self.source: |
401 | + subplatform_type = 'seed-dir' |
402 | + elif self.source.startswith('/dev'): |
403 | + subplatform_type = 'config-disk' |
404 | + return '%s (%s)' % (subplatform_type, self.source) |
405 | + |
406 | |
407 | def read_config_drive(source_dir): |
408 | reader = openstack.ConfigDriveReader(source_dir) |
409 | diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py |
410 | index 968ab3f..9ccf2cd 100644 |
411 | --- a/cloudinit/sources/DataSourceEc2.py |
412 | +++ b/cloudinit/sources/DataSourceEc2.py |
413 | @@ -28,18 +28,16 @@ STRICT_ID_PATH = ("datasource", "Ec2", "strict_id") |
414 | STRICT_ID_DEFAULT = "warn" |
415 | |
416 | |
417 | -class Platforms(object): |
418 | - # TODO Rename and move to cloudinit.cloud.CloudNames |
419 | - ALIYUN = "AliYun" |
420 | - AWS = "AWS" |
421 | - BRIGHTBOX = "Brightbox" |
422 | - SEEDED = "Seeded" |
423 | +class CloudNames(object): |
424 | + ALIYUN = "aliyun" |
425 | + AWS = "aws" |
426 | + BRIGHTBOX = "brightbox" |
427 | # UNKNOWN indicates no positive id. If strict_id is 'warn' or 'false', |
428 | # then an attempt at the Ec2 Metadata service will be made. |
429 | - UNKNOWN = "Unknown" |
430 | + UNKNOWN = "unknown" |
431 | # NO_EC2_METADATA indicates this platform does not have a Ec2 metadata |
432 | # service available. No attempt at the Ec2 Metadata service will be made. |
433 | - NO_EC2_METADATA = "No-EC2-Metadata" |
434 | + NO_EC2_METADATA = "no-ec2-metadata" |
435 | |
436 | |
437 | class DataSourceEc2(sources.DataSource): |
438 | @@ -61,8 +59,6 @@ class DataSourceEc2(sources.DataSource): |
439 | url_max_wait = 120 |
440 | url_timeout = 50 |
441 | |
442 | - _cloud_platform = None |
443 | - |
444 | _network_config = sources.UNSET # Used to cache calculated network cfg v1 |
445 | |
446 | # Whether we want to get network configuration from the metadata service. |
447 | @@ -71,30 +67,21 @@ class DataSourceEc2(sources.DataSource): |
448 | def __init__(self, sys_cfg, distro, paths): |
449 | super(DataSourceEc2, self).__init__(sys_cfg, distro, paths) |
450 | self.metadata_address = None |
451 | - self.seed_dir = os.path.join(paths.seed_dir, "ec2") |
452 | |
453 | def _get_cloud_name(self): |
454 | """Return the cloud name as identified during _get_data.""" |
455 | - return self.cloud_platform |
456 | + return identify_platform() |
457 | |
458 | def _get_data(self): |
459 | - seed_ret = {} |
460 | - if util.read_optional_seed(seed_ret, base=(self.seed_dir + "/")): |
461 | - self.userdata_raw = seed_ret['user-data'] |
462 | - self.metadata = seed_ret['meta-data'] |
463 | - LOG.debug("Using seeded ec2 data from %s", self.seed_dir) |
464 | - self._cloud_platform = Platforms.SEEDED |
465 | - return True |
466 | - |
467 | strict_mode, _sleep = read_strict_mode( |
468 | util.get_cfg_by_path(self.sys_cfg, STRICT_ID_PATH, |
469 | STRICT_ID_DEFAULT), ("warn", None)) |
470 | |
471 | - LOG.debug("strict_mode: %s, cloud_platform=%s", |
472 | - strict_mode, self.cloud_platform) |
473 | - if strict_mode == "true" and self.cloud_platform == Platforms.UNKNOWN: |
474 | + LOG.debug("strict_mode: %s, cloud_name=%s cloud_platform=%s", |
475 | + strict_mode, self.cloud_name, self.platform) |
476 | + if strict_mode == "true" and self.cloud_name == CloudNames.UNKNOWN: |
477 | return False |
478 | - elif self.cloud_platform == Platforms.NO_EC2_METADATA: |
479 | + elif self.cloud_name == CloudNames.NO_EC2_METADATA: |
480 | return False |
481 | |
482 | if self.perform_dhcp_setup: # Setup networking in init-local stage. |
483 | @@ -103,13 +90,22 @@ class DataSourceEc2(sources.DataSource): |
484 | return False |
485 | try: |
486 | with EphemeralDHCPv4(self.fallback_interface): |
487 | - return util.log_time( |
488 | + self._crawled_metadata = util.log_time( |
489 | logfunc=LOG.debug, msg='Crawl of metadata service', |
490 | - func=self._crawl_metadata) |
491 | + func=self.crawl_metadata) |
492 | except NoDHCPLeaseError: |
493 | return False |
494 | else: |
495 | - return self._crawl_metadata() |
496 | + self._crawled_metadata = util.log_time( |
497 | + logfunc=LOG.debug, msg='Crawl of metadata service', |
498 | + func=self.crawl_metadata) |
499 | + if not self._crawled_metadata: |
500 | + return False |
501 | + self.metadata = self._crawled_metadata.get('meta-data', None) |
502 | + self.userdata_raw = self._crawled_metadata.get('user-data', None) |
503 | + self.identity = self._crawled_metadata.get( |
504 | + 'dynamic', {}).get('instance-identity', {}).get('document', {}) |
505 | + return True |
506 | |
507 | @property |
508 | def launch_index(self): |
509 | @@ -117,6 +113,15 @@ class DataSourceEc2(sources.DataSource): |
510 | return None |
511 | return self.metadata.get('ami-launch-index') |
512 | |
513 | + @property |
514 | + def platform(self): |
515 | + # Handle upgrade path of pickled ds |
516 | + if not hasattr(self, '_platform_type'): |
517 | + self._platform_type = DataSourceEc2.dsname.lower() |
518 | + if not self._platform_type: |
519 | + self._platform_type = DataSourceEc2.dsname.lower() |
520 | + return self._platform_type |
521 | + |
522 | def get_metadata_api_version(self): |
523 | """Get the best supported api version from the metadata service. |
524 | |
525 | @@ -144,7 +149,7 @@ class DataSourceEc2(sources.DataSource): |
526 | return self.min_metadata_version |
527 | |
528 | def get_instance_id(self): |
529 | - if self.cloud_platform == Platforms.AWS: |
530 | + if self.cloud_name == CloudNames.AWS: |
531 | # Prefer the ID from the instance identity document, but fall back |
532 | if not getattr(self, 'identity', None): |
533 | # If re-using cached datasource, it's get_data run didn't |
534 | @@ -254,7 +259,7 @@ class DataSourceEc2(sources.DataSource): |
535 | @property |
536 | def availability_zone(self): |
537 | try: |
538 | - if self.cloud_platform == Platforms.AWS: |
539 | + if self.cloud_name == CloudNames.AWS: |
540 | return self.identity.get( |
541 | 'availabilityZone', |
542 | self.metadata['placement']['availability-zone']) |
543 | @@ -265,7 +270,7 @@ class DataSourceEc2(sources.DataSource): |
544 | |
545 | @property |
546 | def region(self): |
547 | - if self.cloud_platform == Platforms.AWS: |
548 | + if self.cloud_name == CloudNames.AWS: |
549 | region = self.identity.get('region') |
550 | # Fallback to trimming the availability zone if region is missing |
551 | if self.availability_zone and not region: |
552 | @@ -277,16 +282,10 @@ class DataSourceEc2(sources.DataSource): |
553 | return az[:-1] |
554 | return None |
555 | |
556 | - @property |
557 | - def cloud_platform(self): # TODO rename cloud_name |
558 | - if self._cloud_platform is None: |
559 | - self._cloud_platform = identify_platform() |
560 | - return self._cloud_platform |
561 | - |
562 | def activate(self, cfg, is_new_instance): |
563 | if not is_new_instance: |
564 | return |
565 | - if self.cloud_platform == Platforms.UNKNOWN: |
566 | + if self.cloud_name == CloudNames.UNKNOWN: |
567 | warn_if_necessary( |
568 | util.get_cfg_by_path(cfg, STRICT_ID_PATH, STRICT_ID_DEFAULT), |
569 | cfg) |
570 | @@ -306,13 +305,13 @@ class DataSourceEc2(sources.DataSource): |
571 | result = None |
572 | no_network_metadata_on_aws = bool( |
573 | 'network' not in self.metadata and |
574 | - self.cloud_platform == Platforms.AWS) |
575 | + self.cloud_name == CloudNames.AWS) |
576 | if no_network_metadata_on_aws: |
577 | LOG.debug("Metadata 'network' not present:" |
578 | " Refreshing stale metadata from prior to upgrade.") |
579 | util.log_time( |
580 | logfunc=LOG.debug, msg='Re-crawl of metadata service', |
581 | - func=self._crawl_metadata) |
582 | + func=self.get_data) |
583 | |
584 | # Limit network configuration to only the primary/fallback nic |
585 | iface = self.fallback_interface |
586 | @@ -340,28 +339,32 @@ class DataSourceEc2(sources.DataSource): |
587 | return super(DataSourceEc2, self).fallback_interface |
588 | return self._fallback_interface |
589 | |
590 | - def _crawl_metadata(self): |
591 | + def crawl_metadata(self): |
592 | """Crawl metadata service when available. |
593 | |
594 | - @returns: True on success, False otherwise. |
595 | + @returns: Dictionary of crawled metadata content containing the keys: |
596 | + meta-data, user-data and dynamic. |
597 | """ |
598 | if not self.wait_for_metadata_service(): |
599 | - return False |
600 | + return {} |
601 | api_version = self.get_metadata_api_version() |
602 | + crawled_metadata = {} |
603 | try: |
604 | - self.userdata_raw = ec2.get_instance_userdata( |
605 | + crawled_metadata['user-data'] = ec2.get_instance_userdata( |
606 | api_version, self.metadata_address) |
607 | - self.metadata = ec2.get_instance_metadata( |
608 | + crawled_metadata['meta-data'] = ec2.get_instance_metadata( |
609 | api_version, self.metadata_address) |
610 | - if self.cloud_platform == Platforms.AWS: |
611 | - self.identity = ec2.get_instance_identity( |
612 | - api_version, self.metadata_address).get('document', {}) |
613 | + if self.cloud_name == CloudNames.AWS: |
614 | + identity = ec2.get_instance_identity( |
615 | + api_version, self.metadata_address) |
616 | + crawled_metadata['dynamic'] = {'instance-identity': identity} |
617 | except Exception: |
618 | util.logexc( |
619 | LOG, "Failed reading from metadata address %s", |
620 | self.metadata_address) |
621 | - return False |
622 | - return True |
623 | + return {} |
624 | + crawled_metadata['_metadata_api_version'] = api_version |
625 | + return crawled_metadata |
626 | |
627 | |
628 | class DataSourceEc2Local(DataSourceEc2): |
629 | @@ -375,10 +378,10 @@ class DataSourceEc2Local(DataSourceEc2): |
630 | perform_dhcp_setup = True # Use dhcp before querying metadata |
631 | |
632 | def get_data(self): |
633 | - supported_platforms = (Platforms.AWS,) |
634 | - if self.cloud_platform not in supported_platforms: |
635 | + supported_platforms = (CloudNames.AWS,) |
636 | + if self.cloud_name not in supported_platforms: |
637 | LOG.debug("Local Ec2 mode only supported on %s, not %s", |
638 | - supported_platforms, self.cloud_platform) |
639 | + supported_platforms, self.cloud_name) |
640 | return False |
641 | return super(DataSourceEc2Local, self).get_data() |
642 | |
643 | @@ -439,20 +442,20 @@ def identify_aws(data): |
644 | if (data['uuid'].startswith('ec2') and |
645 | (data['uuid_source'] == 'hypervisor' or |
646 | data['uuid'] == data['serial'])): |
647 | - return Platforms.AWS |
648 | + return CloudNames.AWS |
649 | |
650 | return None |
651 | |
652 | |
653 | def identify_brightbox(data): |
654 | if data['serial'].endswith('brightbox.com'): |
655 | - return Platforms.BRIGHTBOX |
656 | + return CloudNames.BRIGHTBOX |
657 | |
658 | |
659 | def identify_platform(): |
660 | - # identify the platform and return an entry in Platforms. |
661 | + # identify the platform and return an entry in CloudNames. |
662 | data = _collect_platform_data() |
663 | - checks = (identify_aws, identify_brightbox, lambda x: Platforms.UNKNOWN) |
664 | + checks = (identify_aws, identify_brightbox, lambda x: CloudNames.UNKNOWN) |
665 | for checker in checks: |
666 | try: |
667 | result = checker(data) |
668 | diff --git a/cloudinit/sources/DataSourceIBMCloud.py b/cloudinit/sources/DataSourceIBMCloud.py |
669 | index a535814..21e6ae6 100644 |
670 | --- a/cloudinit/sources/DataSourceIBMCloud.py |
671 | +++ b/cloudinit/sources/DataSourceIBMCloud.py |
672 | @@ -157,6 +157,10 @@ class DataSourceIBMCloud(sources.DataSource): |
673 | |
674 | return True |
675 | |
676 | + def _get_subplatform(self): |
677 | + """Return the subplatform metadata source details.""" |
678 | + return '%s (%s)' % (self.platform, self.source) |
679 | + |
680 | def check_instance_id(self, sys_cfg): |
681 | """quickly (local check only) if self.instance_id is still valid |
682 | |
683 | diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py |
684 | index bcb3854..61aa6d7 100644 |
685 | --- a/cloudinit/sources/DataSourceMAAS.py |
686 | +++ b/cloudinit/sources/DataSourceMAAS.py |
687 | @@ -109,6 +109,10 @@ class DataSourceMAAS(sources.DataSource): |
688 | LOG.warning("Invalid content in vendor-data: %s", e) |
689 | self.vendordata_raw = None |
690 | |
691 | + def _get_subplatform(self): |
692 | + """Return the subplatform metadata source details.""" |
693 | + return 'seed-dir (%s)' % self.base_url |
694 | + |
695 | def wait_for_metadata_service(self, url): |
696 | mcfg = self.ds_cfg |
697 | max_wait = 120 |
698 | diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py |
699 | index 2daea59..9010f06 100644 |
700 | --- a/cloudinit/sources/DataSourceNoCloud.py |
701 | +++ b/cloudinit/sources/DataSourceNoCloud.py |
702 | @@ -186,6 +186,27 @@ class DataSourceNoCloud(sources.DataSource): |
703 | self._network_eni = mydata['meta-data'].get('network-interfaces') |
704 | return True |
705 | |
706 | + @property |
707 | + def platform_type(self): |
708 | + # Handle upgrade path of pickled ds |
709 | + if not hasattr(self, '_platform_type'): |
710 | + self._platform_type = None |
711 | + if not self._platform_type: |
712 | + self._platform_type = 'lxd' if util.is_lxd() else 'nocloud' |
713 | + return self._platform_type |
714 | + |
715 | + def _get_cloud_name(self): |
716 | + """Return unknown when 'cloud-name' key is absent from metadata.""" |
717 | + return sources.METADATA_UNKNOWN |
718 | + |
719 | + def _get_subplatform(self): |
720 | + """Return the subplatform metadata source details.""" |
721 | + if self.seed.startswith('/dev'): |
722 | + subplatform_type = 'config-disk' |
723 | + else: |
724 | + subplatform_type = 'seed-dir' |
725 | + return '%s (%s)' % (subplatform_type, self.seed) |
726 | + |
727 | def check_instance_id(self, sys_cfg): |
728 | # quickly (local check only) if self.instance_id is still valid |
729 | # we check kernel command line or files. |
730 | diff --git a/cloudinit/sources/DataSourceNone.py b/cloudinit/sources/DataSourceNone.py |
731 | index e63a7e3..e625080 100644 |
732 | --- a/cloudinit/sources/DataSourceNone.py |
733 | +++ b/cloudinit/sources/DataSourceNone.py |
734 | @@ -28,6 +28,10 @@ class DataSourceNone(sources.DataSource): |
735 | self.metadata = self.ds_cfg['metadata'] |
736 | return True |
737 | |
738 | + def _get_subplatform(self): |
739 | + """Return the subplatform metadata source details.""" |
740 | + return 'config' |
741 | + |
742 | def get_instance_id(self): |
743 | return 'iid-datasource-none' |
744 | |
745 | diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py |
746 | index 178ccb0..045291e 100644 |
747 | --- a/cloudinit/sources/DataSourceOVF.py |
748 | +++ b/cloudinit/sources/DataSourceOVF.py |
749 | @@ -275,6 +275,12 @@ class DataSourceOVF(sources.DataSource): |
750 | self.cfg = cfg |
751 | return True |
752 | |
753 | + def _get_subplatform(self): |
754 | + system_type = util.read_dmi_data("system-product-name").lower() |
755 | + if system_type == 'vmware': |
756 | + return 'vmware (%s)' % self.seed |
757 | + return 'ovf (%s)' % self.seed |
758 | + |
759 | def get_public_ssh_keys(self): |
760 | if 'public-keys' not in self.metadata: |
761 | return [] |
762 | diff --git a/cloudinit/sources/DataSourceOpenNebula.py b/cloudinit/sources/DataSourceOpenNebula.py |
763 | index 77ccd12..e62e972 100644 |
764 | --- a/cloudinit/sources/DataSourceOpenNebula.py |
765 | +++ b/cloudinit/sources/DataSourceOpenNebula.py |
766 | @@ -95,6 +95,14 @@ class DataSourceOpenNebula(sources.DataSource): |
767 | self.userdata_raw = results.get('userdata') |
768 | return True |
769 | |
770 | + def _get_subplatform(self): |
771 | + """Return the subplatform metadata source details.""" |
772 | + if self.seed_dir in self.seed: |
773 | + subplatform_type = 'seed-dir' |
774 | + else: |
775 | + subplatform_type = 'config-disk' |
776 | + return '%s (%s)' % (subplatform_type, self.seed) |
777 | + |
778 | @property |
779 | def network_config(self): |
780 | if self.network is not None: |
781 | diff --git a/cloudinit/sources/DataSourceOracle.py b/cloudinit/sources/DataSourceOracle.py |
782 | index fab39af..70b9c58 100644 |
783 | --- a/cloudinit/sources/DataSourceOracle.py |
784 | +++ b/cloudinit/sources/DataSourceOracle.py |
785 | @@ -91,6 +91,10 @@ class DataSourceOracle(sources.DataSource): |
786 | def crawl_metadata(self): |
787 | return read_metadata() |
788 | |
789 | + def _get_subplatform(self): |
790 | + """Return the subplatform metadata source details.""" |
791 | + return 'metadata (%s)' % METADATA_ENDPOINT |
792 | + |
793 | def check_instance_id(self, sys_cfg): |
794 | """quickly check (local only) if self.instance_id is still valid |
795 | |
796 | diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py |
797 | index 593ac91..32b57cd 100644 |
798 | --- a/cloudinit/sources/DataSourceSmartOS.py |
799 | +++ b/cloudinit/sources/DataSourceSmartOS.py |
800 | @@ -303,6 +303,9 @@ class DataSourceSmartOS(sources.DataSource): |
801 | self._set_provisioned() |
802 | return True |
803 | |
804 | + def _get_subplatform(self): |
805 | + return 'serial (%s)' % SERIAL_DEVICE |
806 | + |
807 | def device_name_to_device(self, name): |
808 | return self.ds_cfg['disk_aliases'].get(name) |
809 | |
810 | diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py |
811 | index 5ac9882..2cf76ad 100644 |
812 | --- a/cloudinit/sources/__init__.py |
813 | +++ b/cloudinit/sources/__init__.py |
814 | @@ -54,9 +54,20 @@ REDACT_SENSITIVE_VALUE = 'redacted for non-root user' |
815 | METADATA_CLOUD_NAME_KEY = 'cloud-name' |
816 | |
817 | UNSET = "_unset" |
818 | +METADATA_UNKNOWN = 'unknown' |
819 | |
820 | LOG = logging.getLogger(__name__) |
821 | |
822 | +# CLOUD_ID_REGION_PREFIX_MAP format is: |
823 | +# <region-match-prefix>: (<new-cloud-id>, <test_allowed_cloud_callable>) |
824 | +CLOUD_ID_REGION_PREFIX_MAP = { |
825 | + 'cn-': ('aws-china', lambda c, _: c == 'aws'), # only change aws regions |
826 | + 'us-gov-': ('aws-gov', lambda c, _: c == 'aws'), # only change aws regions |
827 | + 'china': ('azure-china', lambda c, _: c == 'azure'), # only change azure |
828 | + ('dfw', 'ord', 'iad', 'lon', 'syd', 'hkg'): ( |
829 | + 'rackspace', lambda c, p: (c == 'unknown' and p == 'openstack')) |
830 | +} |
831 | + |
832 | |
833 | class DataSourceNotFoundException(Exception): |
834 | pass |
835 | @@ -133,6 +144,14 @@ class DataSource(object): |
836 | # Cached cloud_name as determined by _get_cloud_name |
837 | _cloud_name = None |
838 | |
839 | + # Cached cloud platform api type: e.g. ec2, openstack, kvm, lxd, azure etc. |
840 | + _platform_type = None |
841 | + |
842 | + # More details about the cloud platform: |
843 | + # - metadata (http://169.254.169.254/) |
844 | + # - seed-dir (<dirname>) |
845 | + _subplatform = None |
846 | + |
847 | # Track the discovered fallback nic for use in configuration generation. |
848 | _fallback_interface = None |
849 | |
850 | @@ -192,21 +211,24 @@ class DataSource(object): |
851 | local_hostname = self.get_hostname() |
852 | instance_id = self.get_instance_id() |
853 | availability_zone = self.availability_zone |
854 | - cloud_name = self.cloud_name |
855 | - # When adding new standard keys prefer underscore-delimited instead |
856 | - # of hyphen-delimted to support simple variable references in jinja |
857 | - # templates. |
858 | + # In the event of upgrade from existing cloudinit, pickled datasource |
859 | + # will not contain these new class attributes. So we need to recrawl |
860 | + # metadata to discover that content. |
861 | return { |
862 | 'v1': { |
863 | + '_beta_keys': ['subplatform'], |
864 | 'availability-zone': availability_zone, |
865 | 'availability_zone': availability_zone, |
866 | - 'cloud-name': cloud_name, |
867 | - 'cloud_name': cloud_name, |
868 | + 'cloud-name': self.cloud_name, |
869 | + 'cloud_name': self.cloud_name, |
870 | + 'platform': self.platform_type, |
871 | + 'public_ssh_keys': self.get_public_ssh_keys(), |
872 | 'instance-id': instance_id, |
873 | 'instance_id': instance_id, |
874 | 'local-hostname': local_hostname, |
875 | 'local_hostname': local_hostname, |
876 | - 'region': self.region}} |
877 | + 'region': self.region, |
878 | + 'subplatform': self.subplatform}} |
879 | |
880 | def clear_cached_attrs(self, attr_defaults=()): |
881 | """Reset any cached metadata attributes to datasource defaults. |
882 | @@ -247,19 +269,27 @@ class DataSource(object): |
883 | |
884 | @return True on successful write, False otherwise. |
885 | """ |
886 | - instance_data = { |
887 | - 'ds': {'_doc': EXPERIMENTAL_TEXT, |
888 | - 'meta_data': self.metadata}} |
889 | - if hasattr(self, 'network_json'): |
890 | - network_json = getattr(self, 'network_json') |
891 | - if network_json != UNSET: |
892 | - instance_data['ds']['network_json'] = network_json |
893 | - if hasattr(self, 'ec2_metadata'): |
894 | - ec2_metadata = getattr(self, 'ec2_metadata') |
895 | - if ec2_metadata != UNSET: |
896 | - instance_data['ds']['ec2_metadata'] = ec2_metadata |
897 | + if hasattr(self, '_crawled_metadata'): |
898 | + # Any datasource with _crawled_metadata will best represent |
899 | + # most recent, 'raw' metadata |
900 | + crawled_metadata = copy.deepcopy( |
901 | + getattr(self, '_crawled_metadata')) |
902 | + crawled_metadata.pop('user-data', None) |
903 | + crawled_metadata.pop('vendor-data', None) |
904 | + instance_data = {'ds': crawled_metadata} |
905 | + else: |
906 | + instance_data = {'ds': {'meta_data': self.metadata}} |
907 | + if hasattr(self, 'network_json'): |
908 | + network_json = getattr(self, 'network_json') |
909 | + if network_json != UNSET: |
910 | + instance_data['ds']['network_json'] = network_json |
911 | + if hasattr(self, 'ec2_metadata'): |
912 | + ec2_metadata = getattr(self, 'ec2_metadata') |
913 | + if ec2_metadata != UNSET: |
914 | + instance_data['ds']['ec2_metadata'] = ec2_metadata |
915 | instance_data.update( |
916 | self._get_standardized_metadata()) |
917 | + instance_data['ds']['_doc'] = EXPERIMENTAL_TEXT |
918 | try: |
919 | # Process content base64encoding unserializable values |
920 | content = util.json_dumps(instance_data) |
921 | @@ -347,6 +377,40 @@ class DataSource(object): |
922 | return self._fallback_interface |
923 | |
924 | @property |
925 | + def platform_type(self): |
926 | + if not hasattr(self, '_platform_type'): |
927 | + # Handle upgrade path where pickled datasource has no _platform_type. |
928 | + self._platform_type = self.dsname.lower() |
929 | + if not self._platform_type: |
930 | + self._platform_type = self.dsname.lower() |
931 | + return self._platform_type |
932 | + |
933 | + @property |
934 | + def subplatform(self): |
935 | + """Return a string representing subplatform details for the datasource. |
936 | + |
937 | + This should be guidance for where the metadata is sourced. |
938 | + Examples of this on different clouds: |
939 | + ec2: metadata (http://169.254.169.254) |
940 | + openstack: configdrive (/dev/path) |
941 | + openstack: metadata (http://169.254.169.254) |
942 | + nocloud: seed-dir (/seed/dir/path) |
943 | + lxd: nocloud (/seed/dir/path) |
944 | + """ |
945 | + if not hasattr(self, '_subplatform'): |
946 | + # Handle upgrade path where pickled datasource has no _subplatform. |
947 | + self._subplatform = self._get_subplatform() |
948 | + if not self._subplatform: |
949 | + self._subplatform = self._get_subplatform() |
950 | + return self._subplatform |
951 | + |
952 | + def _get_subplatform(self): |
953 | + """Subclasses should implement to return a "slug (detail)" string.""" |
954 | + if hasattr(self, 'metadata_address'): |
955 | + return 'metadata (%s)' % getattr(self, 'metadata_address') |
956 | + return METADATA_UNKNOWN |
957 | + |
958 | + @property |
959 | def cloud_name(self): |
960 | """Return lowercase cloud name as determined by the datasource. |
961 | |
962 | @@ -359,9 +423,11 @@ class DataSource(object): |
963 | cloud_name = self.metadata.get(METADATA_CLOUD_NAME_KEY) |
964 | if isinstance(cloud_name, six.string_types): |
965 | self._cloud_name = cloud_name.lower() |
966 | - LOG.debug( |
967 | - 'Ignoring metadata provided key %s: non-string type %s', |
968 | - METADATA_CLOUD_NAME_KEY, type(cloud_name)) |
969 | + else: |
970 | + self._cloud_name = self._get_cloud_name().lower() |
971 | + LOG.debug( |
972 | + 'Ignoring metadata provided key %s: non-string type %s', |
973 | + METADATA_CLOUD_NAME_KEY, type(cloud_name)) |
974 | else: |
975 | self._cloud_name = self._get_cloud_name().lower() |
976 | return self._cloud_name |
977 | @@ -714,6 +780,26 @@ def instance_id_matches_system_uuid(instance_id, field='system-uuid'): |
978 | return instance_id.lower() == dmi_value.lower() |
979 | |
980 | |
981 | +def canonical_cloud_id(cloud_name, region, platform): |
982 | + """Lookup the canonical cloud-id for a given cloud_name and region.""" |
983 | + if not region or region == METADATA_UNKNOWN: |
984 | + if cloud_name != METADATA_UNKNOWN: |
985 | + return cloud_name |
986 | + return platform |
987 | + for prefix, cloud_id_test in CLOUD_ID_REGION_PREFIX_MAP.items(): |
988 | + (cloud_id, valid_cloud) = cloud_id_test |
989 | + if isinstance(prefix, tuple): |
990 | + for item in prefix: |
991 | + if item in region and valid_cloud(cloud_name, platform): |
992 | + return cloud_id |
993 | + else: |
994 | + if prefix in region and valid_cloud(cloud_name, platform): |
995 | + return cloud_id |
996 | + if cloud_name != METADATA_UNKNOWN: |
997 | + return cloud_name |
998 | + return platform |
999 | + |
1000 | + |
1001 | def convert_vendordata(data, recurse=True): |
1002 | """data: a loaded object (strings, arrays, dicts). |
1003 | return something suitable for cloudinit vendordata_raw. |
1004 | diff --git a/cloudinit/sources/tests/test_init.py b/cloudinit/sources/tests/test_init.py |
1005 | index 8082019..391b343 100644 |
1006 | --- a/cloudinit/sources/tests/test_init.py |
1007 | +++ b/cloudinit/sources/tests/test_init.py |
1008 | @@ -295,6 +295,7 @@ class TestDataSource(CiTestCase): |
1009 | 'base64_encoded_keys': [], |
1010 | 'sensitive_keys': [], |
1011 | 'v1': { |
1012 | + '_beta_keys': ['subplatform'], |
1013 | 'availability-zone': 'myaz', |
1014 | 'availability_zone': 'myaz', |
1015 | 'cloud-name': 'subclasscloudname', |
1016 | @@ -303,7 +304,10 @@ class TestDataSource(CiTestCase): |
1017 | 'instance_id': 'iid-datasource', |
1018 | 'local-hostname': 'test-subclass-hostname', |
1019 | 'local_hostname': 'test-subclass-hostname', |
1020 | - 'region': 'myregion'}, |
1021 | + 'platform': 'mytestsubclass', |
1022 | + 'public_ssh_keys': [], |
1023 | + 'region': 'myregion', |
1024 | + 'subplatform': 'unknown'}, |
1025 | 'ds': { |
1026 | '_doc': EXPERIMENTAL_TEXT, |
1027 | 'meta_data': {'availability_zone': 'myaz', |
1028 | @@ -339,6 +343,7 @@ class TestDataSource(CiTestCase): |
1029 | 'base64_encoded_keys': [], |
1030 | 'sensitive_keys': ['ds/meta_data/some/security-credentials'], |
1031 | 'v1': { |
1032 | + '_beta_keys': ['subplatform'], |
1033 | 'availability-zone': 'myaz', |
1034 | 'availability_zone': 'myaz', |
1035 | 'cloud-name': 'subclasscloudname', |
1036 | @@ -347,7 +352,10 @@ class TestDataSource(CiTestCase): |
1037 | 'instance_id': 'iid-datasource', |
1038 | 'local-hostname': 'test-subclass-hostname', |
1039 | 'local_hostname': 'test-subclass-hostname', |
1040 | - 'region': 'myregion'}, |
1041 | + 'platform': 'mytestsubclass', |
1042 | + 'public_ssh_keys': [], |
1043 | + 'region': 'myregion', |
1044 | + 'subplatform': 'unknown'}, |
1045 | 'ds': { |
1046 | '_doc': EXPERIMENTAL_TEXT, |
1047 | 'meta_data': { |
1048 | diff --git a/cloudinit/sources/tests/test_oracle.py b/cloudinit/sources/tests/test_oracle.py |
1049 | index 7599126..97d6294 100644 |
1050 | --- a/cloudinit/sources/tests/test_oracle.py |
1051 | +++ b/cloudinit/sources/tests/test_oracle.py |
1052 | @@ -71,6 +71,14 @@ class TestDataSourceOracle(test_helpers.CiTestCase): |
1053 | self.assertFalse(ds._get_data()) |
1054 | mocks._is_platform_viable.assert_called_once_with() |
1055 | |
1056 | + def test_platform_info(self): |
1057 | + """Return platform-related information for Oracle Datasource.""" |
1058 | + ds, _mocks = self._get_ds() |
1059 | + self.assertEqual('oracle', ds.cloud_name) |
1060 | + self.assertEqual('oracle', ds.platform_type) |
1061 | + self.assertEqual( |
1062 | + 'metadata (http://169.254.169.254/openstack/)', ds.subplatform) |
1063 | + |
1064 | @mock.patch(DS_PATH + "._is_iscsi_root", return_value=True) |
1065 | def test_without_userdata(self, m_is_iscsi_root): |
1066 | """If no user-data is provided, it should not be in return dict.""" |
1067 | diff --git a/cloudinit/tests/test_util.py b/cloudinit/tests/test_util.py |
1068 | index edb0c18..749a384 100644 |
1069 | --- a/cloudinit/tests/test_util.py |
1070 | +++ b/cloudinit/tests/test_util.py |
1071 | @@ -478,4 +478,20 @@ class TestGetLinuxDistro(CiTestCase): |
1072 | dist = util.get_linux_distro() |
1073 | self.assertEqual(('foo', '1.1', 'aarch64'), dist) |
1074 | |
1075 | + |
1076 | +@mock.patch('os.path.exists') |
1077 | +class TestIsLXD(CiTestCase): |
1078 | + |
1079 | + def test_is_lxd_true_on_sock_device(self, m_exists): |
1080 | + """When lxd's /dev/lxd/sock exists, is_lxd returns true.""" |
1081 | + m_exists.return_value = True |
1082 | + self.assertTrue(util.is_lxd()) |
1083 | + m_exists.assert_called_once_with('/dev/lxd/sock') |
1084 | + |
1085 | + def test_is_lxd_false_when_sock_device_absent(self, m_exists): |
1086 | + """When lxd's /dev/lxd/sock is absent, is_lxd returns false.""" |
1087 | + m_exists.return_value = False |
1088 | + self.assertFalse(util.is_lxd()) |
1089 | + m_exists.assert_called_once_with('/dev/lxd/sock') |
1090 | + |
1091 | # vi: ts=4 expandtab |
1092 | diff --git a/cloudinit/util.py b/cloudinit/util.py |
1093 | index 5068096..c67d6be 100644 |
1094 | --- a/cloudinit/util.py |
1095 | +++ b/cloudinit/util.py |
1096 | @@ -2171,6 +2171,11 @@ def is_container(): |
1097 | return False |
1098 | |
1099 | |
1100 | +def is_lxd(): |
1101 | + """Check to see if we are running in a lxd container.""" |
1102 | + return os.path.exists('/dev/lxd/sock') |
1103 | + |
1104 | + |
1105 | def get_proc_env(pid, encoding='utf-8', errors='replace'): |
1106 | """ |
1107 | Return the environment in a dict that a given process id was started with. |
1108 | diff --git a/doc/rtd/topics/instancedata.rst b/doc/rtd/topics/instancedata.rst |
1109 | index 634e180..5d2dc94 100644 |
1110 | --- a/doc/rtd/topics/instancedata.rst |
1111 | +++ b/doc/rtd/topics/instancedata.rst |
1112 | @@ -90,24 +90,46 @@ There are three basic top-level keys: |
1113 | |
1114 | The standardized keys present: |
1115 | |
1116 | -+----------------------+-----------------------------------------------+---------------------------+ |
1117 | -| Key path | Description | Examples | |
1118 | -+======================+===============================================+===========================+ |
1119 | -| v1.cloud_name | The name of the cloud provided by metadata | aws, openstack, azure, | |
1120 | -| | key 'cloud-name' or the cloud-init datasource | configdrive, nocloud, | |
1121 | -| | name which was discovered. | ovf, etc. | |
1122 | -+----------------------+-----------------------------------------------+---------------------------+ |
1123 | -| v1.instance_id | Unique instance_id allocated by the cloud | i-<somehash> | |
1124 | -+----------------------+-----------------------------------------------+---------------------------+ |
1125 | -| v1.local_hostname | The internal or local hostname of the system | ip-10-41-41-70, | |
1126 | -| | | <user-provided-hostname> | |
1127 | -+----------------------+-----------------------------------------------+---------------------------+ |
1128 | -| v1.region | The physical region/datacenter in which the | us-east-2 | |
1129 | -| | instance is deployed | | |
1130 | -+----------------------+-----------------------------------------------+---------------------------+ |
1131 | -| v1.availability_zone | The physical availability zone in which the | us-east-2b, nova, null | |
1132 | -| | instance is deployed | | |
1133 | -+----------------------+-----------------------------------------------+---------------------------+ |
1134 | ++----------------------+-----------------------------------------------+-----------------------------------+ |
1135 | +| Key path | Description | Examples | |
1136 | ++======================+===============================================+===================================+ |
1137 | +| v1._beta_keys | List of standardized keys still in 'beta'. | [subplatform] | |
1138 | +| | The format, intent or presence of these keys | | |
1139 | +| | can change. Do not consider them | | |
1140 | +| | production-ready. | | |
1141 | ++----------------------+-----------------------------------------------+-----------------------------------+ |
1142 | +| v1.cloud_name | Where possible this will indicate the 'name' | aws, openstack, azure, | |
1143 | +| | of the cloud this system is running on. This | configdrive, nocloud, | |
1144 | +| | is specifically different than the 'platform' | ovf, etc. | |
1145 | +| | below. As an example, the name of Amazon Web | | |
1146 | +| | Services is 'aws' while the platform is 'ec2'.| | |
1147 | +| | | | |
1148 | +| | If no specific name is determinable or | | |
1149 | +| | provided in meta-data, then this field may | | |
1150 | +| | contain the same content as 'platform'. | | |
1151 | ++----------------------+-----------------------------------------------+-----------------------------------+ |
1152 | +| v1.instance_id | Unique instance_id allocated by the cloud | i-<somehash> | |
1153 | ++----------------------+-----------------------------------------------+-----------------------------------+ |
1154 | +| v1.local_hostname | The internal or local hostname of the system | ip-10-41-41-70, | |
1155 | +| | | <user-provided-hostname> | |
1156 | ++----------------------+-----------------------------------------------+-----------------------------------+ |
1157 | +| v1.platform | An attempt to identify the cloud platform | ec2, openstack, lxd, gce | |
1158 | +| | instance that the system is running on. | nocloud, ovf | |
1159 | ++----------------------+-----------------------------------------------+-----------------------------------+ |
1160 | +| v1.subplatform | Additional platform details describing the | metadata (http://169.254.169.254),| |
1161 | +| | specific source or type of metadata used. | seed-dir (/path/to/seed-dir/), | |
1162 | +| | The format of subplatform will be: | config-disk (/dev/cd0), | |
1163 | +| | <subplatform_type> (<url_file_or_dev_path>) | configdrive (/dev/sr0) | |
1164 | ++----------------------+-----------------------------------------------+-----------------------------------+ |
1165 | +| v1.public_ssh_keys | A list of ssh keys provided to the instance | ['ssh-rsa AA...', ...] | |
1166 | +| | by the datasource metadata. | | |
1167 | ++----------------------+-----------------------------------------------+-----------------------------------+ |
1168 | +| v1.region | The physical region/datacenter in which the | us-east-2 | |
1169 | +| | instance is deployed | | |
1170 | ++----------------------+-----------------------------------------------+-----------------------------------+ |
1171 | +| v1.availability_zone | The physical availability zone in which the | us-east-2b, nova, null | |
1172 | +| | instance is deployed | | |
1173 | ++----------------------+-----------------------------------------------+-----------------------------------+ |
1174 | |
1175 | |
1176 | Below is an example of ``/run/cloud-init/instance_data.json`` on an EC2 |
1177 | @@ -117,10 +139,75 @@ instance: |
1178 | |
1179 | { |
1180 | "base64_encoded_keys": [], |
1181 | - "sensitive_keys": [], |
1182 | "ds": { |
1183 | - "meta_data": { |
1184 | - "ami-id": "ami-014e1416b628b0cbf", |
1185 | + "_doc": "EXPERIMENTAL: The structure and format of content scoped under the 'ds' key may change in subsequent releases of cloud-init.", |
1186 | + "_metadata_api_version": "2016-09-02", |
1187 | + "dynamic": { |
1188 | + "instance-identity": { |
1189 | + "document": { |
1190 | + "accountId": "437526006925", |
1191 | + "architecture": "x86_64", |
1192 | + "availabilityZone": "us-east-2b", |
1193 | + "billingProducts": null, |
1194 | + "devpayProductCodes": null, |
1195 | + "imageId": "ami-079638aae7046bdd2", |
1196 | + "instanceId": "i-075f088c72ad3271c", |
1197 | + "instanceType": "t2.micro", |
1198 | + "kernelId": null, |
1199 | + "marketplaceProductCodes": null, |
1200 | + "pendingTime": "2018-10-05T20:10:43Z", |
1201 | + "privateIp": "10.41.41.95", |
1202 | + "ramdiskId": null, |
1203 | + "region": "us-east-2", |
1204 | + "version": "2017-09-30" |
1205 | + }, |
1206 | + "pkcs7": [ |
1207 | + "MIAGCSqGSIb3DQEHAqCAMIACAQExCzAJBgUrDgMCGgUAMIAGCSqGSIb3DQEHAaCAJIAEggHbewog", |
1208 | + "ICJkZXZwYXlQcm9kdWN0Q29kZXMiIDogbnVsbCwKICAibWFya2V0cGxhY2VQcm9kdWN0Q29kZXMi", |
1209 | + "IDogbnVsbCwKICAicHJpdmF0ZUlwIiA6ICIxMC40MS40MS45NSIsCiAgInZlcnNpb24iIDogIjIw", |
1210 | + "MTctMDktMzAiLAogICJpbnN0YW5jZUlkIiA6ICJpLTA3NWYwODhjNzJhZDMyNzFjIiwKICAiYmls", |
1211 | + "bGluZ1Byb2R1Y3RzIiA6IG51bGwsCiAgImluc3RhbmNlVHlwZSIgOiAidDIubWljcm8iLAogICJh", |
1212 | + "Y2NvdW50SWQiIDogIjQzNzUyNjAwNjkyNSIsCiAgImF2YWlsYWJpbGl0eVpvbmUiIDogInVzLWVh", |
1213 | + "c3QtMmIiLAogICJrZXJuZWxJZCIgOiBudWxsLAogICJyYW1kaXNrSWQiIDogbnVsbCwKICAiYXJj", |
1214 | + "aGl0ZWN0dXJlIiA6ICJ4ODZfNjQiLAogICJpbWFnZUlkIiA6ICJhbWktMDc5NjM4YWFlNzA0NmJk", |
1215 | + "ZDIiLAogICJwZW5kaW5nVGltZSIgOiAiMjAxOC0xMC0wNVQyMDoxMDo0M1oiLAogICJyZWdpb24i", |
1216 | + "IDogInVzLWVhc3QtMiIKfQAAAAAAADGCARcwggETAgEBMGkwXDELMAkGA1UEBhMCVVMxGTAXBgNV", |
1217 | + "BAgTEFdhc2hpbmd0b24gU3RhdGUxEDAOBgNVBAcTB1NlYXR0bGUxIDAeBgNVBAoTF0FtYXpvbiBX", |
1218 | + "ZWIgU2VydmljZXMgTExDAgkAlrpI2eVeGmcwCQYFKw4DAhoFAKBdMBgGCSqGSIb3DQEJAzELBgkq", |
1219 | + "hkiG9w0BBwEwHAYJKoZIhvcNAQkFMQ8XDTE4MTAwNTIwMTA0OFowIwYJKoZIhvcNAQkEMRYEFK0k", |
1220 | + "Tz6n1A8/zU1AzFj0riNQORw2MAkGByqGSM44BAMELjAsAhRNrr174y98grPBVXUforN/6wZp8AIU", |
1221 | + "JLZBkrB2GJA8A4WJ1okq++jSrBIAAAAAAAA=" |
1222 | + ], |
1223 | + "rsa2048": [ |
1224 | + "MIAGCSqGSIb3DQEHAqCAMIACAQExDzANBglghkgBZQMEAgEFADCABgkqhkiG9w0BBwGggCSABIIB", |
1225 | + "23sKICAiZGV2cGF5UHJvZHVjdENvZGVzIiA6IG51bGwsCiAgIm1hcmtldHBsYWNlUHJvZHVjdENv", |
1226 | + "ZGVzIiA6IG51bGwsCiAgInByaXZhdGVJcCIgOiAiMTAuNDEuNDEuOTUiLAogICJ2ZXJzaW9uIiA6", |
1227 | + "ICIyMDE3LTA5LTMwIiwKICAiaW5zdGFuY2VJZCIgOiAiaS0wNzVmMDg4YzcyYWQzMjcxYyIsCiAg", |
1228 | + "ImJpbGxpbmdQcm9kdWN0cyIgOiBudWxsLAogICJpbnN0YW5jZVR5cGUiIDogInQyLm1pY3JvIiwK", |
1229 | + "ICAiYWNjb3VudElkIiA6ICI0Mzc1MjYwMDY5MjUiLAogICJhdmFpbGFiaWxpdHlab25lIiA6ICJ1", |
1230 | + "cy1lYXN0LTJiIiwKICAia2VybmVsSWQiIDogbnVsbCwKICAicmFtZGlza0lkIiA6IG51bGwsCiAg", |
1231 | + "ImFyY2hpdGVjdHVyZSIgOiAieDg2XzY0IiwKICAiaW1hZ2VJZCIgOiAiYW1pLTA3OTYzOGFhZTcw", |
1232 | + "NDZiZGQyIiwKICAicGVuZGluZ1RpbWUiIDogIjIwMTgtMTAtMDVUMjA6MTA6NDNaIiwKICAicmVn", |
1233 | + "aW9uIiA6ICJ1cy1lYXN0LTIiCn0AAAAAAAAxggH/MIIB+wIBATBpMFwxCzAJBgNVBAYTAlVTMRkw", |
1234 | + "FwYDVQQIExBXYXNoaW5ndG9uIFN0YXRlMRAwDgYDVQQHEwdTZWF0dGxlMSAwHgYDVQQKExdBbWF6", |
1235 | + "b24gV2ViIFNlcnZpY2VzIExMQwIJAM07oeX4xevdMA0GCWCGSAFlAwQCAQUAoGkwGAYJKoZIhvcN", |
1236 | + "AQkDMQsGCSqGSIb3DQEHATAcBgkqhkiG9w0BCQUxDxcNMTgxMDA1MjAxMDQ4WjAvBgkqhkiG9w0B", |
1237 | + "CQQxIgQgkYz0pZk3zJKBi4KP4egeOKJl/UYwu5UdE7id74pmPwMwDQYJKoZIhvcNAQEBBQAEggEA", |
1238 | + "dC3uIGGNul1OC1mJKSH3XoBWsYH20J/xhIdftYBoXHGf2BSFsrs9ZscXd2rKAKea4pSPOZEYMXgz", |
1239 | + "lPuT7W0WU89N3ZKviy/ReMSRjmI/jJmsY1lea6mlgcsJXreBXFMYucZvyeWGHdnCjamoKWXkmZlM", |
1240 | + "mSB1gshWy8Y7DzoKviYPQZi5aI54XK2Upt4kGme1tH1NI2Cq+hM4K+adxTbNhS3uzvWaWzMklUuU", |
1241 | + "QHX2GMmjAVRVc8vnA8IAsBCJJp+gFgYzi09IK+cwNgCFFPADoG6jbMHHf4sLB3MUGpiA+G9JlCnM", |
1242 | + "fmkjI2pNRB8spc0k4UG4egqLrqCz67WuK38tjwAAAAAAAA==" |
1243 | + ], |
1244 | + "signature": [ |
1245 | + "Tsw6h+V3WnxrNVSXBYIOs1V4j95YR1mLPPH45XnhX0/Ei3waJqf7/7EEKGYP1Cr4PTYEULtZ7Mvf", |
1246 | + "+xJpM50Ivs2bdF7o0c4vnplRWe3f06NI9pv50dr110j/wNzP4MZ1pLhJCqubQOaaBTF3LFutgRrt", |
1247 | + "r4B0mN3p7EcqD8G+ll0=" |
1248 | + ] |
1249 | + } |
1250 | + }, |
1251 | + "meta-data": { |
1252 | + "ami-id": "ami-079638aae7046bdd2", |
1253 | "ami-launch-index": "0", |
1254 | "ami-manifest-path": "(unknown)", |
1255 | "block-device-mapping": { |
1256 | @@ -129,31 +216,31 @@ instance: |
1257 | "ephemeral1": "sdc", |
1258 | "root": "/dev/sda1" |
1259 | }, |
1260 | - "hostname": "ip-10-41-41-70.us-east-2.compute.internal", |
1261 | + "hostname": "ip-10-41-41-95.us-east-2.compute.internal", |
1262 | "instance-action": "none", |
1263 | - "instance-id": "i-04fa31cfc55aa7976", |
1264 | + "instance-id": "i-075f088c72ad3271c", |
1265 | "instance-type": "t2.micro", |
1266 | - "local-hostname": "ip-10-41-41-70.us-east-2.compute.internal", |
1267 | - "local-ipv4": "10.41.41.70", |
1268 | - "mac": "06:b6:92:dd:9d:24", |
1269 | + "local-hostname": "ip-10-41-41-95.us-east-2.compute.internal", |
1270 | + "local-ipv4": "10.41.41.95", |
1271 | + "mac": "06:74:8f:39:cd:a6", |
1272 | "metrics": { |
1273 | "vhostmd": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" |
1274 | }, |
1275 | "network": { |
1276 | "interfaces": { |
1277 | "macs": { |
1278 | - "06:b6:92:dd:9d:24": { |
1279 | + "06:74:8f:39:cd:a6": { |
1280 | "device-number": "0", |
1281 | - "interface-id": "eni-08c0c9fdb99b6e6f4", |
1282 | + "interface-id": "eni-052058bbd7831eaae", |
1283 | "ipv4-associations": { |
1284 | - "18.224.22.43": "10.41.41.70" |
1285 | + "18.218.221.122": "10.41.41.95" |
1286 | }, |
1287 | - "local-hostname": "ip-10-41-41-70.us-east-2.compute.internal", |
1288 | - "local-ipv4s": "10.41.41.70", |
1289 | - "mac": "06:b6:92:dd:9d:24", |
1290 | + "local-hostname": "ip-10-41-41-95.us-east-2.compute.internal", |
1291 | + "local-ipv4s": "10.41.41.95", |
1292 | + "mac": "06:74:8f:39:cd:a6", |
1293 | "owner-id": "437526006925", |
1294 | - "public-hostname": "ec2-18-224-22-43.us-east-2.compute.amazonaws.com", |
1295 | - "public-ipv4s": "18.224.22.43", |
1296 | + "public-hostname": "ec2-18-218-221-122.us-east-2.compute.amazonaws.com", |
1297 | + "public-ipv4s": "18.218.221.122", |
1298 | "security-group-ids": "sg-828247e9", |
1299 | "security-groups": "Cloud-init integration test secgroup", |
1300 | "subnet-id": "subnet-282f3053", |
1301 | @@ -171,16 +258,14 @@ instance: |
1302 | "availability-zone": "us-east-2b" |
1303 | }, |
1304 | "profile": "default-hvm", |
1305 | - "public-hostname": "ec2-18-224-22-43.us-east-2.compute.amazonaws.com", |
1306 | - "public-ipv4": "18.224.22.43", |
1307 | + "public-hostname": "ec2-18-218-221-122.us-east-2.compute.amazonaws.com", |
1308 | + "public-ipv4": "18.218.221.122", |
1309 | "public-keys": { |
1310 | "cloud-init-integration": [ |
1311 | - "ssh-rsa |
1312 | - AAAAB3NzaC1yc2EAAAADAQABAAABAQDSL7uWGj8cgWyIOaspgKdVy0cKJ+UTjfv7jBOjG2H/GN8bJVXy72XAvnhM0dUM+CCs8FOf0YlPX+Frvz2hKInrmRhZVwRSL129PasD12MlI3l44u6IwS1o/W86Q+tkQYEljtqDOo0a+cOsaZkvUNzUyEXUwz/lmYa6G4hMKZH4NBj7nbAAF96wsMCoyNwbWryBnDYUr6wMbjRR1J9Pw7Xh7WRC73wy4Va2YuOgbD3V/5ZrFPLbWZW/7TFXVrql04QVbyei4aiFR5n//GvoqwQDNe58LmbzX/xvxyKJYdny2zXmdAhMxbrpFQsfpkJ9E/H5w0yOdSvnWbUoG5xNGoOB |
1313 | - cloud-init-integration" |
1314 | + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDSL7uWGj8cgWyIOaspgKdVy0cKJ+UTjfv7jBOjG2H/GN8bJVXy72XAvnhM0dUM+CCs8FOf0YlPX+Frvz2hKInrmRhZVwRSL129PasD12MlI3l44u6IwS1o/W86Q+tkQYEljtqDOo0a+cOsaZkvUNzUyEXUwz/lmYa6G4hMKZH4NBj7nbAAF96wsMCoyNwbWryBnDYUr6wMbjRR1J9Pw7Xh7WRC73wy4Va2YuOgbD3V/5ZrFPLbWZW/7TFXVrql04QVbyei4aiFR5n//GvoqwQDNe58LmbzX/xvxyKJYdny2zXmdAhMxbrpFQsfpkJ9E/H5w0yOdSvnWbUoG5xNGoOB cloud-init-integration" |
1315 | ] |
1316 | }, |
1317 | - "reservation-id": "r-06ab75e9346f54333", |
1318 | + "reservation-id": "r-0594a20e31f6cfe46", |
1319 | "security-groups": "Cloud-init integration test secgroup", |
1320 | "services": { |
1321 | "domain": "amazonaws.com", |
1322 | @@ -188,16 +273,22 @@ instance: |
1323 | } |
1324 | } |
1325 | }, |
1326 | + "sensitive_keys": [], |
1327 | "v1": { |
1328 | + "_beta_keys": [ |
1329 | + "subplatform" |
1330 | + ], |
1331 | "availability-zone": "us-east-2b", |
1332 | "availability_zone": "us-east-2b", |
1333 | - "cloud-name": "aws", |
1334 | "cloud_name": "aws", |
1335 | - "instance-id": "i-04fa31cfc55aa7976", |
1336 | - "instance_id": "i-04fa31cfc55aa7976", |
1337 | - "local-hostname": "ip-10-41-41-70", |
1338 | - "local_hostname": "ip-10-41-41-70", |
1339 | - "region": "us-east-2" |
1340 | + "instance_id": "i-075f088c72ad3271c", |
1341 | + "local_hostname": "ip-10-41-41-95", |
1342 | + "platform": "ec2", |
1343 | + "public_ssh_keys": [ |
1344 | + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDSL7uWGj8cgWyIOaspgKdVy0cKJ+UTjfv7jBOjG2H/GN8bJVXy72XAvnhM0dUM+CCs8FOf0YlPX+Frvz2hKInrmRhZVwRSL129PasD12MlI3l44u6IwS1o/W86Q+tkQYEljtqDOo0a+cOsaZkvUNzUyEXUwz/lmYa6G4hMKZH4NBj7nbAAF96wsMCoyNwbWryBnDYUr6wMbjRR1J9Pw7Xh7WRC73wy4Va2YuOgbD3V/5ZrFPLbWZW/7TFXVrql04QVbyei4aiFR5n//GvoqwQDNe58LmbzX/xvxyKJYdny2zXmdAhMxbrpFQsfpkJ9E/H5w0yOdSvnWbUoG5xNGoOB cloud-init-integration" |
1345 | + ], |
1346 | + "region": "us-east-2", |
1347 | + "subplatform": "metadata (http://169.254.169.254)" |
1348 | } |
1349 | } |
1350 | |
1351 | diff --git a/setup.py b/setup.py |
1352 | index 5ed8eae..ea37efc 100755 |
1353 | --- a/setup.py |
1354 | +++ b/setup.py |
1355 | @@ -282,7 +282,8 @@ setuptools.setup( |
1356 | cmdclass=cmdclass, |
1357 | entry_points={ |
1358 | 'console_scripts': [ |
1359 | - 'cloud-init = cloudinit.cmd.main:main' |
1360 | + 'cloud-init = cloudinit.cmd.main:main', |
1361 | + 'cloud-id = cloudinit.cmd.cloud_id:main' |
1362 | ], |
1363 | } |
1364 | ) |
1365 | diff --git a/tests/cloud_tests/testcases/base.py b/tests/cloud_tests/testcases/base.py |
1366 | index e18d601..16b268e 100644 |
1367 | --- a/tests/cloud_tests/testcases/base.py |
1368 | +++ b/tests/cloud_tests/testcases/base.py |
1369 | @@ -195,6 +195,9 @@ class CloudTestCase(unittest2.TestCase): |
1370 | self.assertIsNotNone( |
1371 | v1_data['availability_zone'], 'expected ec2 availability_zone') |
1372 | self.assertEqual('aws', v1_data['cloud_name']) |
1373 | + self.assertEqual('ec2', v1_data['platform']) |
1374 | + self.assertEqual( |
1375 | + 'metadata (http://169.254.169.254)', v1_data['subplatform']) |
1376 | self.assertIn('i-', v1_data['instance_id']) |
1377 | self.assertIn('ip-', v1_data['local_hostname']) |
1378 | self.assertIsNotNone(v1_data['region'], 'expected ec2 region') |
1379 | @@ -220,7 +223,11 @@ class CloudTestCase(unittest2.TestCase): |
1380 | instance_data = json.loads(out) |
1381 | v1_data = instance_data.get('v1', {}) |
1382 | self.assertItemsEqual([], sorted(instance_data['base64_encoded_keys'])) |
1383 | - self.assertEqual('nocloud', v1_data['cloud_name']) |
1384 | + self.assertEqual('unknown', v1_data['cloud_name']) |
1385 | + self.assertEqual('lxd', v1_data['platform']) |
1386 | + self.assertEqual( |
1387 | + 'seed-dir (/var/lib/cloud/seed/nocloud-net)', |
1388 | + v1_data['subplatform']) |
1389 | self.assertIsNone( |
1390 | v1_data['availability_zone'], |
1391 | 'found unexpected lxd availability_zone %s' % |
1392 | @@ -253,7 +260,9 @@ class CloudTestCase(unittest2.TestCase): |
1393 | instance_data = json.loads(out) |
1394 | v1_data = instance_data.get('v1', {}) |
1395 | self.assertItemsEqual([], instance_data['base64_encoded_keys']) |
1396 | - self.assertEqual('nocloud', v1_data['cloud_name']) |
1397 | + self.assertEqual('unknown', v1_data['cloud_name']) |
1398 | + self.assertEqual('nocloud', v1_data['platform']) |
1399 | + self.assertEqual('config-disk (/dev/vda)', v1_data['subplatform']) |
1400 | self.assertIsNone( |
1401 | v1_data['availability_zone'], |
1402 | 'found unexpected kvm availability_zone %s' % |
1403 | diff --git a/tests/unittests/test_datasource/test_aliyun.py b/tests/unittests/test_datasource/test_aliyun.py |
1404 | index 1e77842..e9213ca 100644 |
1405 | --- a/tests/unittests/test_datasource/test_aliyun.py |
1406 | +++ b/tests/unittests/test_datasource/test_aliyun.py |
1407 | @@ -140,6 +140,10 @@ class TestAliYunDatasource(test_helpers.HttprettyTestCase): |
1408 | self._test_get_sshkey() |
1409 | self._test_get_iid() |
1410 | self._test_host_name() |
1411 | + self.assertEqual('aliyun', self.ds.cloud_name) |
1412 | + self.assertEqual('ec2', self.ds.platform) |
1413 | + self.assertEqual( |
1414 | + 'metadata (http://100.100.100.200)', self.ds.subplatform) |
1415 | |
1416 | @mock.patch("cloudinit.sources.DataSourceAliYun._is_aliyun") |
1417 | def test_returns_false_when_not_on_aliyun(self, m_is_aliyun): |
1418 | diff --git a/tests/unittests/test_datasource/test_altcloud.py b/tests/unittests/test_datasource/test_altcloud.py |
1419 | index ff35904..3119bfa 100644 |
1420 | --- a/tests/unittests/test_datasource/test_altcloud.py |
1421 | +++ b/tests/unittests/test_datasource/test_altcloud.py |
1422 | @@ -10,7 +10,6 @@ |
1423 | This test file exercises the code in sources DataSourceAltCloud.py |
1424 | ''' |
1425 | |
1426 | -import mock |
1427 | import os |
1428 | import shutil |
1429 | import tempfile |
1430 | @@ -18,32 +17,13 @@ import tempfile |
1431 | from cloudinit import helpers |
1432 | from cloudinit import util |
1433 | |
1434 | -from cloudinit.tests.helpers import CiTestCase |
1435 | +from cloudinit.tests.helpers import CiTestCase, mock |
1436 | |
1437 | import cloudinit.sources.DataSourceAltCloud as dsac |
1438 | |
1439 | OS_UNAME_ORIG = getattr(os, 'uname') |
1440 | |
1441 | |
1442 | -def _write_cloud_info_file(value): |
1443 | - ''' |
1444 | - Populate the CLOUD_INFO_FILE which would be populated |
1445 | - with a cloud backend identifier ImageFactory when building |
1446 | - an image with ImageFactory. |
1447 | - ''' |
1448 | - cifile = open(dsac.CLOUD_INFO_FILE, 'w') |
1449 | - cifile.write(value) |
1450 | - cifile.close() |
1451 | - os.chmod(dsac.CLOUD_INFO_FILE, 0o664) |
1452 | - |
1453 | - |
1454 | -def _remove_cloud_info_file(): |
1455 | - ''' |
1456 | - Remove the test CLOUD_INFO_FILE |
1457 | - ''' |
1458 | - os.remove(dsac.CLOUD_INFO_FILE) |
1459 | - |
1460 | - |
1461 | def _write_user_data_files(mount_dir, value): |
1462 | ''' |
1463 | Populate the deltacloud_user_data_file the user_data_file |
1464 | @@ -98,13 +78,15 @@ def _dmi_data(expected): |
1465 | |
1466 | |
1467 | class TestGetCloudType(CiTestCase): |
1468 | - ''' |
1469 | - Test to exercise method: DataSourceAltCloud.get_cloud_type() |
1470 | - ''' |
1471 | + '''Test to exercise method: DataSourceAltCloud.get_cloud_type()''' |
1472 | + |
1473 | + with_logs = True |
1474 | |
1475 | def setUp(self): |
1476 | '''Set up.''' |
1477 | - self.paths = helpers.Paths({'cloud_dir': '/tmp'}) |
1478 | + super(TestGetCloudType, self).setUp() |
1479 | + self.tmp = self.tmp_dir() |
1480 | + self.paths = helpers.Paths({'cloud_dir': self.tmp}) |
1481 | self.dmi_data = util.read_dmi_data |
1482 | # We have a different code path for arm to deal with LP1243287 |
1483 | # We have to switch arch to x86_64 to avoid test failure |
1484 | @@ -115,6 +97,26 @@ class TestGetCloudType(CiTestCase): |
1485 | util.read_dmi_data = self.dmi_data |
1486 | force_arch() |
1487 | |
1488 | + def test_cloud_info_file_ioerror(self): |
1489 | + """Return UNKNOWN when /etc/sysconfig/cloud-info exists but errors.""" |
1490 | + self.assertEqual('/etc/sysconfig/cloud-info', dsac.CLOUD_INFO_FILE) |
1491 | + dsrc = dsac.DataSourceAltCloud({}, None, self.paths) |
1492 | + # Attempting to read the directory generates IOError |
1493 | + with mock.patch.object(dsac, 'CLOUD_INFO_FILE', self.tmp): |
1494 | + self.assertEqual('UNKNOWN', dsrc.get_cloud_type()) |
1495 | + self.assertIn( |
1496 | + "[Errno 21] Is a directory: '%s'" % self.tmp, |
1497 | + self.logs.getvalue()) |
1498 | + |
1499 | + def test_cloud_info_file(self): |
1500 | + """Return uppercase stripped content from /etc/sysconfig/cloud-info.""" |
1501 | + dsrc = dsac.DataSourceAltCloud({}, None, self.paths) |
1502 | + cloud_info = self.tmp_path('cloud-info', dir=self.tmp) |
1503 | + util.write_file(cloud_info, ' OverRiDdeN CloudType ') |
1504 | +        # Point CLOUD_INFO_FILE at the file we just wrote |
1505 | + with mock.patch.object(dsac, 'CLOUD_INFO_FILE', cloud_info): |
1506 | + self.assertEqual('OVERRIDDEN CLOUDTYPE', dsrc.get_cloud_type()) |
1507 | + |
1508 | def test_rhev(self): |
1509 | ''' |
1510 | Test method get_cloud_type() for RHEVm systems. |
1511 | @@ -153,60 +155,57 @@ class TestGetDataCloudInfoFile(CiTestCase): |
1512 | self.tmp = self.tmp_dir() |
1513 | self.paths = helpers.Paths( |
1514 | {'cloud_dir': self.tmp, 'run_dir': self.tmp}) |
1515 | - self.cloud_info_file = tempfile.mkstemp()[1] |
1516 | - self.dmi_data = util.read_dmi_data |
1517 | - dsac.CLOUD_INFO_FILE = self.cloud_info_file |
1518 | - |
1519 | - def tearDown(self): |
1520 | - # Reset |
1521 | - |
1522 | - # Attempt to remove the temp file ignoring errors |
1523 | - try: |
1524 | - os.remove(self.cloud_info_file) |
1525 | - except OSError: |
1526 | - pass |
1527 | - |
1528 | - util.read_dmi_data = self.dmi_data |
1529 | - dsac.CLOUD_INFO_FILE = '/etc/sysconfig/cloud-info' |
1530 | + self.cloud_info_file = self.tmp_path('cloud-info', dir=self.tmp) |
1531 | |
1532 | def test_rhev(self): |
1533 | '''Success Test module get_data() forcing RHEV.''' |
1534 | |
1535 | - _write_cloud_info_file('RHEV') |
1536 | + util.write_file(self.cloud_info_file, 'RHEV') |
1537 | dsrc = dsac.DataSourceAltCloud({}, None, self.paths) |
1538 | dsrc.user_data_rhevm = lambda: True |
1539 | - self.assertEqual(True, dsrc.get_data()) |
1540 | + with mock.patch.object(dsac, 'CLOUD_INFO_FILE', self.cloud_info_file): |
1541 | + self.assertEqual(True, dsrc.get_data()) |
1542 | + self.assertEqual('altcloud', dsrc.cloud_name) |
1543 | + self.assertEqual('altcloud', dsrc.platform_type) |
1544 | + self.assertEqual('rhev (/dev/fd0)', dsrc.subplatform) |
1545 | |
1546 | def test_vsphere(self): |
1547 | '''Success Test module get_data() forcing VSPHERE.''' |
1548 | |
1549 | - _write_cloud_info_file('VSPHERE') |
1550 | + util.write_file(self.cloud_info_file, 'VSPHERE') |
1551 | dsrc = dsac.DataSourceAltCloud({}, None, self.paths) |
1552 | dsrc.user_data_vsphere = lambda: True |
1553 | - self.assertEqual(True, dsrc.get_data()) |
1554 | + with mock.patch.object(dsac, 'CLOUD_INFO_FILE', self.cloud_info_file): |
1555 | + self.assertEqual(True, dsrc.get_data()) |
1556 | + self.assertEqual('altcloud', dsrc.cloud_name) |
1557 | + self.assertEqual('altcloud', dsrc.platform_type) |
1558 | + self.assertEqual('vsphere (unknown)', dsrc.subplatform) |
1559 | |
1560 | def test_fail_rhev(self): |
1561 | '''Failure Test module get_data() forcing RHEV.''' |
1562 | |
1563 | - _write_cloud_info_file('RHEV') |
1564 | + util.write_file(self.cloud_info_file, 'RHEV') |
1565 | dsrc = dsac.DataSourceAltCloud({}, None, self.paths) |
1566 | dsrc.user_data_rhevm = lambda: False |
1567 | - self.assertEqual(False, dsrc.get_data()) |
1568 | + with mock.patch.object(dsac, 'CLOUD_INFO_FILE', self.cloud_info_file): |
1569 | + self.assertEqual(False, dsrc.get_data()) |
1570 | |
1571 | def test_fail_vsphere(self): |
1572 | '''Failure Test module get_data() forcing VSPHERE.''' |
1573 | |
1574 | - _write_cloud_info_file('VSPHERE') |
1575 | + util.write_file(self.cloud_info_file, 'VSPHERE') |
1576 | dsrc = dsac.DataSourceAltCloud({}, None, self.paths) |
1577 | dsrc.user_data_vsphere = lambda: False |
1578 | - self.assertEqual(False, dsrc.get_data()) |
1579 | + with mock.patch.object(dsac, 'CLOUD_INFO_FILE', self.cloud_info_file): |
1580 | + self.assertEqual(False, dsrc.get_data()) |
1581 | |
1582 | def test_unrecognized(self): |
1583 | '''Failure Test module get_data() forcing unrecognized.''' |
1584 | |
1585 | - _write_cloud_info_file('unrecognized') |
1586 | + util.write_file(self.cloud_info_file, 'unrecognized') |
1587 | dsrc = dsac.DataSourceAltCloud({}, None, self.paths) |
1588 | - self.assertEqual(False, dsrc.get_data()) |
1589 | + with mock.patch.object(dsac, 'CLOUD_INFO_FILE', self.cloud_info_file): |
1590 | + self.assertEqual(False, dsrc.get_data()) |
1591 | |
1592 | |
1593 | class TestGetDataNoCloudInfoFile(CiTestCase): |
1594 | @@ -322,7 +321,8 @@ class TestUserDataVsphere(CiTestCase): |
1595 | ''' |
1596 | def setUp(self): |
1597 | '''Set up.''' |
1598 | - self.paths = helpers.Paths({'cloud_dir': '/tmp'}) |
1599 | + self.tmp = self.tmp_dir() |
1600 | + self.paths = helpers.Paths({'cloud_dir': self.tmp}) |
1601 | self.mount_dir = tempfile.mkdtemp() |
1602 | |
1603 | _write_user_data_files(self.mount_dir, 'test user data') |
1604 | @@ -363,6 +363,22 @@ class TestUserDataVsphere(CiTestCase): |
1605 | self.assertEqual(1, m_find_devs_with.call_count) |
1606 | self.assertEqual(1, m_mount_cb.call_count) |
1607 | |
1608 | + @mock.patch("cloudinit.sources.DataSourceAltCloud.util.find_devs_with") |
1609 | + @mock.patch("cloudinit.sources.DataSourceAltCloud.util.mount_cb") |
1610 | + def test_user_data_vsphere_success(self, m_mount_cb, m_find_devs_with): |
1611 | + """Test user_data_vsphere() where successful.""" |
1612 | + m_find_devs_with.return_value = ["/dev/mock/cdrom"] |
1613 | + m_mount_cb.return_value = 'raw userdata from cdrom' |
1614 | + dsrc = dsac.DataSourceAltCloud({}, None, self.paths) |
1615 | + cloud_info = self.tmp_path('cloud-info', dir=self.tmp) |
1616 | + util.write_file(cloud_info, 'VSPHERE') |
1617 | + self.assertEqual(True, dsrc.user_data_vsphere()) |
1618 | + m_find_devs_with.assert_called_once_with('LABEL=CDROM') |
1619 | + m_mount_cb.assert_called_once_with( |
1620 | + '/dev/mock/cdrom', dsac.read_user_data_callback) |
1621 | + with mock.patch.object(dsrc, 'get_cloud_type', return_value='VSPHERE'): |
1622 | + self.assertEqual('vsphere (/dev/mock/cdrom)', dsrc.subplatform) |
1623 | + |
1624 | |
1625 | class TestReadUserDataCallback(CiTestCase): |
1626 | ''' |
1627 | diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py |
1628 | index 4e428b7..0f4b7bf 100644 |
1629 | --- a/tests/unittests/test_datasource/test_azure.py |
1630 | +++ b/tests/unittests/test_datasource/test_azure.py |
1631 | @@ -110,6 +110,8 @@ NETWORK_METADATA = { |
1632 | } |
1633 | } |
1634 | |
1635 | +MOCKPATH = 'cloudinit.sources.DataSourceAzure.' |
1636 | + |
1637 | |
1638 | class TestGetMetadataFromIMDS(HttprettyTestCase): |
1639 | |
1640 | @@ -119,9 +121,9 @@ class TestGetMetadataFromIMDS(HttprettyTestCase): |
1641 | super(TestGetMetadataFromIMDS, self).setUp() |
1642 | self.network_md_url = dsaz.IMDS_URL + "instance?api-version=2017-12-01" |
1643 | |
1644 | - @mock.patch('cloudinit.sources.DataSourceAzure.readurl') |
1645 | - @mock.patch('cloudinit.sources.DataSourceAzure.EphemeralDHCPv4') |
1646 | - @mock.patch('cloudinit.sources.DataSourceAzure.net.is_up') |
1647 | + @mock.patch(MOCKPATH + 'readurl') |
1648 | + @mock.patch(MOCKPATH + 'EphemeralDHCPv4') |
1649 | + @mock.patch(MOCKPATH + 'net.is_up') |
1650 | def test_get_metadata_does_not_dhcp_if_network_is_up( |
1651 | self, m_net_is_up, m_dhcp, m_readurl): |
1652 | """Do not perform DHCP setup when nic is already up.""" |
1653 | @@ -138,9 +140,9 @@ class TestGetMetadataFromIMDS(HttprettyTestCase): |
1654 | "Crawl of Azure Instance Metadata Service (IMDS) took", # log_time |
1655 | self.logs.getvalue()) |
1656 | |
1657 | - @mock.patch('cloudinit.sources.DataSourceAzure.readurl') |
1658 | - @mock.patch('cloudinit.sources.DataSourceAzure.EphemeralDHCPv4') |
1659 | - @mock.patch('cloudinit.sources.DataSourceAzure.net.is_up') |
1660 | + @mock.patch(MOCKPATH + 'readurl') |
1661 | + @mock.patch(MOCKPATH + 'EphemeralDHCPv4') |
1662 | + @mock.patch(MOCKPATH + 'net.is_up') |
1663 | def test_get_metadata_performs_dhcp_when_network_is_down( |
1664 | self, m_net_is_up, m_dhcp, m_readurl): |
1665 | """Perform DHCP setup when nic is not up.""" |
1666 | @@ -163,7 +165,7 @@ class TestGetMetadataFromIMDS(HttprettyTestCase): |
1667 | headers={'Metadata': 'true'}, retries=2, timeout=1) |
1668 | |
1669 | @mock.patch('cloudinit.url_helper.time.sleep') |
1670 | - @mock.patch('cloudinit.sources.DataSourceAzure.net.is_up') |
1671 | + @mock.patch(MOCKPATH + 'net.is_up') |
1672 | def test_get_metadata_from_imds_empty_when_no_imds_present( |
1673 | self, m_net_is_up, m_sleep): |
1674 | """Return empty dict when IMDS network metadata is absent.""" |
1675 | @@ -380,7 +382,7 @@ fdescfs /dev/fd fdescfs rw 0 0 |
1676 | res = get_path_dev_freebsd('/etc', mnt_list) |
1677 | self.assertIsNotNone(res) |
1678 | |
1679 | - @mock.patch('cloudinit.sources.DataSourceAzure._is_platform_viable') |
1680 | + @mock.patch(MOCKPATH + '_is_platform_viable') |
1681 | def test_call_is_platform_viable_seed(self, m_is_platform_viable): |
1682 | """Check seed_dir using _is_platform_viable and return False.""" |
1683 | # Return a non-matching asset tag value |
1684 | @@ -401,6 +403,24 @@ fdescfs /dev/fd fdescfs rw 0 0 |
1685 | self.assertEqual(dsrc.metadata['local-hostname'], odata['HostName']) |
1686 | self.assertTrue(os.path.isfile( |
1687 | os.path.join(self.waagent_d, 'ovf-env.xml'))) |
1688 | + self.assertEqual('azure', dsrc.cloud_name) |
1689 | + self.assertEqual('azure', dsrc.platform_type) |
1690 | + self.assertEqual( |
1691 | + 'seed-dir (%s/seed/azure)' % self.tmp, dsrc.subplatform) |
1692 | + |
1693 | + def test_basic_dev_file(self): |
1694 | + """When a device path is used, present that in subplatform.""" |
1695 | + data = {'sys_cfg': {}, 'dsdevs': ['/dev/cd0']} |
1696 | + dsrc = self._get_ds(data) |
1697 | + with mock.patch(MOCKPATH + 'util.mount_cb') as m_mount_cb: |
1698 | + m_mount_cb.return_value = ( |
1699 | + {'local-hostname': 'me'}, 'ud', {'cfg': ''}, {}) |
1700 | + self.assertTrue(dsrc.get_data()) |
1701 | + self.assertEqual(dsrc.userdata_raw, 'ud') |
1702 | + self.assertEqual(dsrc.metadata['local-hostname'], 'me') |
1703 | + self.assertEqual('azure', dsrc.cloud_name) |
1704 | + self.assertEqual('azure', dsrc.platform_type) |
1705 | + self.assertEqual('config-disk (/dev/cd0)', dsrc.subplatform) |
1706 | |
1707 | def test_get_data_non_ubuntu_will_not_remove_network_scripts(self): |
1708 | """get_data on non-Ubuntu will not remove ubuntu net scripts.""" |
1709 | @@ -769,8 +789,8 @@ fdescfs /dev/fd fdescfs rw 0 0 |
1710 | ds.get_data() |
1711 | self.assertEqual(self.instance_id, ds.metadata['instance-id']) |
1712 | |
1713 | - @mock.patch("cloudinit.sources.DataSourceAzure.util.is_FreeBSD") |
1714 | - @mock.patch("cloudinit.sources.DataSourceAzure._check_freebsd_cdrom") |
1715 | + @mock.patch(MOCKPATH + 'util.is_FreeBSD') |
1716 | + @mock.patch(MOCKPATH + '_check_freebsd_cdrom') |
1717 | def test_list_possible_azure_ds_devs(self, m_check_fbsd_cdrom, |
1718 | m_is_FreeBSD): |
1719 | """On FreeBSD, possible devs should show /dev/cd0.""" |
1720 | @@ -885,17 +905,17 @@ fdescfs /dev/fd fdescfs rw 0 0 |
1721 | expected_config['config'].append(blacklist_config) |
1722 | self.assertEqual(netconfig, expected_config) |
1723 | |
1724 | - @mock.patch("cloudinit.sources.DataSourceAzure.util.subp") |
1725 | + @mock.patch(MOCKPATH + 'util.subp') |
1726 | def test_get_hostname_with_no_args(self, subp): |
1727 | dsaz.get_hostname() |
1728 | subp.assert_called_once_with(("hostname",), capture=True) |
1729 | |
1730 | - @mock.patch("cloudinit.sources.DataSourceAzure.util.subp") |
1731 | + @mock.patch(MOCKPATH + 'util.subp') |
1732 | def test_get_hostname_with_string_arg(self, subp): |
1733 | dsaz.get_hostname(hostname_command="hostname") |
1734 | subp.assert_called_once_with(("hostname",), capture=True) |
1735 | |
1736 | - @mock.patch("cloudinit.sources.DataSourceAzure.util.subp") |
1737 | + @mock.patch(MOCKPATH + 'util.subp') |
1738 | def test_get_hostname_with_iterable_arg(self, subp): |
1739 | dsaz.get_hostname(hostname_command=("hostname",)) |
1740 | subp.assert_called_once_with(("hostname",), capture=True) |
1741 | @@ -949,7 +969,7 @@ class TestAzureBounce(CiTestCase): |
1742 | self.set_hostname = self.patches.enter_context( |
1743 | mock.patch.object(dsaz, 'set_hostname')) |
1744 | self.subp = self.patches.enter_context( |
1745 | - mock.patch('cloudinit.sources.DataSourceAzure.util.subp')) |
1746 | + mock.patch(MOCKPATH + 'util.subp')) |
1747 | self.find_fallback_nic = self.patches.enter_context( |
1748 | mock.patch('cloudinit.net.find_fallback_nic', return_value='eth9')) |
1749 | |
1750 | @@ -989,7 +1009,7 @@ class TestAzureBounce(CiTestCase): |
1751 | ds.get_data() |
1752 | self.assertEqual(0, self.set_hostname.call_count) |
1753 | |
1754 | - @mock.patch('cloudinit.sources.DataSourceAzure.perform_hostname_bounce') |
1755 | + @mock.patch(MOCKPATH + 'perform_hostname_bounce') |
1756 | def test_disabled_bounce_does_not_perform_bounce( |
1757 | self, perform_hostname_bounce): |
1758 | cfg = {'hostname_bounce': {'policy': 'off'}} |
1759 | @@ -1005,7 +1025,7 @@ class TestAzureBounce(CiTestCase): |
1760 | ds.get_data() |
1761 | self.assertEqual(0, self.set_hostname.call_count) |
1762 | |
1763 | - @mock.patch('cloudinit.sources.DataSourceAzure.perform_hostname_bounce') |
1764 | + @mock.patch(MOCKPATH + 'perform_hostname_bounce') |
1765 | def test_unchanged_hostname_does_not_perform_bounce( |
1766 | self, perform_hostname_bounce): |
1767 | host_name = 'unchanged-host-name' |
1768 | @@ -1015,7 +1035,7 @@ class TestAzureBounce(CiTestCase): |
1769 | ds.get_data() |
1770 | self.assertEqual(0, perform_hostname_bounce.call_count) |
1771 | |
1772 | - @mock.patch('cloudinit.sources.DataSourceAzure.perform_hostname_bounce') |
1773 | + @mock.patch(MOCKPATH + 'perform_hostname_bounce') |
1774 | def test_force_performs_bounce_regardless(self, perform_hostname_bounce): |
1775 | host_name = 'unchanged-host-name' |
1776 | self.get_hostname.return_value = host_name |
1777 | @@ -1032,7 +1052,7 @@ class TestAzureBounce(CiTestCase): |
1778 | cfg = {'hostname_bounce': {'policy': 'force'}} |
1779 | dsrc = self._get_ds(self.get_ovf_env_with_dscfg(host_name, cfg), |
1780 | agent_command=['not', '__builtin__']) |
1781 | - patch_path = 'cloudinit.sources.DataSourceAzure.util.which' |
1782 | + patch_path = MOCKPATH + 'util.which' |
1783 | with mock.patch(patch_path) as m_which: |
1784 | m_which.return_value = None |
1785 | ret = self._get_and_setup(dsrc) |
1786 | @@ -1053,7 +1073,7 @@ class TestAzureBounce(CiTestCase): |
1787 | self.assertEqual(expected_hostname, |
1788 | self.set_hostname.call_args_list[0][0][0]) |
1789 | |
1790 | - @mock.patch('cloudinit.sources.DataSourceAzure.perform_hostname_bounce') |
1791 | + @mock.patch(MOCKPATH + 'perform_hostname_bounce') |
1792 | def test_different_hostnames_performs_bounce( |
1793 | self, perform_hostname_bounce): |
1794 | expected_hostname = 'azure-expected-host-name' |
1795 | @@ -1076,7 +1096,7 @@ class TestAzureBounce(CiTestCase): |
1796 | self.assertEqual(initial_host_name, |
1797 | self.set_hostname.call_args_list[-1][0][0]) |
1798 | |
1799 | - @mock.patch('cloudinit.sources.DataSourceAzure.perform_hostname_bounce') |
1800 | + @mock.patch(MOCKPATH + 'perform_hostname_bounce') |
1801 | def test_failure_in_bounce_still_resets_host_name( |
1802 | self, perform_hostname_bounce): |
1803 | perform_hostname_bounce.side_effect = Exception |
1804 | @@ -1117,7 +1137,7 @@ class TestAzureBounce(CiTestCase): |
1805 | self.assertEqual( |
1806 | dsaz.BOUNCE_COMMAND_IFUP, bounce_args) |
1807 | |
1808 | - @mock.patch('cloudinit.sources.DataSourceAzure.perform_hostname_bounce') |
1809 | + @mock.patch(MOCKPATH + 'perform_hostname_bounce') |
1810 | def test_set_hostname_option_can_disable_bounce( |
1811 | self, perform_hostname_bounce): |
1812 | cfg = {'set_hostname': False, 'hostname_bounce': {'policy': 'force'}} |
1813 | @@ -1218,12 +1238,12 @@ class TestCanDevBeReformatted(CiTestCase): |
1814 | def has_ntfs_fs(device): |
1815 | return bypath.get(device, {}).get('fs') == 'ntfs' |
1816 | |
1817 | - p = 'cloudinit.sources.DataSourceAzure' |
1818 | - self._domock(p + "._partitions_on_device", 'm_partitions_on_device') |
1819 | - self._domock(p + "._has_ntfs_filesystem", 'm_has_ntfs_filesystem') |
1820 | - self._domock(p + ".util.mount_cb", 'm_mount_cb') |
1821 | - self._domock(p + ".os.path.realpath", 'm_realpath') |
1822 | - self._domock(p + ".os.path.exists", 'm_exists') |
1823 | + p = MOCKPATH |
1824 | + self._domock(p + "_partitions_on_device", 'm_partitions_on_device') |
1825 | + self._domock(p + "_has_ntfs_filesystem", 'm_has_ntfs_filesystem') |
1826 | + self._domock(p + "util.mount_cb", 'm_mount_cb') |
1827 | + self._domock(p + "os.path.realpath", 'm_realpath') |
1828 | + self._domock(p + "os.path.exists", 'm_exists') |
1829 | |
1830 | self.m_exists.side_effect = lambda p: p in bypath |
1831 | self.m_realpath.side_effect = realpath |
1832 | @@ -1488,7 +1508,7 @@ class TestPreprovisioningShouldReprovision(CiTestCase): |
1833 | self.paths = helpers.Paths({'cloud_dir': tmp}) |
1834 | dsaz.BUILTIN_DS_CONFIG['data_dir'] = self.waagent_d |
1835 | |
1836 | - @mock.patch('cloudinit.sources.DataSourceAzure.util.write_file') |
1837 | + @mock.patch(MOCKPATH + 'util.write_file') |
1838 | def test__should_reprovision_with_true_cfg(self, isfile, write_f): |
1839 | """The _should_reprovision method should return true with config |
1840 | flag present.""" |
1841 | @@ -1512,7 +1532,7 @@ class TestPreprovisioningShouldReprovision(CiTestCase): |
1842 | dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths) |
1843 | self.assertFalse(dsa._should_reprovision((None, None, {}, None))) |
1844 | |
1845 | - @mock.patch('cloudinit.sources.DataSourceAzure.DataSourceAzure._poll_imds') |
1846 | + @mock.patch(MOCKPATH + 'DataSourceAzure._poll_imds') |
1847 | def test_reprovision_calls__poll_imds(self, _poll_imds, isfile): |
1848 | """_reprovision will poll IMDS.""" |
1849 | isfile.return_value = False |
1850 | @@ -1528,8 +1548,7 @@ class TestPreprovisioningShouldReprovision(CiTestCase): |
1851 | @mock.patch('cloudinit.net.dhcp.EphemeralIPv4Network') |
1852 | @mock.patch('cloudinit.net.dhcp.maybe_perform_dhcp_discovery') |
1853 | @mock.patch('requests.Session.request') |
1854 | -@mock.patch( |
1855 | - 'cloudinit.sources.DataSourceAzure.DataSourceAzure._report_ready') |
1856 | +@mock.patch(MOCKPATH + 'DataSourceAzure._report_ready') |
1857 | class TestPreprovisioningPollIMDS(CiTestCase): |
1858 | |
1859 | def setUp(self): |
1860 | @@ -1539,7 +1558,7 @@ class TestPreprovisioningPollIMDS(CiTestCase): |
1861 | self.paths = helpers.Paths({'cloud_dir': self.tmp}) |
1862 | dsaz.BUILTIN_DS_CONFIG['data_dir'] = self.waagent_d |
1863 | |
1864 | - @mock.patch('cloudinit.sources.DataSourceAzure.util.write_file') |
1865 | + @mock.patch(MOCKPATH + 'util.write_file') |
1866 | def test_poll_imds_calls_report_ready(self, write_f, report_ready_func, |
1867 | fake_resp, m_dhcp, m_net): |
1868 | """The poll_imds will call report_ready after creating marker file.""" |
1869 | @@ -1550,8 +1569,7 @@ class TestPreprovisioningPollIMDS(CiTestCase): |
1870 | 'unknown-245': '624c3620'} |
1871 | m_dhcp.return_value = [lease] |
1872 | dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths) |
1873 | - mock_path = ( |
1874 | - 'cloudinit.sources.DataSourceAzure.REPORTED_READY_MARKER_FILE') |
1875 | + mock_path = (MOCKPATH + 'REPORTED_READY_MARKER_FILE') |
1876 | with mock.patch(mock_path, report_marker): |
1877 | dsa._poll_imds() |
1878 | self.assertEqual(report_ready_func.call_count, 1) |
1879 | @@ -1561,23 +1579,21 @@ class TestPreprovisioningPollIMDS(CiTestCase): |
1880 | fake_resp, m_dhcp, m_net): |
1881 | """The poll_imds should not call reporting ready |
1882 | when flag is false""" |
1883 | - report_marker = self.tmp_path('report_marker', self.tmp) |
1884 | - write_file(report_marker, content='dont run report_ready :)') |
1885 | + report_file = self.tmp_path('report_marker', self.tmp) |
1886 | + write_file(report_file, content='dont run report_ready :)') |
1887 | m_dhcp.return_value = [{ |
1888 | 'interface': 'eth9', 'fixed-address': '192.168.2.9', |
1889 | 'routers': '192.168.2.1', 'subnet-mask': '255.255.255.0', |
1890 | 'unknown-245': '624c3620'}] |
1891 | dsa = dsaz.DataSourceAzure({}, distro=None, paths=self.paths) |
1892 | - mock_path = ( |
1893 | - 'cloudinit.sources.DataSourceAzure.REPORTED_READY_MARKER_FILE') |
1894 | - with mock.patch(mock_path, report_marker): |
1895 | + with mock.patch(MOCKPATH + 'REPORTED_READY_MARKER_FILE', report_file): |
1896 | dsa._poll_imds() |
1897 | self.assertEqual(report_ready_func.call_count, 0) |
1898 | |
1899 | |
1900 | -@mock.patch('cloudinit.sources.DataSourceAzure.util.subp') |
1901 | -@mock.patch('cloudinit.sources.DataSourceAzure.util.write_file') |
1902 | -@mock.patch('cloudinit.sources.DataSourceAzure.util.is_FreeBSD') |
1903 | +@mock.patch(MOCKPATH + 'util.subp') |
1904 | +@mock.patch(MOCKPATH + 'util.write_file') |
1905 | +@mock.patch(MOCKPATH + 'util.is_FreeBSD') |
1906 | @mock.patch('cloudinit.net.dhcp.EphemeralIPv4Network') |
1907 | @mock.patch('cloudinit.net.dhcp.maybe_perform_dhcp_discovery') |
1908 | @mock.patch('requests.Session.request') |
1909 | @@ -1688,7 +1704,7 @@ class TestRemoveUbuntuNetworkConfigScripts(CiTestCase): |
1910 | self.tmp_path('notfilehere', dir=self.tmp)]) |
1911 | self.assertNotIn('/not/a', self.logs.getvalue()) # No delete logs |
1912 | |
1913 | - @mock.patch('cloudinit.sources.DataSourceAzure.os.path.exists') |
1914 | + @mock.patch(MOCKPATH + 'os.path.exists') |
1915 | def test_remove_network_scripts_default_removes_stock_scripts(self, |
1916 | m_exists): |
1917 | """Azure's stock ubuntu image scripts and artifacts are removed.""" |
1918 | @@ -1704,14 +1720,14 @@ class TestWBIsPlatformViable(CiTestCase): |
1919 | """White box tests for _is_platform_viable.""" |
1920 | with_logs = True |
1921 | |
1922 | - @mock.patch('cloudinit.sources.DataSourceAzure.util.read_dmi_data') |
1923 | + @mock.patch(MOCKPATH + 'util.read_dmi_data') |
1924 | def test_true_on_non_azure_chassis(self, m_read_dmi_data): |
1925 | """Return True if DMI chassis-asset-tag is AZURE_CHASSIS_ASSET_TAG.""" |
1926 | m_read_dmi_data.return_value = dsaz.AZURE_CHASSIS_ASSET_TAG |
1927 | self.assertTrue(dsaz._is_platform_viable('doesnotmatter')) |
1928 | |
1929 | - @mock.patch('cloudinit.sources.DataSourceAzure.os.path.exists') |
1930 | - @mock.patch('cloudinit.sources.DataSourceAzure.util.read_dmi_data') |
1931 | + @mock.patch(MOCKPATH + 'os.path.exists') |
1932 | + @mock.patch(MOCKPATH + 'util.read_dmi_data') |
1933 | def test_true_on_azure_ovf_env_in_seed_dir(self, m_read_dmi_data, m_exist): |
1934 | """Return True if ovf-env.xml exists in known seed dirs.""" |
1935 | # Non-matching Azure chassis-asset-tag |
1936 | @@ -1729,7 +1745,7 @@ class TestWBIsPlatformViable(CiTestCase): |
1937 | and no devices have a label starting with prefix 'rd_rdfe_'. |
1938 | """ |
1939 | self.assertFalse(wrap_and_call( |
1940 | - 'cloudinit.sources.DataSourceAzure', |
1941 | + MOCKPATH, |
1942 | {'os.path.exists': False, |
1943 | # Non-matching Azure chassis-asset-tag |
1944 | 'util.read_dmi_data': dsaz.AZURE_CHASSIS_ASSET_TAG + 'X', |
1945 | diff --git a/tests/unittests/test_datasource/test_cloudsigma.py b/tests/unittests/test_datasource/test_cloudsigma.py |
1946 | index 380ad1b..3bf52e6 100644 |
1947 | --- a/tests/unittests/test_datasource/test_cloudsigma.py |
1948 | +++ b/tests/unittests/test_datasource/test_cloudsigma.py |
1949 | @@ -68,6 +68,12 @@ class DataSourceCloudSigmaTest(test_helpers.CiTestCase): |
1950 | self.assertEqual(SERVER_CONTEXT['uuid'], |
1951 | self.datasource.get_instance_id()) |
1952 | |
1953 | + def test_platform(self): |
1954 | + """All platform-related attributes are set.""" |
1955 | + self.assertEqual(self.datasource.cloud_name, 'cloudsigma') |
1956 | + self.assertEqual(self.datasource.platform_type, 'cloudsigma') |
1957 | + self.assertEqual(self.datasource.subplatform, 'cepko (/dev/ttyS1)') |
1958 | + |
1959 | def test_metadata(self): |
1960 | self.assertEqual(self.datasource.metadata, SERVER_CONTEXT) |
1961 | |
1962 | diff --git a/tests/unittests/test_datasource/test_configdrive.py b/tests/unittests/test_datasource/test_configdrive.py |
1963 | index 231619c..dcdabea 100644 |
1964 | --- a/tests/unittests/test_datasource/test_configdrive.py |
1965 | +++ b/tests/unittests/test_datasource/test_configdrive.py |
1966 | @@ -478,6 +478,9 @@ class TestConfigDriveDataSource(CiTestCase): |
1967 | myds = cfg_ds_from_dir(self.tmp, files=CFG_DRIVE_FILES_V2) |
1968 | self.assertEqual(myds.get_public_ssh_keys(), |
1969 | [OSTACK_META['public_keys']['mykey']]) |
1970 | + self.assertEqual('configdrive', myds.cloud_name) |
1971 | + self.assertEqual('openstack', myds.platform) |
1972 | + self.assertEqual('seed-dir (%s/seed)' % self.tmp, myds.subplatform) |
1973 | |
1974 | |
1975 | class TestNetJson(CiTestCase): |
1976 | diff --git a/tests/unittests/test_datasource/test_ec2.py b/tests/unittests/test_datasource/test_ec2.py |
1977 | index 497e761..9f81255 100644 |
1978 | --- a/tests/unittests/test_datasource/test_ec2.py |
1979 | +++ b/tests/unittests/test_datasource/test_ec2.py |
1980 | @@ -351,7 +351,9 @@ class TestEc2(test_helpers.HttprettyTestCase): |
1981 | m_get_interface_mac.return_value = mac1 |
1982 | nc = ds.network_config # Will re-crawl network metadata |
1983 | self.assertIsNotNone(nc) |
1984 | - self.assertIn('Re-crawl of metadata service', self.logs.getvalue()) |
1985 | + self.assertIn( |
1986 | + 'Refreshing stale metadata from prior to upgrade', |
1987 | + self.logs.getvalue()) |
1988 | expected = {'version': 1, 'config': [ |
1989 | {'mac_address': '06:17:04:d7:26:09', |
1990 | 'name': 'eth9', |
1991 | @@ -386,7 +388,7 @@ class TestEc2(test_helpers.HttprettyTestCase): |
1992 | register_mock_metaserver( |
1993 | '{0}/{1}/dynamic/'.format(ds.metadata_address, all_versions[-1]), |
1994 | DYNAMIC_METADATA) |
1995 | - ds._cloud_platform = ec2.Platforms.AWS |
1996 | + ds._cloud_name = ec2.CloudNames.AWS |
1997 | # Setup cached metadata on the Datasource |
1998 | ds.metadata = DEFAULT_METADATA |
1999 | self.assertEqual('my-identity-id', ds.get_instance_id()) |
2000 | @@ -401,6 +403,9 @@ class TestEc2(test_helpers.HttprettyTestCase): |
2001 | ret = ds.get_data() |
2002 | self.assertTrue(ret) |
2003 | self.assertEqual(0, m_dhcp.call_count) |
2004 | + self.assertEqual('aws', ds.cloud_name) |
2005 | + self.assertEqual('ec2', ds.platform_type) |
2006 | + self.assertEqual('metadata (%s)' % ds.metadata_address, ds.subplatform) |
2007 | |
2008 | def test_valid_platform_with_strict_false(self): |
2009 | """Valid platform data should return true with strict_id false.""" |
2010 | @@ -439,16 +444,17 @@ class TestEc2(test_helpers.HttprettyTestCase): |
2011 | sys_cfg={'datasource': {'Ec2': {'strict_id': False}}}, |
2012 | md=DEFAULT_METADATA) |
2013 | platform_attrs = [ |
2014 | - attr for attr in ec2.Platforms.__dict__.keys() |
2015 | + attr for attr in ec2.CloudNames.__dict__.keys() |
2016 | if not attr.startswith('__')] |
2017 | for attr_name in platform_attrs: |
2018 | - platform_name = getattr(ec2.Platforms, attr_name) |
2019 | - if platform_name != 'AWS': |
2020 | - ds._cloud_platform = platform_name |
2021 | + platform_name = getattr(ec2.CloudNames, attr_name) |
2022 | + if platform_name != 'aws': |
2023 | + ds._cloud_name = platform_name |
2024 | ret = ds.get_data() |
2025 | + self.assertEqual('ec2', ds.platform_type) |
2026 | self.assertFalse(ret) |
2027 | message = ( |
2028 | - "Local Ec2 mode only supported on ('AWS',)," |
2029 | + "Local Ec2 mode only supported on ('aws',)," |
2030 | ' not {0}'.format(platform_name)) |
2031 | self.assertIn(message, self.logs.getvalue()) |
2032 | |
2033 | diff --git a/tests/unittests/test_datasource/test_ibmcloud.py b/tests/unittests/test_datasource/test_ibmcloud.py |
2034 | index e639ae4..0b54f58 100644 |
2035 | --- a/tests/unittests/test_datasource/test_ibmcloud.py |
2036 | +++ b/tests/unittests/test_datasource/test_ibmcloud.py |
2037 | @@ -1,14 +1,17 @@ |
2038 | # This file is part of cloud-init. See LICENSE file for license information. |
2039 | |
2040 | +from cloudinit.helpers import Paths |
2041 | from cloudinit.sources import DataSourceIBMCloud as ibm |
2042 | from cloudinit.tests import helpers as test_helpers |
2043 | +from cloudinit import util |
2044 | |
2045 | import base64 |
2046 | import copy |
2047 | import json |
2048 | -import mock |
2049 | from textwrap import dedent |
2050 | |
2051 | +mock = test_helpers.mock |
2052 | + |
2053 | D_PATH = "cloudinit.sources.DataSourceIBMCloud." |
2054 | |
2055 | |
2056 | @@ -309,4 +312,39 @@ class TestIsIBMProvisioning(test_helpers.FilesystemMockingTestCase): |
2057 | self.assertIn("no reference file", self.logs.getvalue()) |
2058 | |
2059 | |
2060 | +class TestDataSourceIBMCloud(test_helpers.CiTestCase): |
2061 | + |
2062 | + def setUp(self): |
2063 | + super(TestDataSourceIBMCloud, self).setUp() |
2064 | + self.tmp = self.tmp_dir() |
2065 | + self.cloud_dir = self.tmp_path('cloud', dir=self.tmp) |
2066 | + util.ensure_dir(self.cloud_dir) |
2067 | + paths = Paths({'run_dir': self.tmp, 'cloud_dir': self.cloud_dir}) |
2068 | + self.ds = ibm.DataSourceIBMCloud( |
2069 | + sys_cfg={}, distro=None, paths=paths) |
2070 | + |
2071 | + def test_get_data_false(self): |
2072 | + """When read_md returns None, get_data returns False.""" |
2073 | + with mock.patch(D_PATH + 'read_md', return_value=None): |
2074 | + self.assertFalse(self.ds.get_data()) |
2075 | + |
2076 | + def test_get_data_processes_read_md(self): |
2077 | + """get_data processes and caches content returned by read_md.""" |
2078 | + md = { |
2079 | + 'metadata': {}, 'networkdata': 'net', 'platform': 'plat', |
2080 | + 'source': 'src', 'system-uuid': 'uuid', 'userdata': 'ud', |
2081 | + 'vendordata': 'vd'} |
2082 | + with mock.patch(D_PATH + 'read_md', return_value=md): |
2083 | + self.assertTrue(self.ds.get_data()) |
2084 | + self.assertEqual('src', self.ds.source) |
2085 | + self.assertEqual('plat', self.ds.platform) |
2086 | + self.assertEqual({}, self.ds.metadata) |
2087 | + self.assertEqual('ud', self.ds.userdata_raw) |
2088 | + self.assertEqual('net', self.ds.network_json) |
2089 | + self.assertEqual('vd', self.ds.vendordata_pure) |
2090 | + self.assertEqual('uuid', self.ds.system_uuid) |
2091 | + self.assertEqual('ibmcloud', self.ds.cloud_name) |
2092 | + self.assertEqual('ibmcloud', self.ds.platform_type) |
2093 | + self.assertEqual('plat (src)', self.ds.subplatform) |
2094 | + |
2095 | # vi: ts=4 expandtab |
2096 | diff --git a/tests/unittests/test_datasource/test_nocloud.py b/tests/unittests/test_datasource/test_nocloud.py |
2097 | index 21931eb..b6468b6 100644 |
2098 | --- a/tests/unittests/test_datasource/test_nocloud.py |
2099 | +++ b/tests/unittests/test_datasource/test_nocloud.py |
2100 | @@ -10,6 +10,7 @@ import textwrap |
2101 | import yaml |
2102 | |
2103 | |
2104 | +@mock.patch('cloudinit.sources.DataSourceNoCloud.util.is_lxd') |
2105 | class TestNoCloudDataSource(CiTestCase): |
2106 | |
2107 | def setUp(self): |
2108 | @@ -28,10 +29,11 @@ class TestNoCloudDataSource(CiTestCase): |
2109 | self.mocks.enter_context( |
2110 | mock.patch.object(util, 'read_dmi_data', return_value=None)) |
2111 | |
2112 | - def test_nocloud_seed_dir(self): |
2113 | + def test_nocloud_seed_dir_on_lxd(self, m_is_lxd): |
2114 | md = {'instance-id': 'IID', 'dsmode': 'local'} |
2115 | ud = b"USER_DATA_HERE" |
2116 | - populate_dir(os.path.join(self.paths.seed_dir, "nocloud"), |
2117 | + seed_dir = os.path.join(self.paths.seed_dir, "nocloud") |
2118 | + populate_dir(seed_dir, |
2119 | {'user-data': ud, 'meta-data': yaml.safe_dump(md)}) |
2120 | |
2121 | sys_cfg = { |
2122 | @@ -44,9 +46,32 @@ class TestNoCloudDataSource(CiTestCase): |
2123 | ret = dsrc.get_data() |
2124 | self.assertEqual(dsrc.userdata_raw, ud) |
2125 | self.assertEqual(dsrc.metadata, md) |
2126 | + self.assertEqual(dsrc.platform_type, 'lxd') |
2127 | + self.assertEqual( |
2128 | + dsrc.subplatform, 'seed-dir (%s)' % seed_dir) |
2129 | self.assertTrue(ret) |
2130 | |
2131 | - def test_fs_label(self): |
2132 | + def test_nocloud_seed_dir_non_lxd_platform_is_nocloud(self, m_is_lxd): |
2133 | + """Non-lxd environments will list nocloud as the platform.""" |
2134 | + m_is_lxd.return_value = False |
2135 | + md = {'instance-id': 'IID', 'dsmode': 'local'} |
2136 | + seed_dir = os.path.join(self.paths.seed_dir, "nocloud") |
2137 | + populate_dir(seed_dir, |
2138 | + {'user-data': '', 'meta-data': yaml.safe_dump(md)}) |
2139 | + |
2140 | + sys_cfg = { |
2141 | + 'datasource': {'NoCloud': {'fs_label': None}} |
2142 | + } |
2143 | + |
2144 | + ds = DataSourceNoCloud.DataSourceNoCloud |
2145 | + |
2146 | + dsrc = ds(sys_cfg=sys_cfg, distro=None, paths=self.paths) |
2147 | + self.assertTrue(dsrc.get_data()) |
2148 | + self.assertEqual(dsrc.platform_type, 'nocloud') |
2149 | + self.assertEqual( |
2150 | + dsrc.subplatform, 'seed-dir (%s)' % seed_dir) |
2151 | + |
2152 | + def test_fs_label(self, m_is_lxd): |
2153 | # find_devs_with should not be called ff fs_label is None |
2154 | ds = DataSourceNoCloud.DataSourceNoCloud |
2155 | |
2156 | @@ -68,7 +93,7 @@ class TestNoCloudDataSource(CiTestCase): |
2157 | ret = dsrc.get_data() |
2158 | self.assertFalse(ret) |
2159 | |
2160 | - def test_no_datasource_expected(self): |
2161 | + def test_no_datasource_expected(self, m_is_lxd): |
2162 | # no source should be found if no cmdline, config, and fs_label=None |
2163 | sys_cfg = {'datasource': {'NoCloud': {'fs_label': None}}} |
2164 | |
2165 | @@ -76,7 +101,7 @@ class TestNoCloudDataSource(CiTestCase): |
2166 | dsrc = ds(sys_cfg=sys_cfg, distro=None, paths=self.paths) |
2167 | self.assertFalse(dsrc.get_data()) |
2168 | |
2169 | - def test_seed_in_config(self): |
2170 | + def test_seed_in_config(self, m_is_lxd): |
2171 | ds = DataSourceNoCloud.DataSourceNoCloud |
2172 | |
2173 | data = { |
2174 | @@ -92,7 +117,7 @@ class TestNoCloudDataSource(CiTestCase): |
2175 | self.assertEqual(dsrc.metadata.get('instance-id'), 'IID') |
2176 | self.assertTrue(ret) |
2177 | |
2178 | - def test_nocloud_seed_with_vendordata(self): |
2179 | + def test_nocloud_seed_with_vendordata(self, m_is_lxd): |
2180 | md = {'instance-id': 'IID', 'dsmode': 'local'} |
2181 | ud = b"USER_DATA_HERE" |
2182 | vd = b"THIS IS MY VENDOR_DATA" |
2183 | @@ -114,7 +139,7 @@ class TestNoCloudDataSource(CiTestCase): |
2184 | self.assertEqual(dsrc.vendordata_raw, vd) |
2185 | self.assertTrue(ret) |
2186 | |
2187 | - def test_nocloud_no_vendordata(self): |
2188 | + def test_nocloud_no_vendordata(self, m_is_lxd): |
2189 | populate_dir(os.path.join(self.paths.seed_dir, "nocloud"), |
2190 | {'user-data': b"ud", 'meta-data': "instance-id: IID\n"}) |
2191 | |
2192 | @@ -128,7 +153,7 @@ class TestNoCloudDataSource(CiTestCase): |
2193 | self.assertFalse(dsrc.vendordata) |
2194 | self.assertTrue(ret) |
2195 | |
2196 | - def test_metadata_network_interfaces(self): |
2197 | + def test_metadata_network_interfaces(self, m_is_lxd): |
2198 | gateway = "103.225.10.1" |
2199 | md = { |
2200 | 'instance-id': 'i-abcd', |
2201 | @@ -157,7 +182,7 @@ class TestNoCloudDataSource(CiTestCase): |
2202 | # very simple check just for the strings above |
2203 | self.assertIn(gateway, str(dsrc.network_config)) |
2204 | |
2205 | - def test_metadata_network_config(self): |
2206 | + def test_metadata_network_config(self, m_is_lxd): |
2207 | # network-config needs to get into network_config |
2208 | netconf = {'version': 1, |
2209 | 'config': [{'type': 'physical', 'name': 'interface0', |
2210 | @@ -177,7 +202,7 @@ class TestNoCloudDataSource(CiTestCase): |
2211 | self.assertTrue(ret) |
2212 | self.assertEqual(netconf, dsrc.network_config) |
2213 | |
2214 | - def test_metadata_network_config_over_interfaces(self): |
2215 | + def test_metadata_network_config_over_interfaces(self, m_is_lxd): |
2216 | # network-config should override meta-data/network-interfaces |
2217 | gateway = "103.225.10.1" |
2218 | md = { |
2219 | diff --git a/tests/unittests/test_datasource/test_opennebula.py b/tests/unittests/test_datasource/test_opennebula.py |
2220 | index 6159101..bb399f6 100644 |
2221 | --- a/tests/unittests/test_datasource/test_opennebula.py |
2222 | +++ b/tests/unittests/test_datasource/test_opennebula.py |
2223 | @@ -123,6 +123,10 @@ class TestOpenNebulaDataSource(CiTestCase): |
2224 | self.assertTrue(ret) |
2225 | finally: |
2226 | util.find_devs_with = orig_find_devs_with |
2227 | + self.assertEqual('opennebula', dsrc.cloud_name) |
2228 | + self.assertEqual('opennebula', dsrc.platform_type) |
2229 | + self.assertEqual( |
2230 | + 'seed-dir (%s/seed/opennebula)' % self.tmp, dsrc.subplatform) |
2231 | |
2232 | def test_seed_dir_non_contextdisk(self): |
2233 | self.assertRaises(ds.NonContextDiskDir, ds.read_context_disk_dir, |
2234 | diff --git a/tests/unittests/test_datasource/test_ovf.py b/tests/unittests/test_datasource/test_ovf.py |
2235 | index 9d52eb9..a226c03 100644 |
2236 | --- a/tests/unittests/test_datasource/test_ovf.py |
2237 | +++ b/tests/unittests/test_datasource/test_ovf.py |
2238 | @@ -11,7 +11,7 @@ from collections import OrderedDict |
2239 | from textwrap import dedent |
2240 | |
2241 | from cloudinit import util |
2242 | -from cloudinit.tests.helpers import CiTestCase, wrap_and_call |
2243 | +from cloudinit.tests.helpers import CiTestCase, mock, wrap_and_call |
2244 | from cloudinit.helpers import Paths |
2245 | from cloudinit.sources import DataSourceOVF as dsovf |
2246 | from cloudinit.sources.helpers.vmware.imc.config_custom_script import ( |
2247 | @@ -120,7 +120,7 @@ class TestDatasourceOVF(CiTestCase): |
2248 | |
2249 | def test_get_data_false_on_none_dmi_data(self): |
2250 | """When dmi for system-product-name is None, get_data returns False.""" |
2251 | - paths = Paths({'seed_dir': self.tdir}) |
2252 | + paths = Paths({'cloud_dir': self.tdir}) |
2253 | ds = self.datasource(sys_cfg={}, distro={}, paths=paths) |
2254 | retcode = wrap_and_call( |
2255 | 'cloudinit.sources.DataSourceOVF', |
2256 | @@ -134,7 +134,7 @@ class TestDatasourceOVF(CiTestCase): |
2257 | |
2258 | def test_get_data_no_vmware_customization_disabled(self): |
2259 | """When vmware customization is disabled via sys_cfg log a message.""" |
2260 | - paths = Paths({'seed_dir': self.tdir}) |
2261 | + paths = Paths({'cloud_dir': self.tdir}) |
2262 | ds = self.datasource( |
2263 | sys_cfg={'disable_vmware_customization': True}, distro={}, |
2264 | paths=paths) |
2265 | @@ -153,7 +153,7 @@ class TestDatasourceOVF(CiTestCase): |
2266 | """When cloud-init workflow for vmware is enabled via sys_cfg log a |
2267 | message. |
2268 | """ |
2269 | - paths = Paths({'seed_dir': self.tdir}) |
2270 | + paths = Paths({'cloud_dir': self.tdir}) |
2271 | ds = self.datasource( |
2272 | sys_cfg={'disable_vmware_customization': False}, distro={}, |
2273 | paths=paths) |
2274 | @@ -178,6 +178,50 @@ class TestDatasourceOVF(CiTestCase): |
2275 | self.assertIn('Script %s not found!!' % customscript, |
2276 | str(context.exception)) |
2277 | |
2278 | + def test_get_data_non_vmware_seed_platform_info(self): |
2279 | + """Platform info properly reports when on non-vmware platforms.""" |
2280 | + paths = Paths({'cloud_dir': self.tdir, 'run_dir': self.tdir}) |
2281 | + # Write ovf-env.xml seed file |
2282 | + seed_dir = self.tmp_path('seed', dir=self.tdir) |
2283 | + ovf_env = self.tmp_path('ovf-env.xml', dir=seed_dir) |
2284 | + util.write_file(ovf_env, OVF_ENV_CONTENT) |
2285 | + ds = self.datasource(sys_cfg={}, distro={}, paths=paths) |
2286 | + |
2287 | + self.assertEqual('ovf', ds.cloud_name) |
2288 | + self.assertEqual('ovf', ds.platform_type) |
2289 | + MPATH = 'cloudinit.sources.DataSourceOVF.' |
2290 | + with mock.patch(MPATH + 'util.read_dmi_data', return_value='!VMware'): |
2291 | + with mock.patch(MPATH + 'transport_vmware_guestd') as m_guestd: |
2292 | + with mock.patch(MPATH + 'transport_iso9660') as m_iso9660: |
2293 | + m_iso9660.return_value = (None, 'ignored', 'ignored') |
2294 | + m_guestd.return_value = (None, 'ignored', 'ignored') |
2295 | + self.assertTrue(ds.get_data()) |
2296 | + self.assertEqual( |
2297 | + 'ovf (%s/seed/ovf-env.xml)' % self.tdir, |
2298 | + ds.subplatform) |
2299 | + |
2300 | + def test_get_data_vmware_seed_platform_info(self): |
2301 | + """Platform info properly reports when on VMware platform.""" |
2302 | + paths = Paths({'cloud_dir': self.tdir, 'run_dir': self.tdir}) |
2303 | + # Write ovf-env.xml seed file |
2304 | + seed_dir = self.tmp_path('seed', dir=self.tdir) |
2305 | + ovf_env = self.tmp_path('ovf-env.xml', dir=seed_dir) |
2306 | + util.write_file(ovf_env, OVF_ENV_CONTENT) |
2307 | + ds = self.datasource(sys_cfg={}, distro={}, paths=paths) |
2308 | + |
2309 | + self.assertEqual('ovf', ds.cloud_name) |
2310 | + self.assertEqual('ovf', ds.platform_type) |
2311 | + MPATH = 'cloudinit.sources.DataSourceOVF.' |
2312 | + with mock.patch(MPATH + 'util.read_dmi_data', return_value='VMWare'): |
2313 | + with mock.patch(MPATH + 'transport_vmware_guestd') as m_guestd: |
2314 | + with mock.patch(MPATH + 'transport_iso9660') as m_iso9660: |
2315 | + m_iso9660.return_value = (None, 'ignored', 'ignored') |
2316 | + m_guestd.return_value = (None, 'ignored', 'ignored') |
2317 | + self.assertTrue(ds.get_data()) |
2318 | + self.assertEqual( |
2319 | + 'vmware (%s/seed/ovf-env.xml)' % self.tdir, |
2320 | + ds.subplatform) |
2321 | + |
2322 | |
2323 | class TestTransportIso9660(CiTestCase): |
2324 | |
2325 | diff --git a/tests/unittests/test_datasource/test_smartos.py b/tests/unittests/test_datasource/test_smartos.py |
2326 | index 46d67b9..42ac697 100644 |
2327 | --- a/tests/unittests/test_datasource/test_smartos.py |
2328 | +++ b/tests/unittests/test_datasource/test_smartos.py |
2329 | @@ -426,6 +426,13 @@ class TestSmartOSDataSource(FilesystemMockingTestCase): |
2330 | self.assertEqual(MOCK_RETURNS['sdc:uuid'], |
2331 | dsrc.metadata['instance-id']) |
2332 | |
2333 | + def test_platform_info(self): |
2334 | + """All platform-related attributes are properly set.""" |
2335 | + dsrc = self._get_ds(mockdata=MOCK_RETURNS) |
2336 | + self.assertEqual('joyent', dsrc.cloud_name) |
2337 | + self.assertEqual('joyent', dsrc.platform_type) |
2338 | + self.assertEqual('serial (/dev/ttyS1)', dsrc.subplatform) |
2339 | + |
2340 | def test_root_keys(self): |
2341 | dsrc = self._get_ds(mockdata=MOCK_RETURNS) |
2342 | ret = dsrc.get_data() |
FAILED: Continuous integration, rev:abc12004af1c150d5f3504766ed858373ed254d5
https://jenkins.ubuntu.com/server/job/cloud-init-ci/381/
Executed test runs:
SUCCESS: Checkout
FAILED: Unit & Style Tests
Click here to trigger a rebuild:
https://jenkins.ubuntu.com/server/job/cloud-init-ci/381/rebuild