Merge ~chad.smith/cloud-init:ubuntu/devel into cloud-init:ubuntu/devel
- Git
- lp:~chad.smith/cloud-init
- ubuntu/devel
- Merge into ubuntu/devel
Proposed by
Chad Smith
Status: | Merged | ||||||||||||||||
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Merged at revision: | 23919e31120edf37c7ff7f9cba3eda4f46b6f5cb | ||||||||||||||||
Proposed branch: | ~chad.smith/cloud-init:ubuntu/devel | ||||||||||||||||
Merge into: | cloud-init:ubuntu/devel | ||||||||||||||||
Diff against target: |
955 lines (+415/-129) 14 files modified
cloudinit/sources/DataSourceConfigDrive.py (+2/-2) cloudinit/sources/DataSourceGCE.py (+95/-39) cloudinit/util.py (+51/-43) debian/changelog (+14/-0) tests/cloud_tests/platforms/ec2/instance.py (+8/-2) tests/cloud_tests/platforms/ec2/platform.py (+30/-3) tests/cloud_tests/releases.yaml (+0/-16) tests/cloud_tests/testcases/modules/ntp_pools.yaml (+1/-1) tests/cloud_tests/testcases/modules/ntp_servers.yaml (+1/-1) tests/unittests/test_datasource/test_configdrive.py (+6/-0) tests/unittests/test_datasource/test_gce.py (+172/-21) tests/unittests/test_ds_identify.py (+17/-0) tests/unittests/test_util.py (+15/-0) tools/ds-identify (+3/-1) |
||||||||||||||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Server Team CI bot | continuous-integration | Approve | |
Scott Moser | Pending | ||
Review via email:
|
Commit message
Description of the change
Sync upstream master for Bionic release.
To post a comment you must log in.
Revision history for this message

Chad Smith (chad.smith) wrote : | # |
Revision history for this message

Server Team CI bot (server-team-bot) wrote : | # |
PASSED: Continuous integration, rev:23919e31120
https:/
Executed test runs:
SUCCESS: Checkout
SUCCESS: Unit & Style Tests
SUCCESS: Ubuntu LTS: Build
SUCCESS: Ubuntu LTS: Integration
SUCCESS: MAAS Compatibility Testing
IN_PROGRESS: Declarative: Post Actions
Click here to trigger a rebuild:
https:/
review:
Approve
(continuous-integration)
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py |
2 | index 870b368..b8db626 100644 |
3 | --- a/cloudinit/sources/DataSourceConfigDrive.py |
4 | +++ b/cloudinit/sources/DataSourceConfigDrive.py |
5 | @@ -25,7 +25,7 @@ DEFAULT_METADATA = { |
6 | "instance-id": DEFAULT_IID, |
7 | } |
8 | FS_TYPES = ('vfat', 'iso9660') |
9 | -LABEL_TYPES = ('config-2',) |
10 | +LABEL_TYPES = ('config-2', 'CONFIG-2') |
11 | POSSIBLE_MOUNTS = ('sr', 'cd') |
12 | OPTICAL_DEVICES = tuple(('/dev/%s%s' % (z, i) for z in POSSIBLE_MOUNTS |
13 | for i in range(0, 2))) |
14 | @@ -224,7 +224,7 @@ def find_candidate_devs(probe_optical=True): |
15 | config drive v2: |
16 | Disk should be: |
17 | * either vfat or iso9660 formated |
18 | - * labeled with 'config-2' |
19 | + * labeled with 'config-2' or 'CONFIG-2' |
20 | """ |
21 | # query optical drive to get it in blkid cache for 2.6 kernels |
22 | if probe_optical: |
23 | diff --git a/cloudinit/sources/DataSourceGCE.py b/cloudinit/sources/DataSourceGCE.py |
24 | index ad6dae3..2da34a9 100644 |
25 | --- a/cloudinit/sources/DataSourceGCE.py |
26 | +++ b/cloudinit/sources/DataSourceGCE.py |
27 | @@ -2,8 +2,12 @@ |
28 | # |
29 | # This file is part of cloud-init. See LICENSE file for license information. |
30 | |
31 | +import datetime |
32 | +import json |
33 | + |
34 | from base64 import b64decode |
35 | |
36 | +from cloudinit.distros import ug_util |
37 | from cloudinit import log as logging |
38 | from cloudinit import sources |
39 | from cloudinit import url_helper |
40 | @@ -17,16 +21,18 @@ REQUIRED_FIELDS = ('instance-id', 'availability-zone', 'local-hostname') |
41 | |
42 | |
43 | class GoogleMetadataFetcher(object): |
44 | - headers = {'X-Google-Metadata-Request': 'True'} |
45 | + headers = {'Metadata-Flavor': 'Google'} |
46 | |
47 | def __init__(self, metadata_address): |
48 | self.metadata_address = metadata_address |
49 | |
50 | - def get_value(self, path, is_text): |
51 | + def get_value(self, path, is_text, is_recursive=False): |
52 | value = None |
53 | try: |
54 | - resp = url_helper.readurl(url=self.metadata_address + path, |
55 | - headers=self.headers) |
56 | + url = self.metadata_address + path |
57 | + if is_recursive: |
58 | + url += '/?recursive=True' |
59 | + resp = url_helper.readurl(url=url, headers=self.headers) |
60 | except url_helper.UrlError as exc: |
61 | msg = "url %s raised exception %s" |
62 | LOG.debug(msg, path, exc) |
63 | @@ -35,7 +41,7 @@ class GoogleMetadataFetcher(object): |
64 | if is_text: |
65 | value = util.decode_binary(resp.contents) |
66 | else: |
67 | - value = resp.contents |
68 | + value = resp.contents.decode('utf-8') |
69 | else: |
70 | LOG.debug("url %s returned code %s", path, resp.code) |
71 | return value |
72 | @@ -47,6 +53,10 @@ class DataSourceGCE(sources.DataSource): |
73 | |
74 | def __init__(self, sys_cfg, distro, paths): |
75 | sources.DataSource.__init__(self, sys_cfg, distro, paths) |
76 | + self.default_user = None |
77 | + if distro: |
78 | + (users, _groups) = ug_util.normalize_users_groups(sys_cfg, distro) |
79 | + (self.default_user, _user_config) = ug_util.extract_default(users) |
80 | self.metadata = dict() |
81 | self.ds_cfg = util.mergemanydict([ |
82 | util.get_cfg_by_path(sys_cfg, ["datasource", "GCE"], {}), |
83 | @@ -70,17 +80,18 @@ class DataSourceGCE(sources.DataSource): |
84 | |
85 | @property |
86 | def launch_index(self): |
87 | - # GCE does not provide lauch_index property |
88 | + # GCE does not provide launch_index property. |
89 | return None |
90 | |
91 | def get_instance_id(self): |
92 | return self.metadata['instance-id'] |
93 | |
94 | def get_public_ssh_keys(self): |
95 | - return self.metadata['public-keys'] |
96 | + public_keys_data = self.metadata['public-keys-data'] |
97 | + return _parse_public_keys(public_keys_data, self.default_user) |
98 | |
99 | def get_hostname(self, fqdn=False, resolve_ip=False): |
100 | - # GCE has long FDQN's and has asked for short hostnames |
101 | + # GCE has long FQDNs and has asked for short hostnames. |
102 | return self.metadata['local-hostname'].split('.')[0] |
103 | |
104 | @property |
105 | @@ -92,15 +103,58 @@ class DataSourceGCE(sources.DataSource): |
106 | return self.availability_zone.rsplit('-', 1)[0] |
107 | |
108 | |
109 | -def _trim_key(public_key): |
110 | - # GCE takes sshKeys attribute in the format of '<user>:<public_key>' |
111 | - # so we have to trim each key to remove the username part |
112 | +def _has_expired(public_key): |
113 | + # Check whether an SSH key is expired. Public key input is a single SSH |
114 | + # public key in the GCE specific key format documented here: |
115 | + # https://cloud.google.com/compute/docs/instances/adding-removing-ssh-keys#sshkeyformat |
116 | + try: |
117 | + # Check for the Google-specific schema identifier. |
118 | + schema, json_str = public_key.split(None, 3)[2:] |
119 | + except (ValueError, AttributeError): |
120 | + return False |
121 | + |
122 | + # Do not expire keys if they do not have the expected schema identifier. |
123 | + if schema != 'google-ssh': |
124 | + return False |
125 | + |
126 | + try: |
127 | + json_obj = json.loads(json_str) |
128 | + except ValueError: |
129 | + return False |
130 | + |
131 | + # Do not expire keys if there is no expiration timestamp. |
132 | + if 'expireOn' not in json_obj: |
133 | + return False |
134 | + |
135 | + expire_str = json_obj['expireOn'] |
136 | + format_str = '%Y-%m-%dT%H:%M:%S+0000' |
137 | try: |
138 | - index = public_key.index(':') |
139 | - if index > 0: |
140 | - return public_key[(index + 1):] |
141 | - except Exception: |
142 | - return public_key |
143 | + expire_time = datetime.datetime.strptime(expire_str, format_str) |
144 | + except ValueError: |
145 | + return False |
146 | + |
147 | + # Expire the key if and only if we have exceeded the expiration timestamp. |
148 | + return datetime.datetime.utcnow() > expire_time |
149 | + |
150 | + |
151 | +def _parse_public_keys(public_keys_data, default_user=None): |
152 | + # Parse the SSH key data for the default user account. Public keys input is |
153 | + # a list containing SSH public keys in the GCE specific key format |
154 | + # documented here: |
155 | + # https://cloud.google.com/compute/docs/instances/adding-removing-ssh-keys#sshkeyformat |
156 | + public_keys = [] |
157 | + if not public_keys_data: |
158 | + return public_keys |
159 | + for public_key in public_keys_data: |
160 | + if not public_key or not all(ord(c) < 128 for c in public_key): |
161 | + continue |
162 | + split_public_key = public_key.split(':', 1) |
163 | + if len(split_public_key) != 2: |
164 | + continue |
165 | + user, key = split_public_key |
166 | + if user in ('cloudinit', default_user) and not _has_expired(key): |
167 | + public_keys.append(key) |
168 | + return public_keys |
169 | |
170 | |
171 | def read_md(address=None, platform_check=True): |
172 | @@ -116,31 +170,28 @@ def read_md(address=None, platform_check=True): |
173 | ret['reason'] = "Not running on GCE." |
174 | return ret |
175 | |
176 | - # if we cannot resolve the metadata server, then no point in trying |
177 | + # If we cannot resolve the metadata server, then no point in trying. |
178 | if not util.is_resolvable_url(address): |
179 | LOG.debug("%s is not resolvable", address) |
180 | ret['reason'] = 'address "%s" is not resolvable' % address |
181 | return ret |
182 | |
183 | - # url_map: (our-key, path, required, is_text) |
184 | + # url_map: (our-key, path, required, is_text, is_recursive) |
185 | url_map = [ |
186 | - ('instance-id', ('instance/id',), True, True), |
187 | - ('availability-zone', ('instance/zone',), True, True), |
188 | - ('local-hostname', ('instance/hostname',), True, True), |
189 | - ('public-keys', ('project/attributes/sshKeys', |
190 | - 'instance/attributes/ssh-keys'), False, True), |
191 | - ('user-data', ('instance/attributes/user-data',), False, False), |
192 | - ('user-data-encoding', ('instance/attributes/user-data-encoding',), |
193 | - False, True), |
194 | + ('instance-id', ('instance/id',), True, True, False), |
195 | + ('availability-zone', ('instance/zone',), True, True, False), |
196 | + ('local-hostname', ('instance/hostname',), True, True, False), |
197 | + ('instance-data', ('instance/attributes',), False, False, True), |
198 | + ('project-data', ('project/attributes',), False, False, True), |
199 | ] |
200 | |
201 | metadata_fetcher = GoogleMetadataFetcher(address) |
202 | md = {} |
203 | - # iterate over url_map keys to get metadata items |
204 | - for (mkey, paths, required, is_text) in url_map: |
205 | + # Iterate over url_map keys to get metadata items. |
206 | + for (mkey, paths, required, is_text, is_recursive) in url_map: |
207 | value = None |
208 | for path in paths: |
209 | - new_value = metadata_fetcher.get_value(path, is_text) |
210 | + new_value = metadata_fetcher.get_value(path, is_text, is_recursive) |
211 | if new_value is not None: |
212 | value = new_value |
213 | if required and value is None: |
214 | @@ -149,17 +200,23 @@ def read_md(address=None, platform_check=True): |
215 | return ret |
216 | md[mkey] = value |
217 | |
218 | - if md['public-keys']: |
219 | - lines = md['public-keys'].splitlines() |
220 | - md['public-keys'] = [_trim_key(k) for k in lines] |
221 | + instance_data = json.loads(md['instance-data'] or '{}') |
222 | + project_data = json.loads(md['project-data'] or '{}') |
223 | + valid_keys = [instance_data.get('sshKeys'), instance_data.get('ssh-keys')] |
224 | + block_project = instance_data.get('block-project-ssh-keys', '').lower() |
225 | + if block_project != 'true' and not instance_data.get('sshKeys'): |
226 | + valid_keys.append(project_data.get('ssh-keys')) |
227 | + valid_keys.append(project_data.get('sshKeys')) |
228 | + public_keys_data = '\n'.join([key for key in valid_keys if key]) |
229 | + md['public-keys-data'] = public_keys_data.splitlines() |
230 | |
231 | if md['availability-zone']: |
232 | md['availability-zone'] = md['availability-zone'].split('/')[-1] |
233 | |
234 | - encoding = md.get('user-data-encoding') |
235 | + encoding = instance_data.get('user-data-encoding') |
236 | if encoding: |
237 | if encoding == 'base64': |
238 | - md['user-data'] = b64decode(md['user-data']) |
239 | + md['user-data'] = b64decode(instance_data.get('user-data')) |
240 | else: |
241 | LOG.warning('unknown user-data-encoding: %s, ignoring', encoding) |
242 | |
243 | @@ -188,20 +245,19 @@ def platform_reports_gce(): |
244 | return False |
245 | |
246 | |
247 | -# Used to match classes to dependencies |
248 | +# Used to match classes to dependencies. |
249 | datasources = [ |
250 | (DataSourceGCE, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)), |
251 | ] |
252 | |
253 | |
254 | -# Return a list of data sources that match this set of dependencies |
255 | +# Return a list of data sources that match this set of dependencies. |
256 | def get_datasource_list(depends): |
257 | return sources.list_from_depends(depends, datasources) |
258 | |
259 | |
260 | if __name__ == "__main__": |
261 | import argparse |
262 | - import json |
263 | import sys |
264 | |
265 | from base64 import b64encode |
266 | @@ -217,7 +273,7 @@ if __name__ == "__main__": |
267 | data = read_md(address=args.endpoint, platform_check=args.platform_check) |
268 | if 'user-data' in data: |
269 | # user-data is bytes not string like other things. Handle it specially. |
270 | - # if it can be represented as utf-8 then do so. Otherwise print base64 |
271 | + # If it can be represented as utf-8 then do so. Otherwise print base64 |
272 | # encoded value in the key user-data-b64. |
273 | try: |
274 | data['user-data'] = data['user-data'].decode() |
275 | @@ -225,7 +281,7 @@ if __name__ == "__main__": |
276 | sys.stderr.write("User-data cannot be decoded. " |
277 | "Writing as base64\n") |
278 | del data['user-data'] |
279 | - # b64encode returns a bytes value. decode to get the string. |
280 | + # b64encode returns a bytes value. Decode to get the string. |
281 | data['user-data-b64'] = b64encode(data['user-data']).decode() |
282 | |
283 | print(json.dumps(data, indent=1, sort_keys=True, separators=(',', ': '))) |
284 | diff --git a/cloudinit/util.py b/cloudinit/util.py |
285 | index e42498d..df0aa5d 100644 |
286 | --- a/cloudinit/util.py |
287 | +++ b/cloudinit/util.py |
288 | @@ -253,12 +253,18 @@ class ProcessExecutionError(IOError): |
289 | self.exit_code = exit_code |
290 | |
291 | if not stderr: |
292 | - self.stderr = self.empty_attr |
293 | + if stderr is None: |
294 | + self.stderr = self.empty_attr |
295 | + else: |
296 | + self.stderr = stderr |
297 | else: |
298 | self.stderr = self._indent_text(stderr) |
299 | |
300 | if not stdout: |
301 | - self.stdout = self.empty_attr |
302 | + if stdout is None: |
303 | + self.stdout = self.empty_attr |
304 | + else: |
305 | + self.stdout = stdout |
306 | else: |
307 | self.stdout = self._indent_text(stdout) |
308 | |
309 | @@ -1829,58 +1835,60 @@ def subp(args, data=None, rcs=None, env=None, capture=True, shell=False, |
310 | env = env.copy() |
311 | env.update(update_env) |
312 | |
313 | - try: |
314 | - if target_path(target) != "/": |
315 | - args = ['chroot', target] + list(args) |
316 | + if target_path(target) != "/": |
317 | + args = ['chroot', target] + list(args) |
318 | |
319 | - if not logstring: |
320 | - LOG.debug(("Running command %s with allowed return codes %s" |
321 | - " (shell=%s, capture=%s)"), args, rcs, shell, capture) |
322 | - else: |
323 | - LOG.debug(("Running hidden command to protect sensitive " |
324 | - "input/output logstring: %s"), logstring) |
325 | - |
326 | - stdin = None |
327 | - stdout = None |
328 | - stderr = None |
329 | - if capture: |
330 | - stdout = subprocess.PIPE |
331 | - stderr = subprocess.PIPE |
332 | - if data is None: |
333 | - # using devnull assures any reads get null, rather |
334 | - # than possibly waiting on input. |
335 | - devnull_fp = open(os.devnull) |
336 | - stdin = devnull_fp |
337 | - else: |
338 | - stdin = subprocess.PIPE |
339 | - if not isinstance(data, bytes): |
340 | - data = data.encode() |
341 | + if not logstring: |
342 | + LOG.debug(("Running command %s with allowed return codes %s" |
343 | + " (shell=%s, capture=%s)"), args, rcs, shell, capture) |
344 | + else: |
345 | + LOG.debug(("Running hidden command to protect sensitive " |
346 | + "input/output logstring: %s"), logstring) |
347 | + |
348 | + stdin = None |
349 | + stdout = None |
350 | + stderr = None |
351 | + if capture: |
352 | + stdout = subprocess.PIPE |
353 | + stderr = subprocess.PIPE |
354 | + if data is None: |
355 | + # using devnull assures any reads get null, rather |
356 | + # than possibly waiting on input. |
357 | + devnull_fp = open(os.devnull) |
358 | + stdin = devnull_fp |
359 | + else: |
360 | + stdin = subprocess.PIPE |
361 | + if not isinstance(data, bytes): |
362 | + data = data.encode() |
363 | |
364 | + try: |
365 | sp = subprocess.Popen(args, stdout=stdout, |
366 | stderr=stderr, stdin=stdin, |
367 | env=env, shell=shell) |
368 | (out, err) = sp.communicate(data) |
369 | - |
370 | - # Just ensure blank instead of none. |
371 | - if not out and capture: |
372 | - out = b'' |
373 | - if not err and capture: |
374 | - err = b'' |
375 | - if decode: |
376 | - def ldecode(data, m='utf-8'): |
377 | - if not isinstance(data, bytes): |
378 | - return data |
379 | - return data.decode(m, decode) |
380 | - |
381 | - out = ldecode(out) |
382 | - err = ldecode(err) |
383 | except OSError as e: |
384 | - raise ProcessExecutionError(cmd=args, reason=e, |
385 | - errno=e.errno) |
386 | + raise ProcessExecutionError( |
387 | + cmd=args, reason=e, errno=e.errno, |
388 | + stdout="-" if decode else b"-", |
389 | + stderr="-" if decode else b"-") |
390 | finally: |
391 | if devnull_fp: |
392 | devnull_fp.close() |
393 | |
394 | + # Just ensure blank instead of none. |
395 | + if not out and capture: |
396 | + out = b'' |
397 | + if not err and capture: |
398 | + err = b'' |
399 | + if decode: |
400 | + def ldecode(data, m='utf-8'): |
401 | + if not isinstance(data, bytes): |
402 | + return data |
403 | + return data.decode(m, decode) |
404 | + |
405 | + out = ldecode(out) |
406 | + err = ldecode(err) |
407 | + |
408 | rc = sp.returncode |
409 | if rc not in rcs: |
410 | raise ProcessExecutionError(stdout=out, stderr=err, |
411 | diff --git a/debian/changelog b/debian/changelog |
412 | index fc09908..093ed09 100644 |
413 | --- a/debian/changelog |
414 | +++ b/debian/changelog |
415 | @@ -1,3 +1,17 @@ |
416 | +cloud-init (17.2-20-g32a6a176-0ubuntu1) bionic; urgency=medium |
417 | + |
418 | + * New upstream snapshot. |
419 | + - tests: Fix EC2 Platform to return console output as bytes. |
420 | + - tests: Fix attempted use of /run in a test case. |
421 | + - GCE: Improvements and changes to ssh key behavior for default user. |
422 | + [Max Illfelder] (LP: #1670456, #1707033, #1707037, #1707039) |
423 | + - subp: make ProcessExecutionError have expected types in stderr, stdout. |
424 | + - tests: when querying ntp server, do not do dns resolution. |
425 | + - Recognize uppercase vfat disk labels [James Penick] (LP: #1598783) |
426 | + - tests: remove zesty as supported OS to test |
427 | + |
428 | + -- Chad Smith <chad.smith@canonical.com> Tue, 23 Jan 2018 20:10:44 -0700 |
429 | + |
430 | cloud-init (17.2-13-g6299e8d0-0ubuntu1) bionic; urgency=medium |
431 | |
432 | * New upstream snapshot. |
433 | diff --git a/tests/cloud_tests/platforms/ec2/instance.py b/tests/cloud_tests/platforms/ec2/instance.py |
434 | index 4ba737a..ab6037b 100644 |
435 | --- a/tests/cloud_tests/platforms/ec2/instance.py |
436 | +++ b/tests/cloud_tests/platforms/ec2/instance.py |
437 | @@ -46,9 +46,15 @@ class EC2Instance(Instance): |
438 | may return empty string. |
439 | """ |
440 | try: |
441 | - return self.instance.console_output()['Output'].encode() |
442 | + # OutputBytes comes from platform._decode_console_output_as_bytes |
443 | + response = self.instance.console_output() |
444 | + return response['OutputBytes'] |
445 | except KeyError: |
446 | - return b'' |
447 | + if 'Output' in response: |
448 | + msg = ("'OutputBytes' did not exist in console_output() but " |
449 | + "'Output' did: %s..." % response['Output'][0:128]) |
450 | + raise util.PlatformError('console_log', msg) |
451 | + return ('No Console Output [%s]' % self.instance).encode() |
452 | |
453 | def destroy(self): |
454 | """Clean up instance.""" |
455 | diff --git a/tests/cloud_tests/platforms/ec2/platform.py b/tests/cloud_tests/platforms/ec2/platform.py |
456 | index fdb17ba..f188c27 100644 |
457 | --- a/tests/cloud_tests/platforms/ec2/platform.py |
458 | +++ b/tests/cloud_tests/platforms/ec2/platform.py |
459 | @@ -6,6 +6,8 @@ import os |
460 | |
461 | import boto3 |
462 | import botocore |
463 | +from botocore import session, handlers |
464 | +import base64 |
465 | |
466 | from ..platforms import Platform |
467 | from .image import EC2Image |
468 | @@ -28,9 +30,10 @@ class EC2Platform(Platform): |
469 | self.instance_type = config['instance-type'] |
470 | |
471 | try: |
472 | - self.ec2_client = boto3.client('ec2') |
473 | - self.ec2_resource = boto3.resource('ec2') |
474 | - self.ec2_region = boto3.Session().region_name |
475 | + b3session = get_session() |
476 | + self.ec2_client = b3session.client('ec2') |
477 | + self.ec2_resource = b3session.resource('ec2') |
478 | + self.ec2_region = b3session.region_name |
479 | self.key_name = self._upload_public_key(config) |
480 | except botocore.exceptions.NoRegionError: |
481 | raise RuntimeError( |
482 | @@ -228,4 +231,28 @@ class EC2Platform(Platform): |
483 | |
484 | return self.tag |
485 | |
486 | + |
487 | +def _decode_console_output_as_bytes(parsed, **kwargs): |
488 | + """Provide console output as bytes in OutputBytes. |
489 | + |
490 | + For this to be useful, the session has to have had the |
491 | + decode_console_output handler unregistered already. |
492 | + |
493 | + https://github.com/boto/botocore/issues/1351 .""" |
494 | + if 'Output' not in parsed: |
495 | + return |
496 | + orig = parsed['Output'] |
497 | + handlers.decode_console_output(parsed, **kwargs) |
498 | + parsed['OutputBytes'] = base64.b64decode(orig) |
499 | + |
500 | + |
501 | +def get_session(): |
502 | + mysess = session.get_session() |
503 | + mysess.unregister('after-call.ec2.GetConsoleOutput', |
504 | + handlers.decode_console_output) |
505 | + mysess.register('after-call.ec2.GetConsoleOutput', |
506 | + _decode_console_output_as_bytes) |
507 | + return boto3.Session(botocore_session=mysess) |
508 | + |
509 | + |
510 | # vi: ts=4 expandtab |
511 | diff --git a/tests/cloud_tests/releases.yaml b/tests/cloud_tests/releases.yaml |
512 | index 0a9fa60..d8bc170 100644 |
513 | --- a/tests/cloud_tests/releases.yaml |
514 | +++ b/tests/cloud_tests/releases.yaml |
515 | @@ -158,22 +158,6 @@ releases: |
516 | alias: artful |
517 | setup_overrides: null |
518 | override_templates: false |
519 | - zesty: |
520 | - # EOL: Jan 2018 |
521 | - default: |
522 | - enabled: true |
523 | - release: zesty |
524 | - version: 17.04 |
525 | - os: ubuntu |
526 | - feature_groups: |
527 | - - base |
528 | - - debian_base |
529 | - - ubuntu_specific |
530 | - lxd: |
531 | - sstreams_server: https://cloud-images.ubuntu.com/daily |
532 | - alias: zesty |
533 | - setup_overrides: null |
534 | - override_templates: false |
535 | xenial: |
536 | # EOL: Apr 2021 |
537 | default: |
538 | diff --git a/tests/cloud_tests/testcases/modules/ntp_pools.yaml b/tests/cloud_tests/testcases/modules/ntp_pools.yaml |
539 | index 3a93faa..d490b22 100644 |
540 | --- a/tests/cloud_tests/testcases/modules/ntp_pools.yaml |
541 | +++ b/tests/cloud_tests/testcases/modules/ntp_pools.yaml |
542 | @@ -26,6 +26,6 @@ collect_scripts: |
543 | grep '^pool' /etc/ntp.conf |
544 | ntpq_servers: | |
545 | #!/bin/sh |
546 | - ntpq -p -w |
547 | + ntpq -p -w -n |
548 | |
549 | # vi: ts=4 expandtab |
550 | diff --git a/tests/cloud_tests/testcases/modules/ntp_servers.yaml b/tests/cloud_tests/testcases/modules/ntp_servers.yaml |
551 | index d59d45a..6b13b70 100644 |
552 | --- a/tests/cloud_tests/testcases/modules/ntp_servers.yaml |
553 | +++ b/tests/cloud_tests/testcases/modules/ntp_servers.yaml |
554 | @@ -22,6 +22,6 @@ collect_scripts: |
555 | grep '^server' /etc/ntp.conf |
556 | ntpq_servers: | |
557 | #!/bin/sh |
558 | - ntpq -p -w |
559 | + ntpq -p -w -n |
560 | |
561 | # vi: ts=4 expandtab |
562 | diff --git a/tests/unittests/test_datasource/test_configdrive.py b/tests/unittests/test_datasource/test_configdrive.py |
563 | index 6ef5a35..68400f2 100644 |
564 | --- a/tests/unittests/test_datasource/test_configdrive.py |
565 | +++ b/tests/unittests/test_datasource/test_configdrive.py |
566 | @@ -458,6 +458,12 @@ class TestConfigDriveDataSource(CiTestCase): |
567 | self.assertEqual(["/dev/vdb3"], |
568 | ds.find_candidate_devs()) |
569 | |
570 | + # Verify that uppercase labels are also found. |
571 | + devs_with_answers = {"TYPE=vfat": [], |
572 | + "TYPE=iso9660": ["/dev/vdb"], |
573 | + "LABEL=CONFIG-2": ["/dev/vdb"]} |
574 | + self.assertEqual(["/dev/vdb"], ds.find_candidate_devs()) |
575 | + |
576 | finally: |
577 | util.find_devs_with = orig_find_devs_with |
578 | util.is_partition = orig_is_partition |
579 | diff --git a/tests/unittests/test_datasource/test_gce.py b/tests/unittests/test_datasource/test_gce.py |
580 | index 82c788d..f77c2c4 100644 |
581 | --- a/tests/unittests/test_datasource/test_gce.py |
582 | +++ b/tests/unittests/test_datasource/test_gce.py |
583 | @@ -4,13 +4,16 @@ |
584 | # |
585 | # This file is part of cloud-init. See LICENSE file for license information. |
586 | |
587 | +import datetime |
588 | import httpretty |
589 | +import json |
590 | import mock |
591 | import re |
592 | |
593 | from base64 import b64encode, b64decode |
594 | from six.moves.urllib_parse import urlparse |
595 | |
596 | +from cloudinit import distros |
597 | from cloudinit import helpers |
598 | from cloudinit import settings |
599 | from cloudinit.sources import DataSourceGCE |
600 | @@ -21,10 +24,7 @@ from cloudinit.tests import helpers as test_helpers |
601 | GCE_META = { |
602 | 'instance/id': '123', |
603 | 'instance/zone': 'foo/bar', |
604 | - 'project/attributes/sshKeys': 'user:ssh-rsa AA2..+aRD0fyVw== root@server', |
605 | 'instance/hostname': 'server.project-foo.local', |
606 | - # UnicodeDecodeError below if set to ds.userdata instead of userdata_raw |
607 | - 'instance/attributes/user-data': b'/bin/echo \xff\n', |
608 | } |
609 | |
610 | GCE_META_PARTIAL = { |
611 | @@ -37,11 +37,13 @@ GCE_META_ENCODING = { |
612 | 'instance/id': '12345', |
613 | 'instance/hostname': 'server.project-baz.local', |
614 | 'instance/zone': 'baz/bang', |
615 | - 'instance/attributes/user-data': b64encode(b'/bin/echo baz\n'), |
616 | - 'instance/attributes/user-data-encoding': 'base64', |
617 | + 'instance/attributes': { |
618 | + 'user-data': b64encode(b'/bin/echo baz\n').decode('utf-8'), |
619 | + 'user-data-encoding': 'base64', |
620 | + } |
621 | } |
622 | |
623 | -HEADERS = {'X-Google-Metadata-Request': 'True'} |
624 | +HEADERS = {'Metadata-Flavor': 'Google'} |
625 | MD_URL_RE = re.compile( |
626 | r'http://metadata.google.internal/computeMetadata/v1/.*') |
627 | |
628 | @@ -54,10 +56,15 @@ def _set_mock_metadata(gce_meta=None): |
629 | url_path = urlparse(uri).path |
630 | if url_path.startswith('/computeMetadata/v1/'): |
631 | path = url_path.split('/computeMetadata/v1/')[1:][0] |
632 | + recursive = path.endswith('/') |
633 | + path = path.rstrip('/') |
634 | else: |
635 | path = None |
636 | if path in gce_meta: |
637 | - return (200, headers, gce_meta.get(path)) |
638 | + response = gce_meta.get(path) |
639 | + if recursive: |
640 | + response = json.dumps(response) |
641 | + return (200, headers, response) |
642 | else: |
643 | return (404, headers, '') |
644 | |
645 | @@ -69,6 +76,16 @@ def _set_mock_metadata(gce_meta=None): |
646 | @httpretty.activate |
647 | class TestDataSourceGCE(test_helpers.HttprettyTestCase): |
648 | |
649 | + def _make_distro(self, dtype, def_user=None): |
650 | + cfg = dict(settings.CFG_BUILTIN) |
651 | + cfg['system_info']['distro'] = dtype |
652 | + paths = helpers.Paths(cfg['system_info']['paths']) |
653 | + distro_cls = distros.fetch(dtype) |
654 | + if def_user: |
655 | + cfg['system_info']['default_user'] = def_user.copy() |
656 | + distro = distro_cls(dtype, cfg['system_info'], paths) |
657 | + return distro |
658 | + |
659 | def setUp(self): |
660 | tmp = self.tmp_dir() |
661 | self.ds = DataSourceGCE.DataSourceGCE( |
662 | @@ -90,6 +107,10 @@ class TestDataSourceGCE(test_helpers.HttprettyTestCase): |
663 | self.assertDictContainsSubset(HEADERS, req_header) |
664 | |
665 | def test_metadata(self): |
666 | + # UnicodeDecodeError if set to ds.userdata instead of userdata_raw |
667 | + meta = GCE_META.copy() |
668 | + meta['instance/attributes/user-data'] = b'/bin/echo \xff\n' |
669 | + |
670 | _set_mock_metadata() |
671 | self.ds.get_data() |
672 | |
673 | @@ -118,8 +139,8 @@ class TestDataSourceGCE(test_helpers.HttprettyTestCase): |
674 | _set_mock_metadata(GCE_META_ENCODING) |
675 | self.ds.get_data() |
676 | |
677 | - decoded = b64decode( |
678 | - GCE_META_ENCODING.get('instance/attributes/user-data')) |
679 | + instance_data = GCE_META_ENCODING.get('instance/attributes') |
680 | + decoded = b64decode(instance_data.get('user-data')) |
681 | self.assertEqual(decoded, self.ds.get_userdata_raw()) |
682 | |
683 | def test_missing_required_keys_return_false(self): |
684 | @@ -131,33 +152,124 @@ class TestDataSourceGCE(test_helpers.HttprettyTestCase): |
685 | self.assertEqual(False, self.ds.get_data()) |
686 | httpretty.reset() |
687 | |
688 | - def test_project_level_ssh_keys_are_used(self): |
689 | + def test_no_ssh_keys_metadata(self): |
690 | _set_mock_metadata() |
691 | self.ds.get_data() |
692 | + self.assertEqual([], self.ds.get_public_ssh_keys()) |
693 | + |
694 | + def test_cloudinit_ssh_keys(self): |
695 | + valid_key = 'ssh-rsa VALID {0}' |
696 | + invalid_key = 'ssh-rsa INVALID {0}' |
697 | + project_attributes = { |
698 | + 'sshKeys': '\n'.join([ |
699 | + 'cloudinit:{0}'.format(valid_key.format(0)), |
700 | + 'user:{0}'.format(invalid_key.format(0)), |
701 | + ]), |
702 | + 'ssh-keys': '\n'.join([ |
703 | + 'cloudinit:{0}'.format(valid_key.format(1)), |
704 | + 'user:{0}'.format(invalid_key.format(1)), |
705 | + ]), |
706 | + } |
707 | + instance_attributes = { |
708 | + 'ssh-keys': '\n'.join([ |
709 | + 'cloudinit:{0}'.format(valid_key.format(2)), |
710 | + 'user:{0}'.format(invalid_key.format(2)), |
711 | + ]), |
712 | + 'block-project-ssh-keys': 'False', |
713 | + } |
714 | + |
715 | + meta = GCE_META.copy() |
716 | + meta['project/attributes'] = project_attributes |
717 | + meta['instance/attributes'] = instance_attributes |
718 | + |
719 | + _set_mock_metadata(meta) |
720 | + self.ds.get_data() |
721 | + |
722 | + expected = [valid_key.format(key) for key in range(3)] |
723 | + self.assertEqual(set(expected), set(self.ds.get_public_ssh_keys())) |
724 | + |
725 | + @mock.patch("cloudinit.sources.DataSourceGCE.ug_util") |
726 | + def test_default_user_ssh_keys(self, mock_ug_util): |
727 | + mock_ug_util.normalize_users_groups.return_value = None, None |
728 | + mock_ug_util.extract_default.return_value = 'ubuntu', None |
729 | + ubuntu_ds = DataSourceGCE.DataSourceGCE( |
730 | + settings.CFG_BUILTIN, self._make_distro('ubuntu'), |
731 | + helpers.Paths({'run_dir': self.tmp_dir()})) |
732 | + |
733 | + valid_key = 'ssh-rsa VALID {0}' |
734 | + invalid_key = 'ssh-rsa INVALID {0}' |
735 | + project_attributes = { |
736 | + 'sshKeys': '\n'.join([ |
737 | + 'ubuntu:{0}'.format(valid_key.format(0)), |
738 | + 'user:{0}'.format(invalid_key.format(0)), |
739 | + ]), |
740 | + 'ssh-keys': '\n'.join([ |
741 | + 'ubuntu:{0}'.format(valid_key.format(1)), |
742 | + 'user:{0}'.format(invalid_key.format(1)), |
743 | + ]), |
744 | + } |
745 | + instance_attributes = { |
746 | + 'ssh-keys': '\n'.join([ |
747 | + 'ubuntu:{0}'.format(valid_key.format(2)), |
748 | + 'user:{0}'.format(invalid_key.format(2)), |
749 | + ]), |
750 | + 'block-project-ssh-keys': 'False', |
751 | + } |
752 | |
753 | - # we expect a list of public ssh keys with user names stripped |
754 | - self.assertEqual(['ssh-rsa AA2..+aRD0fyVw== root@server'], |
755 | - self.ds.get_public_ssh_keys()) |
756 | + meta = GCE_META.copy() |
757 | + meta['project/attributes'] = project_attributes |
758 | + meta['instance/attributes'] = instance_attributes |
759 | + |
760 | + _set_mock_metadata(meta) |
761 | + ubuntu_ds.get_data() |
762 | + |
763 | + expected = [valid_key.format(key) for key in range(3)] |
764 | + self.assertEqual(set(expected), set(ubuntu_ds.get_public_ssh_keys())) |
765 | + |
766 | + def test_instance_ssh_keys_override(self): |
767 | + valid_key = 'ssh-rsa VALID {0}' |
768 | + invalid_key = 'ssh-rsa INVALID {0}' |
769 | + project_attributes = { |
770 | + 'sshKeys': 'cloudinit:{0}'.format(invalid_key.format(0)), |
771 | + 'ssh-keys': 'cloudinit:{0}'.format(invalid_key.format(1)), |
772 | + } |
773 | + instance_attributes = { |
774 | + 'sshKeys': 'cloudinit:{0}'.format(valid_key.format(0)), |
775 | + 'ssh-keys': 'cloudinit:{0}'.format(valid_key.format(1)), |
776 | + 'block-project-ssh-keys': 'False', |
777 | + } |
778 | |
779 | - def test_instance_level_ssh_keys_are_used(self): |
780 | - key_content = 'ssh-rsa JustAUser root@server' |
781 | meta = GCE_META.copy() |
782 | - meta['instance/attributes/ssh-keys'] = 'user:{0}'.format(key_content) |
783 | + meta['project/attributes'] = project_attributes |
784 | + meta['instance/attributes'] = instance_attributes |
785 | |
786 | _set_mock_metadata(meta) |
787 | self.ds.get_data() |
788 | |
789 | - self.assertIn(key_content, self.ds.get_public_ssh_keys()) |
790 | + expected = [valid_key.format(key) for key in range(2)] |
791 | + self.assertEqual(set(expected), set(self.ds.get_public_ssh_keys())) |
792 | + |
793 | + def test_block_project_ssh_keys_override(self): |
794 | + valid_key = 'ssh-rsa VALID {0}' |
795 | + invalid_key = 'ssh-rsa INVALID {0}' |
796 | + project_attributes = { |
797 | + 'sshKeys': 'cloudinit:{0}'.format(invalid_key.format(0)), |
798 | + 'ssh-keys': 'cloudinit:{0}'.format(invalid_key.format(1)), |
799 | + } |
800 | + instance_attributes = { |
801 | + 'ssh-keys': 'cloudinit:{0}'.format(valid_key.format(0)), |
802 | + 'block-project-ssh-keys': 'True', |
803 | + } |
804 | |
805 | - def test_instance_level_keys_replace_project_level_keys(self): |
806 | - key_content = 'ssh-rsa JustAUser root@server' |
807 | meta = GCE_META.copy() |
808 | - meta['instance/attributes/ssh-keys'] = 'user:{0}'.format(key_content) |
809 | + meta['project/attributes'] = project_attributes |
810 | + meta['instance/attributes'] = instance_attributes |
811 | |
812 | _set_mock_metadata(meta) |
813 | self.ds.get_data() |
814 | |
815 | - self.assertEqual([key_content], self.ds.get_public_ssh_keys()) |
816 | + expected = [valid_key.format(0)] |
817 | + self.assertEqual(set(expected), set(self.ds.get_public_ssh_keys())) |
818 | |
819 | def test_only_last_part_of_zone_used_for_availability_zone(self): |
820 | _set_mock_metadata() |
821 | @@ -172,5 +284,44 @@ class TestDataSourceGCE(test_helpers.HttprettyTestCase): |
822 | self.assertEqual(False, ret) |
823 | m_fetcher.assert_not_called() |
824 | |
825 | + def test_has_expired(self): |
826 | + |
827 | + def _get_timestamp(days): |
828 | + format_str = '%Y-%m-%dT%H:%M:%S+0000' |
829 | + today = datetime.datetime.now() |
830 | + timestamp = today + datetime.timedelta(days=days) |
831 | + return timestamp.strftime(format_str) |
832 | + |
833 | + past = _get_timestamp(-1) |
834 | + future = _get_timestamp(1) |
835 | + ssh_keys = { |
836 | + None: False, |
837 | + '': False, |
838 | + 'Invalid': False, |
839 | + 'user:ssh-rsa key user@domain.com': False, |
840 | + 'user:ssh-rsa key google {"expireOn":"%s"}' % past: False, |
841 | + 'user:ssh-rsa key google-ssh': False, |
842 | + 'user:ssh-rsa key google-ssh {invalid:json}': False, |
843 | + 'user:ssh-rsa key google-ssh {"userName":"user"}': False, |
844 | + 'user:ssh-rsa key google-ssh {"expireOn":"invalid"}': False, |
845 | + 'user:xyz key google-ssh {"expireOn":"%s"}' % future: False, |
846 | + 'user:xyz key google-ssh {"expireOn":"%s"}' % past: True, |
847 | + } |
848 | + |
849 | + for key, expired in ssh_keys.items(): |
850 | + self.assertEqual(DataSourceGCE._has_expired(key), expired) |
851 | + |
852 | + def test_parse_public_keys_non_ascii(self): |
853 | + public_key_data = [ |
854 | + 'cloudinit:rsa ssh-ke%s invalid' % chr(165), |
855 | + 'use%sname:rsa ssh-key' % chr(174), |
856 | + 'cloudinit:test 1', |
857 | + 'default:test 2', |
858 | + 'user:test 3', |
859 | + ] |
860 | + expected = ['test 1', 'test 2'] |
861 | + found = DataSourceGCE._parse_public_keys( |
862 | + public_key_data, default_user='default') |
863 | + self.assertEqual(sorted(found), sorted(expected)) |
864 | |
865 | # vi: ts=4 expandtab |
866 | diff --git a/tests/unittests/test_ds_identify.py b/tests/unittests/test_ds_identify.py |
867 | index c9234ed..ad6c5cf 100644 |
868 | --- a/tests/unittests/test_ds_identify.py |
869 | +++ b/tests/unittests/test_ds_identify.py |
870 | @@ -232,6 +232,11 @@ class TestDsIdentify(CiTestCase): |
871 | self._test_ds_found('ConfigDrive') |
872 | return |
873 | |
874 | + def test_config_drive_upper(self): |
875 | + """ConfigDrive datasource has a disk with LABEL=CONFIG-2.""" |
876 | + self._test_ds_found('ConfigDriveUpper') |
877 | + return |
878 | + |
879 | def test_policy_disabled(self): |
880 | """A Builtin policy of 'disabled' should return not found. |
881 | |
882 | @@ -503,6 +508,18 @@ VALID_CFG = { |
883 | }, |
884 | ], |
885 | }, |
886 | + 'ConfigDriveUpper': { |
887 | + 'ds': 'ConfigDrive', |
888 | + 'mocks': [ |
889 | + {'name': 'blkid', 'ret': 0, |
890 | + 'out': blkid_out( |
891 | + [{'DEVNAME': 'vda1', 'TYPE': 'vfat', 'PARTUUID': uuid4()}, |
892 | + {'DEVNAME': 'vda2', 'TYPE': 'ext4', |
893 | + 'LABEL': 'cloudimg-rootfs', 'PARTUUID': uuid4()}, |
894 | + {'DEVNAME': 'vdb', 'TYPE': 'vfat', 'LABEL': 'CONFIG-2'}]) |
895 | + }, |
896 | + ], |
897 | + }, |
898 | } |
899 | |
900 | # vi: ts=4 expandtab |
901 | diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py |
902 | index d63b760..4a92e74 100644 |
903 | --- a/tests/unittests/test_util.py |
904 | +++ b/tests/unittests/test_util.py |
905 | @@ -623,6 +623,7 @@ class TestSubp(helpers.CiTestCase): |
906 | utf8_valid = b'start \xc3\xa9 end' |
907 | utf8_valid_2 = b'd\xc3\xa9j\xc8\xa7' |
908 | printenv = [BASH, '-c', 'for n in "$@"; do echo "$n=${!n}"; done', '--'] |
909 | + bogus_command = 'this-is-not-expected-to-be-a-program-name' |
910 | |
911 | def printf_cmd(self, *args): |
912 | # bash's printf supports \xaa. So does /usr/bin/printf |
913 | @@ -712,6 +713,20 @@ class TestSubp(helpers.CiTestCase): |
914 | self.assertIsNone(err) |
915 | self.assertIsNone(out) |
916 | |
917 | + def test_exception_has_out_err_are_bytes_if_decode_false(self): |
918 | + """Raised exc should have stderr, stdout as bytes if no decode.""" |
919 | + with self.assertRaises(util.ProcessExecutionError) as cm: |
920 | + util.subp([self.bogus_command], decode=False) |
921 | + self.assertTrue(isinstance(cm.exception.stdout, bytes)) |
922 | + self.assertTrue(isinstance(cm.exception.stderr, bytes)) |
923 | + |
924 | + def test_exception_has_out_err_are_bytes_if_decode_true(self): |
925 | + """Raised exc should have stderr, stdout as strings if decode is True."""
926 | + with self.assertRaises(util.ProcessExecutionError) as cm: |
927 | + util.subp([self.bogus_command], decode=True) |
928 | + self.assertTrue(isinstance(cm.exception.stdout, six.string_types)) |
929 | + self.assertTrue(isinstance(cm.exception.stderr, six.string_types)) |
930 | + |
931 | def test_bunch_of_slashes_in_path(self): |
932 | self.assertEqual("/target/my/path/", |
933 | util.target_path("/target/", "//my/path/")) |
934 | diff --git a/tools/ds-identify b/tools/ds-identify |
935 | index 5893a76..374c3ad 100755 |
936 | --- a/tools/ds-identify |
937 | +++ b/tools/ds-identify |
938 | @@ -579,6 +579,8 @@ dscheck_NoCloud() { |
939 | check_configdrive_v2() { |
940 | if has_fs_with_label "config-2"; then |
941 | return ${DS_FOUND} |
942 | + elif has_fs_with_label "CONFIG-2"; then |
943 | + return ${DS_FOUND} |
944 | fi |
945 | # look in /config-drive <vlc>/seed/config_drive for a directory |
946 | # openstack/YYYY-MM-DD format with a file meta_data.json |
947 | @@ -666,7 +668,7 @@ is_cdrom_ovf() { |
948 | |
949 | # explicitly skip known labels of other types. rd_rdfe is azure. |
950 | case "$label" in |
951 | - config-2|rd_rdfe_stable*|cidata) return 1;; |
952 | + config-2|CONFIG-2|rd_rdfe_stable*|cidata) return 1;; |
953 | esac |
954 | |
955 | local idstr="http://schemas.dmtf.org/ovf/environment/1" |
Tox is happy with this branch. Thanks for the fix, smoser.
py27: commands succeeded
py3: commands succeeded
flake8: commands succeeded
xenial: commands succeeded
pylint: commands succeeded
congratulations :)