Merge ~chad.smith/cloud-init:ubuntu/devel into cloud-init:ubuntu/devel

Proposed by Chad Smith
Status: Merged
Merged at revision: 5e4916bdc4f7be6bcbf5650e93ea8835cc495b95
Proposed branch: ~chad.smith/cloud-init:ubuntu/devel
Merge into: cloud-init:ubuntu/devel
Diff against target: 1433 lines (+661/-139)
21 files modified
cloudinit/cmd/devel/render.py (+24/-11)
cloudinit/cmd/devel/tests/test_render.py (+44/-1)
cloudinit/cmd/query.py (+24/-12)
cloudinit/cmd/tests/test_query.py (+71/-5)
cloudinit/config/cc_disk_setup.py (+1/-1)
cloudinit/handlers/jinja_template.py (+9/-1)
cloudinit/net/dhcp.py (+32/-10)
cloudinit/sources/DataSourceAzure.py (+46/-33)
cloudinit/tests/test_url_helper.py (+24/-1)
cloudinit/tests/test_util.py (+66/-17)
cloudinit/url_helper.py (+25/-6)
cloudinit/util.py (+4/-3)
debian/changelog (+28/-0)
doc/rtd/topics/datasources/azure.rst (+46/-0)
packages/redhat/cloud-init.spec.in (+1/-0)
packages/suse/cloud-init.spec.in (+1/-0)
systemd/cloud-init.service.tmpl (+1/-2)
tests/unittests/test_builtin_handlers.py (+25/-0)
tests/unittests/test_datasource/test_azure.py (+148/-19)
tests/unittests/test_datasource/test_ec2.py (+24/-16)
udev/66-azure-ephemeral.rules (+17/-1)
Reviewer             Review Type              Date Requested  Status
Server Team CI bot   continuous-integration                   Approve
Ryan Harper                                                    Approve
Review via email: mp+358684@code.launchpad.net

Commit message

New upstream snapshot for upload into Disco. Note the changed series (disco) represented in debian/changelog.

Revision history for this message
Server Team CI bot (server-team-bot) wrote:

PASSED: Continuous integration, rev:db78de17b180d531624fd1e296246692d3db18bb
https://jenkins.ubuntu.com/server/job/cloud-init-ci/438/
Executed test runs:
    SUCCESS: Checkout
    SUCCESS: Unit & Style Tests
    SUCCESS: Ubuntu LTS: Build
    SUCCESS: Ubuntu LTS: Integration
    IN_PROGRESS: Declarative: Post Actions

Click here to trigger a rebuild:
https://jenkins.ubuntu.com/server/job/cloud-init-ci/438/rebuild

review: Approve (continuous-integration)
Revision history for this message
Ryan Harper (raharper) wrote:

I get the same as you, but something is causing a typo in the LP: reference of the first changelog entry:

+cloud-init (18.4-22-g6062595b-0ubuntu1) disco; urgency=medium
+
+ * New upstream snapshot.
+ - azure: retry imds polling on requests.Timeout (LP: LP:1800223)

I think we can fix this up in the changelog commit even if the original commit has a busted comment.

review: Needs Fixing
Revision history for this message
Chad Smith (chad.smith) wrote:

+1, I had forgotten to --force my push, which did just that. Fixed.

Revision history for this message
Ryan Harper (raharper) wrote:

LGTM

review: Approve
Revision history for this message
Server Team CI bot (server-team-bot) wrote:

PASSED: Continuous integration, rev:5e4916bdc4f7be6bcbf5650e93ea8835cc495b95
https://jenkins.ubuntu.com/server/job/cloud-init-ci/440/
Executed test runs:
    SUCCESS: Checkout
    SUCCESS: Unit & Style Tests
    SUCCESS: Ubuntu LTS: Build
    SUCCESS: Ubuntu LTS: Integration
    IN_PROGRESS: Declarative: Post Actions

Click here to trigger a rebuild:
https://jenkins.ubuntu.com/server/job/cloud-init-ci/440/rebuild

review: Approve (continuous-integration)

Preview Diff

diff --git a/cloudinit/cmd/devel/render.py b/cloudinit/cmd/devel/render.py
index 2ba6b68..1bc2240 100755
--- a/cloudinit/cmd/devel/render.py
+++ b/cloudinit/cmd/devel/render.py
@@ -8,11 +8,10 @@ import sys
88
9from cloudinit.handlers.jinja_template import render_jinja_payload_from_file9from cloudinit.handlers.jinja_template import render_jinja_payload_from_file
10from cloudinit import log10from cloudinit import log
11from cloudinit.sources import INSTANCE_JSON_FILE11from cloudinit.sources import INSTANCE_JSON_FILE, INSTANCE_JSON_SENSITIVE_FILE
12from . import addLogHandlerCLI, read_cfg_paths12from . import addLogHandlerCLI, read_cfg_paths
1313
14NAME = 'render'14NAME = 'render'
15DEFAULT_INSTANCE_DATA = '/run/cloud-init/instance-data.json'
1615
17LOG = log.getLogger(NAME)16LOG = log.getLogger(NAME)
1817
@@ -47,12 +46,22 @@ def handle_args(name, args):
47 @return 0 on success, 1 on failure.46 @return 0 on success, 1 on failure.
48 """47 """
49 addLogHandlerCLI(LOG, log.DEBUG if args.debug else log.WARNING)48 addLogHandlerCLI(LOG, log.DEBUG if args.debug else log.WARNING)
50 if not args.instance_data:49 if args.instance_data:
51 paths = read_cfg_paths()
52 instance_data_fn = os.path.join(
53 paths.run_dir, INSTANCE_JSON_FILE)
54 else:
55 instance_data_fn = args.instance_data50 instance_data_fn = args.instance_data
51 else:
52 paths = read_cfg_paths()
53 uid = os.getuid()
54 redacted_data_fn = os.path.join(paths.run_dir, INSTANCE_JSON_FILE)
55 if uid == 0:
56 instance_data_fn = os.path.join(
57 paths.run_dir, INSTANCE_JSON_SENSITIVE_FILE)
58 if not os.path.exists(instance_data_fn):
59 LOG.warning(
60 'Missing root-readable %s. Using redacted %s instead.',
61 instance_data_fn, redacted_data_fn)
62 instance_data_fn = redacted_data_fn
63 else:
64 instance_data_fn = redacted_data_fn
56 if not os.path.exists(instance_data_fn):65 if not os.path.exists(instance_data_fn):
57 LOG.error('Missing instance-data.json file: %s', instance_data_fn)66 LOG.error('Missing instance-data.json file: %s', instance_data_fn)
58 return 167 return 1
@@ -62,10 +71,14 @@ def handle_args(name, args):
62 except IOError:71 except IOError:
63 LOG.error('Missing user-data file: %s', args.user_data)72 LOG.error('Missing user-data file: %s', args.user_data)
64 return 173 return 1
65 rendered_payload = render_jinja_payload_from_file(74 try:
66 payload=user_data, payload_fn=args.user_data,75 rendered_payload = render_jinja_payload_from_file(
67 instance_data_file=instance_data_fn,76 payload=user_data, payload_fn=args.user_data,
68 debug=True if args.debug else False)77 instance_data_file=instance_data_fn,
78 debug=True if args.debug else False)
79 except RuntimeError as e:
80 LOG.error('Cannot render from instance data: %s', str(e))
81 return 1
69 if not rendered_payload:82 if not rendered_payload:
70 LOG.error('Unable to render user-data file: %s', args.user_data)83 LOG.error('Unable to render user-data file: %s', args.user_data)
71 return 184 return 1
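
[Editor's sketch] The render hunk above boils down to a selection order for the instance-data file. A minimal standalone Python sketch of that order, assuming the usual /run/cloud-init defaults in place of read_cfg_paths() (paths and the helper name are illustrative, not cloud-init API):

    import logging
    import os

    LOG = logging.getLogger(__name__)

    # Assumed defaults; cloud-init derives run_dir from read_cfg_paths().
    RUN_DIR = '/run/cloud-init'
    REDACTED_JSON = os.path.join(RUN_DIR, 'instance-data.json')
    SENSITIVE_JSON = os.path.join(RUN_DIR, 'instance-data-sensitive.json')

    def pick_instance_data(cli_path=None):
        """Mirror the selection order used by 'cloud-init devel render'."""
        if cli_path:            # an explicit --instance-data argument wins
            return cli_path
        if os.getuid() == 0:    # root prefers the unredacted sensitive file
            if os.path.exists(SENSITIVE_JSON):
                return SENSITIVE_JSON
            LOG.warning('Missing root-readable %s. Using redacted %s instead.',
                        SENSITIVE_JSON, REDACTED_JSON)
        return REDACTED_JSON
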
diff --git a/cloudinit/cmd/devel/tests/test_render.py b/cloudinit/cmd/devel/tests/test_render.py
index fc5d2c0..988bba0 100644
--- a/cloudinit/cmd/devel/tests/test_render.py
+++ b/cloudinit/cmd/devel/tests/test_render.py
@@ -6,7 +6,7 @@ import os
6from collections import namedtuple6from collections import namedtuple
7from cloudinit.cmd.devel import render7from cloudinit.cmd.devel import render
8from cloudinit.helpers import Paths8from cloudinit.helpers import Paths
9from cloudinit.sources import INSTANCE_JSON_FILE9from cloudinit.sources import INSTANCE_JSON_FILE, INSTANCE_JSON_SENSITIVE_FILE
10from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJinja10from cloudinit.tests.helpers import CiTestCase, mock, skipUnlessJinja
11from cloudinit.util import ensure_dir, write_file11from cloudinit.util import ensure_dir, write_file
1212
@@ -63,6 +63,49 @@ class TestRender(CiTestCase):
63 'Missing instance-data.json file: %s' % json_file,63 'Missing instance-data.json file: %s' % json_file,
64 self.logs.getvalue())64 self.logs.getvalue())
6565
66 def test_handle_args_root_fallback_from_sensitive_instance_data(self):
67 """When root user defaults to sensitive.json."""
68 user_data = self.tmp_path('user-data', dir=self.tmp)
69 run_dir = self.tmp_path('run_dir', dir=self.tmp)
70 ensure_dir(run_dir)
71 paths = Paths({'run_dir': run_dir})
72 self.add_patch('cloudinit.cmd.devel.render.read_cfg_paths', 'm_paths')
73 self.m_paths.return_value = paths
74 args = self.args(
75 user_data=user_data, instance_data=None, debug=False)
76 with mock.patch('sys.stderr', new_callable=StringIO):
77 with mock.patch('os.getuid') as m_getuid:
78 m_getuid.return_value = 0
79 self.assertEqual(1, render.handle_args('anyname', args))
80 json_file = os.path.join(run_dir, INSTANCE_JSON_FILE)
81 json_sensitive = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE)
82 self.assertIn(
83 'WARNING: Missing root-readable %s. Using redacted %s' % (
84 json_sensitive, json_file), self.logs.getvalue())
85 self.assertIn(
86 'ERROR: Missing instance-data.json file: %s' % json_file,
87 self.logs.getvalue())
88
89 def test_handle_args_root_uses_sensitive_instance_data(self):
90 """When root user, and no instance-data arg, use sensitive.json."""
91 user_data = self.tmp_path('user-data', dir=self.tmp)
92 write_file(user_data, '##template: jinja\nrendering: {{ my_var }}')
93 run_dir = self.tmp_path('run_dir', dir=self.tmp)
94 ensure_dir(run_dir)
95 json_sensitive = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE)
96 write_file(json_sensitive, '{"my-var": "jinja worked"}')
97 paths = Paths({'run_dir': run_dir})
98 self.add_patch('cloudinit.cmd.devel.render.read_cfg_paths', 'm_paths')
99 self.m_paths.return_value = paths
100 args = self.args(
101 user_data=user_data, instance_data=None, debug=False)
102 with mock.patch('sys.stderr', new_callable=StringIO):
103 with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
104 with mock.patch('os.getuid') as m_getuid:
105 m_getuid.return_value = 0
106 self.assertEqual(0, render.handle_args('anyname', args))
107 self.assertIn('rendering: jinja worked', m_stdout.getvalue())
108
66 @skipUnlessJinja()109 @skipUnlessJinja()
67 def test_handle_args_renders_instance_data_vars_in_template(self):110 def test_handle_args_renders_instance_data_vars_in_template(self):
68 """If user_data file is a jinja template render instance-data vars."""111 """If user_data file is a jinja template render instance-data vars."""
diff --git a/cloudinit/cmd/query.py b/cloudinit/cmd/query.py
index 7d2d4fe..1d888b9 100644
--- a/cloudinit/cmd/query.py
+++ b/cloudinit/cmd/query.py
@@ -3,6 +3,7 @@
3"""Query standardized instance metadata from the command line."""3"""Query standardized instance metadata from the command line."""
44
5import argparse5import argparse
6from errno import EACCES
6import os7import os
7import six8import six
8import sys9import sys
@@ -79,27 +80,38 @@ def handle_args(name, args):
79 uid = os.getuid()80 uid = os.getuid()
80 if not all([args.instance_data, args.user_data, args.vendor_data]):81 if not all([args.instance_data, args.user_data, args.vendor_data]):
81 paths = read_cfg_paths()82 paths = read_cfg_paths()
82 if not args.instance_data:83 if args.instance_data:
84 instance_data_fn = args.instance_data
85 else:
86 redacted_data_fn = os.path.join(paths.run_dir, INSTANCE_JSON_FILE)
83 if uid == 0:87 if uid == 0:
84 default_json_fn = INSTANCE_JSON_SENSITIVE_FILE88 sensitive_data_fn = os.path.join(
89 paths.run_dir, INSTANCE_JSON_SENSITIVE_FILE)
90 if os.path.exists(sensitive_data_fn):
91 instance_data_fn = sensitive_data_fn
92 else:
93 LOG.warning(
94 'Missing root-readable %s. Using redacted %s instead.',
95 sensitive_data_fn, redacted_data_fn)
96 instance_data_fn = redacted_data_fn
85 else:97 else:
86 default_json_fn = INSTANCE_JSON_FILE # World readable98 instance_data_fn = redacted_data_fn
87 instance_data_fn = os.path.join(paths.run_dir, default_json_fn)99 if args.user_data:
100 user_data_fn = args.user_data
88 else:101 else:
89 instance_data_fn = args.instance_data
90 if not args.user_data:
91 user_data_fn = os.path.join(paths.instance_link, 'user-data.txt')102 user_data_fn = os.path.join(paths.instance_link, 'user-data.txt')
103 if args.vendor_data:
104 vendor_data_fn = args.vendor_data
92 else:105 else:
93 user_data_fn = args.user_data
94 if not args.vendor_data:
95 vendor_data_fn = os.path.join(paths.instance_link, 'vendor-data.txt')106 vendor_data_fn = os.path.join(paths.instance_link, 'vendor-data.txt')
96 else:
97 vendor_data_fn = args.vendor_data
98107
99 try:108 try:
100 instance_json = util.load_file(instance_data_fn)109 instance_json = util.load_file(instance_data_fn)
101 except IOError:110 except (IOError, OSError) as e:
102 LOG.error('Missing instance-data.json file: %s', instance_data_fn)111 if e.errno == EACCES:
112 LOG.error("No read permission on '%s'. Try sudo", instance_data_fn)
113 else:
114 LOG.error('Missing instance-data file: %s', instance_data_fn)
103 return 1115 return 1
104116
105 instance_data = util.load_json(instance_json)117 instance_data = util.load_json(instance_json)
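
[Editor's sketch] The query change distinguishes a permission error from a missing file when loading instance data. A minimal standard-library sketch of that error handling (function name illustrative):

    import errno
    import json
    import logging

    LOG = logging.getLogger(__name__)

    def load_instance_data(path):
        """Load instance-data JSON, reporting the two cases the diff separates."""
        try:
            with open(path) as stream:
                return json.load(stream)
        except (IOError, OSError) as e:
            if e.errno == errno.EACCES:
                LOG.error("No read permission on '%s'. Try sudo", path)
            else:
                LOG.error('Missing instance-data file: %s', path)
            return None
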
diff --git a/cloudinit/cmd/tests/test_query.py b/cloudinit/cmd/tests/test_query.py
index fb87c6a..28738b1 100644
--- a/cloudinit/cmd/tests/test_query.py
+++ b/cloudinit/cmd/tests/test_query.py
@@ -1,5 +1,6 @@
1# This file is part of cloud-init. See LICENSE file for license information.1# This file is part of cloud-init. See LICENSE file for license information.
22
3import errno
3from six import StringIO4from six import StringIO
4from textwrap import dedent5from textwrap import dedent
5import os6import os
@@ -7,7 +8,8 @@ import os
7from collections import namedtuple8from collections import namedtuple
8from cloudinit.cmd import query9from cloudinit.cmd import query
9from cloudinit.helpers import Paths10from cloudinit.helpers import Paths
10from cloudinit.sources import REDACT_SENSITIVE_VALUE, INSTANCE_JSON_FILE11from cloudinit.sources import (
12 REDACT_SENSITIVE_VALUE, INSTANCE_JSON_FILE, INSTANCE_JSON_SENSITIVE_FILE)
11from cloudinit.tests.helpers import CiTestCase, mock13from cloudinit.tests.helpers import CiTestCase, mock
12from cloudinit.util import ensure_dir, write_file14from cloudinit.util import ensure_dir, write_file
1315
@@ -50,10 +52,28 @@ class TestQuery(CiTestCase):
50 with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:52 with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
51 self.assertEqual(1, query.handle_args('anyname', args))53 self.assertEqual(1, query.handle_args('anyname', args))
52 self.assertIn(54 self.assertIn(
53 'ERROR: Missing instance-data.json file: %s' % absent_fn,55 'ERROR: Missing instance-data file: %s' % absent_fn,
54 self.logs.getvalue())56 self.logs.getvalue())
55 self.assertIn(57 self.assertIn(
56 'ERROR: Missing instance-data.json file: %s' % absent_fn,58 'ERROR: Missing instance-data file: %s' % absent_fn,
59 m_stderr.getvalue())
60
61 def test_handle_args_error_when_no_read_permission_instance_data(self):
62 """When instance_data file is unreadable, log an error."""
63 noread_fn = self.tmp_path('unreadable', dir=self.tmp)
64 write_file(noread_fn, 'thou shall not pass')
65 args = self.args(
66 debug=False, dump_all=True, format=None, instance_data=noread_fn,
67 list_keys=False, user_data='ud', vendor_data='vd', varname=None)
68 with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
69 with mock.patch('cloudinit.cmd.query.util.load_file') as m_load:
70 m_load.side_effect = OSError(errno.EACCES, 'Not allowed')
71 self.assertEqual(1, query.handle_args('anyname', args))
72 self.assertIn(
73 "ERROR: No read permission on '%s'. Try sudo" % noread_fn,
74 self.logs.getvalue())
75 self.assertIn(
76 "ERROR: No read permission on '%s'. Try sudo" % noread_fn,
57 m_stderr.getvalue())77 m_stderr.getvalue())
5878
59 def test_handle_args_defaults_instance_data(self):79 def test_handle_args_defaults_instance_data(self):
@@ -70,12 +90,58 @@ class TestQuery(CiTestCase):
70 self.assertEqual(1, query.handle_args('anyname', args))90 self.assertEqual(1, query.handle_args('anyname', args))
71 json_file = os.path.join(run_dir, INSTANCE_JSON_FILE)91 json_file = os.path.join(run_dir, INSTANCE_JSON_FILE)
72 self.assertIn(92 self.assertIn(
73 'ERROR: Missing instance-data.json file: %s' % json_file,93 'ERROR: Missing instance-data file: %s' % json_file,
74 self.logs.getvalue())94 self.logs.getvalue())
75 self.assertIn(95 self.assertIn(
76 'ERROR: Missing instance-data.json file: %s' % json_file,96 'ERROR: Missing instance-data file: %s' % json_file,
77 m_stderr.getvalue())97 m_stderr.getvalue())
7898
99 def test_handle_args_root_fallsback_to_instance_data(self):
100 """When no instance_data argument, root falls back to redacted json."""
101 args = self.args(
102 debug=False, dump_all=True, format=None, instance_data=None,
103 list_keys=False, user_data=None, vendor_data=None, varname=None)
104 run_dir = self.tmp_path('run_dir', dir=self.tmp)
105 ensure_dir(run_dir)
106 paths = Paths({'run_dir': run_dir})
107 self.add_patch('cloudinit.cmd.query.read_cfg_paths', 'm_paths')
108 self.m_paths.return_value = paths
109 with mock.patch('sys.stderr', new_callable=StringIO) as m_stderr:
110 with mock.patch('os.getuid') as m_getuid:
111 m_getuid.return_value = 0
112 self.assertEqual(1, query.handle_args('anyname', args))
113 json_file = os.path.join(run_dir, INSTANCE_JSON_FILE)
114 sensitive_file = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE)
115 self.assertIn(
116 'WARNING: Missing root-readable %s. Using redacted %s instead.' % (
117 sensitive_file, json_file),
118 m_stderr.getvalue())
119
120 def test_handle_args_root_uses_instance_sensitive_data(self):
121 """When no instance_data argument, root uses semsitive json."""
122 user_data = self.tmp_path('user-data', dir=self.tmp)
123 vendor_data = self.tmp_path('vendor-data', dir=self.tmp)
124 write_file(user_data, 'ud')
125 write_file(vendor_data, 'vd')
126 run_dir = self.tmp_path('run_dir', dir=self.tmp)
127 sensitive_file = os.path.join(run_dir, INSTANCE_JSON_SENSITIVE_FILE)
128 write_file(sensitive_file, '{"my-var": "it worked"}')
129 ensure_dir(run_dir)
130 paths = Paths({'run_dir': run_dir})
131 self.add_patch('cloudinit.cmd.query.read_cfg_paths', 'm_paths')
132 self.m_paths.return_value = paths
133 args = self.args(
134 debug=False, dump_all=True, format=None, instance_data=None,
135 list_keys=False, user_data=vendor_data, vendor_data=vendor_data,
136 varname=None)
137 with mock.patch('sys.stdout', new_callable=StringIO) as m_stdout:
138 with mock.patch('os.getuid') as m_getuid:
139 m_getuid.return_value = 0
140 self.assertEqual(0, query.handle_args('anyname', args))
141 self.assertEqual(
142 '{\n "my_var": "it worked",\n "userdata": "vd",\n '
143 '"vendordata": "vd"\n}\n', m_stdout.getvalue())
144
79 def test_handle_args_dumps_all_instance_data(self):145 def test_handle_args_dumps_all_instance_data(self):
80 """When --all is specified query will dump all instance data vars."""146 """When --all is specified query will dump all instance data vars."""
81 write_file(self.instance_data, '{"my-var": "it worked"}')147 write_file(self.instance_data, '{"my-var": "it worked"}')
diff --git a/cloudinit/config/cc_disk_setup.py b/cloudinit/config/cc_disk_setup.py
index 943089e..29e192e 100644
--- a/cloudinit/config/cc_disk_setup.py
+++ b/cloudinit/config/cc_disk_setup.py
@@ -743,7 +743,7 @@ def assert_and_settle_device(device):
743 util.udevadm_settle()743 util.udevadm_settle()
744 if not os.path.exists(device):744 if not os.path.exists(device):
745 raise RuntimeError("Device %s did not exist and was not created "745 raise RuntimeError("Device %s did not exist and was not created "
746 "with a udevamd settle." % device)746 "with a udevadm settle." % device)
747747
748 # Whether or not the device existed above, it is possible that udev748 # Whether or not the device existed above, it is possible that udev
749 # events that would populate udev database (for reading by lsdname) have749 # events that would populate udev database (for reading by lsdname) have
diff --git a/cloudinit/handlers/jinja_template.py b/cloudinit/handlers/jinja_template.py
index 3fa4097..ce3accf 100644
--- a/cloudinit/handlers/jinja_template.py
+++ b/cloudinit/handlers/jinja_template.py
@@ -1,5 +1,6 @@
1# This file is part of cloud-init. See LICENSE file for license information.1# This file is part of cloud-init. See LICENSE file for license information.
22
3from errno import EACCES
3import os4import os
4import re5import re
56
@@ -76,7 +77,14 @@ def render_jinja_payload_from_file(
76 raise RuntimeError(77 raise RuntimeError(
77 'Cannot render jinja template vars. Instance data not yet'78 'Cannot render jinja template vars. Instance data not yet'
78 ' present at %s' % instance_data_file)79 ' present at %s' % instance_data_file)
79 instance_data = load_json(load_file(instance_data_file))80 try:
81 instance_data = load_json(load_file(instance_data_file))
82 except (IOError, OSError) as e:
83 if e.errno == EACCES:
84 raise RuntimeError(
85 'Cannot render jinja template vars. No read permission on'
86 " '%s'. Try sudo" % instance_data_file)
87
80 rendered_payload = render_jinja_payload(88 rendered_payload = render_jinja_payload(
81 payload, payload_fn, instance_data, debug)89 payload, payload_fn, instance_data, debug)
82 if not rendered_payload:90 if not rendered_payload:
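
[Editor's sketch] The jinja handler takes the other approach: it wraps the permission error in a RuntimeError so callers such as the render subcommand can turn it into a single user-facing error. A hedged sketch of that wrap-and-raise pattern (read_file stands in for cloudinit.util.load_file):

    from errno import EACCES

    def load_or_raise(instance_data_file, read_file):
        """Wrap EACCES in RuntimeError, as render_jinja_payload_from_file does."""
        try:
            return read_file(instance_data_file)
        except (IOError, OSError) as e:
            if e.errno == EACCES:
                raise RuntimeError(
                    "Cannot render jinja template vars. No read permission on"
                    " '%s'. Try sudo" % instance_data_file)
            raise  # other I/O errors propagate unchanged in this sketch
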
diff --git a/cloudinit/net/dhcp.py b/cloudinit/net/dhcp.py
index 12cf509..bdc5799 100644
--- a/cloudinit/net/dhcp.py
+++ b/cloudinit/net/dhcp.py
@@ -40,34 +40,56 @@ class EphemeralDHCPv4(object):
40 def __init__(self, iface=None):40 def __init__(self, iface=None):
41 self.iface = iface41 self.iface = iface
42 self._ephipv4 = None42 self._ephipv4 = None
43 self.lease = None
4344
44 def __enter__(self):45 def __enter__(self):
46 """Setup sandboxed dhcp context."""
47 return self.obtain_lease()
48
49 def __exit__(self, excp_type, excp_value, excp_traceback):
50 """Teardown sandboxed dhcp context."""
51 self.clean_network()
52
53 def clean_network(self):
54 """Exit _ephipv4 context to teardown of ip configuration performed."""
55 if self.lease:
56 self.lease = None
57 if not self._ephipv4:
58 return
59 self._ephipv4.__exit__(None, None, None)
60
61 def obtain_lease(self):
62 """Perform dhcp discovery in a sandboxed environment if possible.
63
64 @return: A dict representing dhcp options on the most recent lease
65 obtained from the dhclient discovery if run, otherwise an error
66 is raised.
67
68 @raises: NoDHCPLeaseError if no leases could be obtained.
69 """
70 if self.lease:
71 return self.lease
45 try:72 try:
46 leases = maybe_perform_dhcp_discovery(self.iface)73 leases = maybe_perform_dhcp_discovery(self.iface)
47 except InvalidDHCPLeaseFileError:74 except InvalidDHCPLeaseFileError:
48 raise NoDHCPLeaseError()75 raise NoDHCPLeaseError()
49 if not leases:76 if not leases:
50 raise NoDHCPLeaseError()77 raise NoDHCPLeaseError()
51 lease = leases[-1]78 self.lease = leases[-1]
52 LOG.debug("Received dhcp lease on %s for %s/%s",79 LOG.debug("Received dhcp lease on %s for %s/%s",
53 lease['interface'], lease['fixed-address'],80 self.lease['interface'], self.lease['fixed-address'],
54 lease['subnet-mask'])81 self.lease['subnet-mask'])
55 nmap = {'interface': 'interface', 'ip': 'fixed-address',82 nmap = {'interface': 'interface', 'ip': 'fixed-address',
56 'prefix_or_mask': 'subnet-mask',83 'prefix_or_mask': 'subnet-mask',
57 'broadcast': 'broadcast-address',84 'broadcast': 'broadcast-address',
58 'router': 'routers'}85 'router': 'routers'}
59 kwargs = dict([(k, lease.get(v)) for k, v in nmap.items()])86 kwargs = dict([(k, self.lease.get(v)) for k, v in nmap.items()])
60 if not kwargs['broadcast']:87 if not kwargs['broadcast']:
61 kwargs['broadcast'] = bcip(kwargs['prefix_or_mask'], kwargs['ip'])88 kwargs['broadcast'] = bcip(kwargs['prefix_or_mask'], kwargs['ip'])
62 ephipv4 = EphemeralIPv4Network(**kwargs)89 ephipv4 = EphemeralIPv4Network(**kwargs)
63 ephipv4.__enter__()90 ephipv4.__enter__()
64 self._ephipv4 = ephipv491 self._ephipv4 = ephipv4
65 return lease92 return self.lease
66
67 def __exit__(self, excp_type, excp_value, excp_traceback):
68 if not self._ephipv4:
69 return
70 self._ephipv4.__exit__(excp_type, excp_value, excp_traceback)
7193
7294
73def maybe_perform_dhcp_discovery(nic=None):95def maybe_perform_dhcp_discovery(nic=None):
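
[Editor's sketch] The EphemeralDHCPv4 rework keeps the context-manager behaviour but adds obtain_lease()/clean_network() so a caller can hold one lease across retries instead of re-running dhclient on every pass. A usage sketch under that assumption (interface name illustrative):

    from cloudinit.net.dhcp import EphemeralDHCPv4, NoDHCPLeaseError

    # Context-manager form: the lease and ephemeral IP are torn down on exit.
    try:
        with EphemeralDHCPv4(iface='eth0') as lease:
            print(lease['fixed-address'], lease.get('unknown-245'))
    except NoDHCPLeaseError:
        print('no DHCP lease could be obtained')

    # Long-lived form, as the Azure polling loop now uses it: obtain a lease
    # once, reuse it across retries, and tear it down explicitly when done.
    dhcp_ctx = EphemeralDHCPv4(iface='eth0')
    try:
        lease = dhcp_ctx.obtain_lease()  # cached; a second call returns the same lease
        # ... poll a metadata endpoint over the ephemeral network here ...
    finally:
        dhcp_ctx.clean_network()         # drops the cached lease and the ephemeral IP
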
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index 39391d0..9e8a1a8 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -22,7 +22,7 @@ from cloudinit.event import EventType
22from cloudinit.net.dhcp import EphemeralDHCPv422from cloudinit.net.dhcp import EphemeralDHCPv4
23from cloudinit import sources23from cloudinit import sources
24from cloudinit.sources.helpers.azure import get_metadata_from_fabric24from cloudinit.sources.helpers.azure import get_metadata_from_fabric
25from cloudinit.url_helper import readurl, UrlError25from cloudinit.url_helper import UrlError, readurl, retry_on_url_exc
26from cloudinit import util26from cloudinit import util
2727
28LOG = logging.getLogger(__name__)28LOG = logging.getLogger(__name__)
@@ -57,7 +57,7 @@ IMDS_URL = "http://169.254.169.254/metadata/"
57# List of static scripts and network config artifacts created by57# List of static scripts and network config artifacts created by
58# stock ubuntu suported images.58# stock ubuntu suported images.
59UBUNTU_EXTENDED_NETWORK_SCRIPTS = [59UBUNTU_EXTENDED_NETWORK_SCRIPTS = [
60 '/etc/netplan/90-azure-hotplug.yaml',60 '/etc/netplan/90-hotplug-azure.yaml',
61 '/usr/local/sbin/ephemeral_eth.sh',61 '/usr/local/sbin/ephemeral_eth.sh',
62 '/etc/udev/rules.d/10-net-device-added.rules',62 '/etc/udev/rules.d/10-net-device-added.rules',
63 '/run/network/interfaces.ephemeral.d',63 '/run/network/interfaces.ephemeral.d',
@@ -207,7 +207,9 @@ BUILTIN_DS_CONFIG = {
207 },207 },
208 'disk_aliases': {'ephemeral0': RESOURCE_DISK_PATH},208 'disk_aliases': {'ephemeral0': RESOURCE_DISK_PATH},
209 'dhclient_lease_file': LEASE_FILE,209 'dhclient_lease_file': LEASE_FILE,
210 'apply_network_config': True, # Use IMDS published network configuration
210}211}
212# RELEASE_BLOCKER: Xenial and earlier apply_network_config default is False
211213
212BUILTIN_CLOUD_CONFIG = {214BUILTIN_CLOUD_CONFIG = {
213 'disk_setup': {215 'disk_setup': {
@@ -278,6 +280,7 @@ class DataSourceAzure(sources.DataSource):
278 self._network_config = None280 self._network_config = None
279 # Regenerate network config new_instance boot and every boot281 # Regenerate network config new_instance boot and every boot
280 self.update_events['network'].add(EventType.BOOT)282 self.update_events['network'].add(EventType.BOOT)
283 self._ephemeral_dhcp_ctx = None
281284
282 def __str__(self):285 def __str__(self):
283 root = sources.DataSource.__str__(self)286 root = sources.DataSource.__str__(self)
@@ -404,7 +407,8 @@ class DataSourceAzure(sources.DataSource):
404 LOG.warning("%s was not mountable", cdev)407 LOG.warning("%s was not mountable", cdev)
405 continue408 continue
406409
407 if reprovision or self._should_reprovision(ret):410 perform_reprovision = reprovision or self._should_reprovision(ret)
411 if perform_reprovision:
408 ret = self._reprovision()412 ret = self._reprovision()
409 imds_md = get_metadata_from_imds(413 imds_md = get_metadata_from_imds(
410 self.fallback_interface, retries=3)414 self.fallback_interface, retries=3)
@@ -432,6 +436,18 @@ class DataSourceAzure(sources.DataSource):
432 crawled_data['metadata']['random_seed'] = seed436 crawled_data['metadata']['random_seed'] = seed
433 crawled_data['metadata']['instance-id'] = util.read_dmi_data(437 crawled_data['metadata']['instance-id'] = util.read_dmi_data(
434 'system-uuid')438 'system-uuid')
439
440 if perform_reprovision:
441 LOG.info("Reporting ready to Azure after getting ReprovisionData")
442 use_cached_ephemeral = (net.is_up(self.fallback_interface) and
443 getattr(self, '_ephemeral_dhcp_ctx', None))
444 if use_cached_ephemeral:
445 self._report_ready(lease=self._ephemeral_dhcp_ctx.lease)
446 self._ephemeral_dhcp_ctx.clean_network() # Teardown ephemeral
447 else:
448 with EphemeralDHCPv4() as lease:
449 self._report_ready(lease=lease)
450
435 return crawled_data451 return crawled_data
436452
437 def _is_platform_viable(self):453 def _is_platform_viable(self):
@@ -458,7 +474,8 @@ class DataSourceAzure(sources.DataSource):
458 except sources.InvalidMetaDataException as e:474 except sources.InvalidMetaDataException as e:
459 LOG.warning('Could not crawl Azure metadata: %s', e)475 LOG.warning('Could not crawl Azure metadata: %s', e)
460 return False476 return False
461 if self.distro and self.distro.name == 'ubuntu':477 if (self.distro and self.distro.name == 'ubuntu' and
478 self.ds_cfg.get('apply_network_config')):
462 maybe_remove_ubuntu_network_config_scripts()479 maybe_remove_ubuntu_network_config_scripts()
463480
464 # Process crawled data and augment with various config defaults481 # Process crawled data and augment with various config defaults
@@ -509,32 +526,29 @@ class DataSourceAzure(sources.DataSource):
509 report_ready = bool(not os.path.isfile(REPORTED_READY_MARKER_FILE))526 report_ready = bool(not os.path.isfile(REPORTED_READY_MARKER_FILE))
510 LOG.debug("Start polling IMDS")527 LOG.debug("Start polling IMDS")
511528
512 def exc_cb(msg, exception):
513 if isinstance(exception, UrlError) and exception.code == 404:
514 return True
515 # If we get an exception while trying to call IMDS, we
516 # call DHCP and setup the ephemeral network to acquire the new IP.
517 return False
518
519 while True:529 while True:
520 try:530 try:
521 with EphemeralDHCPv4() as lease:531 # Save our EphemeralDHCPv4 context so we avoid repeated dhcp
522 if report_ready:532 self._ephemeral_dhcp_ctx = EphemeralDHCPv4()
523 path = REPORTED_READY_MARKER_FILE533 lease = self._ephemeral_dhcp_ctx.obtain_lease()
524 LOG.info(534 if report_ready:
525 "Creating a marker file to report ready: %s", path)535 path = REPORTED_READY_MARKER_FILE
526 util.write_file(path, "{pid}: {time}\n".format(536 LOG.info(
527 pid=os.getpid(), time=time()))537 "Creating a marker file to report ready: %s", path)
528 self._report_ready(lease=lease)538 util.write_file(path, "{pid}: {time}\n".format(
529 report_ready = False539 pid=os.getpid(), time=time()))
530 return readurl(url, timeout=1, headers=headers,540 self._report_ready(lease=lease)
531 exception_cb=exc_cb, infinite=True).contents541 report_ready = False
542 return readurl(url, timeout=1, headers=headers,
543 exception_cb=retry_on_url_exc, infinite=True,
544 log_req_resp=False).contents
532 except UrlError:545 except UrlError:
546 # Teardown our EphemeralDHCPv4 context on failure as we retry
547 self._ephemeral_dhcp_ctx.clean_network()
533 pass548 pass
534549
535 def _report_ready(self, lease):550 def _report_ready(self, lease):
536 """Tells the fabric provisioning has completed551 """Tells the fabric provisioning has completed """
537 before we go into our polling loop."""
538 try:552 try:
539 get_metadata_from_fabric(None, lease['unknown-245'])553 get_metadata_from_fabric(None, lease['unknown-245'])
540 except Exception:554 except Exception:
@@ -619,7 +633,11 @@ class DataSourceAzure(sources.DataSource):
619 the blacklisted devices.633 the blacklisted devices.
620 """634 """
621 if not self._network_config:635 if not self._network_config:
622 self._network_config = parse_network_config(self._metadata_imds)636 if self.ds_cfg.get('apply_network_config'):
637 nc_src = self._metadata_imds
638 else:
639 nc_src = None
640 self._network_config = parse_network_config(nc_src)
623 return self._network_config641 return self._network_config
624642
625643
@@ -700,7 +718,7 @@ def can_dev_be_reformatted(devpath, preserve_ntfs):
700 file_count = util.mount_cb(cand_path, count_files, mtype="ntfs",718 file_count = util.mount_cb(cand_path, count_files, mtype="ntfs",
701 update_env_for_mount={'LANG': 'C'})719 update_env_for_mount={'LANG': 'C'})
702 except util.MountFailedError as e:720 except util.MountFailedError as e:
703 if "mount: unknown filesystem type 'ntfs'" in str(e):721 if "unknown filesystem type 'ntfs'" in str(e):
704 return True, (bmsg + ' but this system cannot mount NTFS,'722 return True, (bmsg + ' but this system cannot mount NTFS,'
705 ' assuming there are no important files.'723 ' assuming there are no important files.'
706 ' Formatting allowed.')724 ' Formatting allowed.')
@@ -1162,17 +1180,12 @@ def get_metadata_from_imds(fallback_nic, retries):
11621180
1163def _get_metadata_from_imds(retries):1181def _get_metadata_from_imds(retries):
11641182
1165 def retry_on_url_error(msg, exception):
1166 if isinstance(exception, UrlError) and exception.code == 404:
1167 return True # Continue retries
1168 return False # Stop retries on all other exceptions
1169
1170 url = IMDS_URL + "instance?api-version=2017-12-01"1183 url = IMDS_URL + "instance?api-version=2017-12-01"
1171 headers = {"Metadata": "true"}1184 headers = {"Metadata": "true"}
1172 try:1185 try:
1173 response = readurl(1186 response = readurl(
1174 url, timeout=1, headers=headers, retries=retries,1187 url, timeout=1, headers=headers, retries=retries,
1175 exception_cb=retry_on_url_error)1188 exception_cb=retry_on_url_exc)
1176 except Exception as e:1189 except Exception as e:
1177 LOG.debug('Ignoring IMDS instance metadata: %s', e)1190 LOG.debug('Ignoring IMDS instance metadata: %s', e)
1178 return {}1191 return {}
@@ -1195,7 +1208,7 @@ def maybe_remove_ubuntu_network_config_scripts(paths=None):
1195 additional interfaces which get attached by a customer at some point1208 additional interfaces which get attached by a customer at some point
1196 after initial boot. Since the Azure datasource can now regenerate1209 after initial boot. Since the Azure datasource can now regenerate
1197 network configuration as metadata reports these new devices, we no longer1210 network configuration as metadata reports these new devices, we no longer
1198 want the udev rules or netplan's 90-azure-hotplug.yaml to configure1211 want the udev rules or netplan's 90-hotplug-azure.yaml to configure
1199 networking on eth1 or greater as it might collide with cloud-init's1212 networking on eth1 or greater as it might collide with cloud-init's
1200 configuration.1213 configuration.
12011214
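
[Editor's sketch] Two behavioural points in the Azure hunk are easy to miss: network config from IMDS is now gated on the apply_network_config datasource option, and reprovisioning reports ready over the cached ephemeral DHCP lease when the fallback NIC is still up. A minimal sketch of the gating decision (names illustrative):

    def pick_network_config_source(ds_cfg, imds_metadata):
        """Return the metadata used to build network config, or None.

        None makes the config parser fall back to dhcp on the primary NIC,
        which is what disabling apply_network_config (the default on Ubuntu
        16.04 and earlier) gives you.
        """
        if ds_cfg.get('apply_network_config'):
            return imds_metadata
        return None
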
diff --git a/cloudinit/tests/test_url_helper.py b/cloudinit/tests/test_url_helper.py
index 113249d..aa9f3ec 100644
--- a/cloudinit/tests/test_url_helper.py
+++ b/cloudinit/tests/test_url_helper.py
@@ -1,10 +1,12 @@
1# This file is part of cloud-init. See LICENSE file for license information.1# This file is part of cloud-init. See LICENSE file for license information.
22
3from cloudinit.url_helper import oauth_headers, read_file_or_url3from cloudinit.url_helper import (
4 NOT_FOUND, UrlError, oauth_headers, read_file_or_url, retry_on_url_exc)
4from cloudinit.tests.helpers import CiTestCase, mock, skipIf5from cloudinit.tests.helpers import CiTestCase, mock, skipIf
5from cloudinit import util6from cloudinit import util
67
7import httpretty8import httpretty
9import requests
810
911
10try:12try:
@@ -64,3 +66,24 @@ class TestReadFileOrUrl(CiTestCase):
64 result = read_file_or_url(url)66 result = read_file_or_url(url)
65 self.assertEqual(result.contents, data)67 self.assertEqual(result.contents, data)
66 self.assertEqual(str(result), data.decode('utf-8'))68 self.assertEqual(str(result), data.decode('utf-8'))
69
70
71class TestRetryOnUrlExc(CiTestCase):
72
73 def test_do_not_retry_non_urlerror(self):
74 """When exception is not UrlError return False."""
75 myerror = IOError('something unexcpected')
76 self.assertFalse(retry_on_url_exc(msg='', exc=myerror))
77
78 def test_perform_retries_on_not_found(self):
79 """When exception is UrlError with a 404 status code return True."""
80 myerror = UrlError(cause=RuntimeError(
81 'something was not found'), code=NOT_FOUND)
82 self.assertTrue(retry_on_url_exc(msg='', exc=myerror))
83
84 def test_perform_retries_on_timeout(self):
85 """When exception is a requests.Timout return True."""
86 myerror = UrlError(cause=requests.Timeout('something timed out'))
87 self.assertTrue(retry_on_url_exc(msg='', exc=myerror))
88
89# vi: ts=4 expandtab
diff --git a/cloudinit/tests/test_util.py b/cloudinit/tests/test_util.py
index 749a384..e3d2dba 100644
--- a/cloudinit/tests/test_util.py
+++ b/cloudinit/tests/test_util.py
@@ -18,25 +18,51 @@ MOUNT_INFO = [
18]18]
1919
20OS_RELEASE_SLES = dedent("""\20OS_RELEASE_SLES = dedent("""\
21 NAME="SLES"\n21 NAME="SLES"
22 VERSION="12-SP3"\n22 VERSION="12-SP3"
23 VERSION_ID="12.3"\n23 VERSION_ID="12.3"
24 PRETTY_NAME="SUSE Linux Enterprise Server 12 SP3"\n24 PRETTY_NAME="SUSE Linux Enterprise Server 12 SP3"
25 ID="sles"\nANSI_COLOR="0;32"\n25 ID="sles"
26 CPE_NAME="cpe:/o:suse:sles:12:sp3"\n26 ANSI_COLOR="0;32"
27 CPE_NAME="cpe:/o:suse:sles:12:sp3"
27""")28""")
2829
29OS_RELEASE_OPENSUSE = dedent("""\30OS_RELEASE_OPENSUSE = dedent("""\
30NAME="openSUSE Leap"31 NAME="openSUSE Leap"
31VERSION="42.3"32 VERSION="42.3"
32ID=opensuse33 ID=opensuse
33ID_LIKE="suse"34 ID_LIKE="suse"
34VERSION_ID="42.3"35 VERSION_ID="42.3"
35PRETTY_NAME="openSUSE Leap 42.3"36 PRETTY_NAME="openSUSE Leap 42.3"
36ANSI_COLOR="0;32"37 ANSI_COLOR="0;32"
37CPE_NAME="cpe:/o:opensuse:leap:42.3"38 CPE_NAME="cpe:/o:opensuse:leap:42.3"
38BUG_REPORT_URL="https://bugs.opensuse.org"39 BUG_REPORT_URL="https://bugs.opensuse.org"
39HOME_URL="https://www.opensuse.org/"40 HOME_URL="https://www.opensuse.org/"
41""")
42
43OS_RELEASE_OPENSUSE_L15 = dedent("""\
44 NAME="openSUSE Leap"
45 VERSION="15.0"
46 ID="opensuse-leap"
47 ID_LIKE="suse opensuse"
48 VERSION_ID="15.0"
49 PRETTY_NAME="openSUSE Leap 15.0"
50 ANSI_COLOR="0;32"
51 CPE_NAME="cpe:/o:opensuse:leap:15.0"
52 BUG_REPORT_URL="https://bugs.opensuse.org"
53 HOME_URL="https://www.opensuse.org/"
54""")
55
56OS_RELEASE_OPENSUSE_TW = dedent("""\
57 NAME="openSUSE Tumbleweed"
58 ID="opensuse-tumbleweed"
59 ID_LIKE="opensuse suse"
60 VERSION_ID="20180920"
61 PRETTY_NAME="openSUSE Tumbleweed"
62 ANSI_COLOR="0;32"
63 CPE_NAME="cpe:/o:opensuse:tumbleweed:20180920"
64 BUG_REPORT_URL="https://bugs.opensuse.org"
65 HOME_URL="https://www.opensuse.org/"
40""")66""")
4167
42OS_RELEASE_CENTOS = dedent("""\68OS_RELEASE_CENTOS = dedent("""\
@@ -447,12 +473,35 @@ class TestGetLinuxDistro(CiTestCase):
447473
448 @mock.patch('cloudinit.util.load_file')474 @mock.patch('cloudinit.util.load_file')
449 def test_get_linux_opensuse(self, m_os_release, m_path_exists):475 def test_get_linux_opensuse(self, m_os_release, m_path_exists):
450 """Verify we get the correct name and machine arch on OpenSUSE."""476 """Verify we get the correct name and machine arch on openSUSE
477 prior to openSUSE Leap 15.
478 """
451 m_os_release.return_value = OS_RELEASE_OPENSUSE479 m_os_release.return_value = OS_RELEASE_OPENSUSE
452 m_path_exists.side_effect = TestGetLinuxDistro.os_release_exists480 m_path_exists.side_effect = TestGetLinuxDistro.os_release_exists
453 dist = util.get_linux_distro()481 dist = util.get_linux_distro()
454 self.assertEqual(('opensuse', '42.3', platform.machine()), dist)482 self.assertEqual(('opensuse', '42.3', platform.machine()), dist)
455483
484 @mock.patch('cloudinit.util.load_file')
485 def test_get_linux_opensuse_l15(self, m_os_release, m_path_exists):
486 """Verify we get the correct name and machine arch on openSUSE
487 for openSUSE Leap 15.0 and later.
488 """
489 m_os_release.return_value = OS_RELEASE_OPENSUSE_L15
490 m_path_exists.side_effect = TestGetLinuxDistro.os_release_exists
491 dist = util.get_linux_distro()
492 self.assertEqual(('opensuse-leap', '15.0', platform.machine()), dist)
493
494 @mock.patch('cloudinit.util.load_file')
495 def test_get_linux_opensuse_tw(self, m_os_release, m_path_exists):
496 """Verify we get the correct name and machine arch on openSUSE
497 for openSUSE Tumbleweed
498 """
499 m_os_release.return_value = OS_RELEASE_OPENSUSE_TW
500 m_path_exists.side_effect = TestGetLinuxDistro.os_release_exists
501 dist = util.get_linux_distro()
502 self.assertEqual(
503 ('opensuse-tumbleweed', '20180920', platform.machine()), dist)
504
456 @mock.patch('platform.dist')505 @mock.patch('platform.dist')
457 def test_get_linux_distro_no_data(self, m_platform_dist, m_path_exists):506 def test_get_linux_distro_no_data(self, m_platform_dist, m_path_exists):
458 """Verify we get no information if os-release does not exist"""507 """Verify we get no information if os-release does not exist"""
diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py
index 8067979..396d69a 100644
--- a/cloudinit/url_helper.py
+++ b/cloudinit/url_helper.py
@@ -199,7 +199,7 @@ def _get_ssl_args(url, ssl_details):
199def readurl(url, data=None, timeout=None, retries=0, sec_between=1,199def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
200 headers=None, headers_cb=None, ssl_details=None,200 headers=None, headers_cb=None, ssl_details=None,
201 check_status=True, allow_redirects=True, exception_cb=None,201 check_status=True, allow_redirects=True, exception_cb=None,
202 session=None, infinite=False):202 session=None, infinite=False, log_req_resp=True):
203 url = _cleanurl(url)203 url = _cleanurl(url)
204 req_args = {204 req_args = {
205 'url': url,205 'url': url,
@@ -256,9 +256,11 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
256 continue256 continue
257 filtered_req_args[k] = v257 filtered_req_args[k] = v
258 try:258 try:
259 LOG.debug("[%s/%s] open '%s' with %s configuration", i,259
260 "infinite" if infinite else manual_tries, url,260 if log_req_resp:
261 filtered_req_args)261 LOG.debug("[%s/%s] open '%s' with %s configuration", i,
262 "infinite" if infinite else manual_tries, url,
263 filtered_req_args)
262264
263 if session is None:265 if session is None:
264 session = requests.Session()266 session = requests.Session()
@@ -294,8 +296,11 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
294 break296 break
295 if (infinite and sec_between > 0) or \297 if (infinite and sec_between > 0) or \
296 (i + 1 < manual_tries and sec_between > 0):298 (i + 1 < manual_tries and sec_between > 0):
297 LOG.debug("Please wait %s seconds while we wait to try again",299
298 sec_between)300 if log_req_resp:
301 LOG.debug(
302 "Please wait %s seconds while we wait to try again",
303 sec_between)
299 time.sleep(sec_between)304 time.sleep(sec_between)
300 if excps:305 if excps:
301 raise excps[-1]306 raise excps[-1]
@@ -549,4 +554,18 @@ def oauth_headers(url, consumer_key, token_key, token_secret, consumer_secret,
549 _uri, signed_headers, _body = client.sign(url)554 _uri, signed_headers, _body = client.sign(url)
550 return signed_headers555 return signed_headers
551556
557
558def retry_on_url_exc(msg, exc):
559 """readurl exception_cb that will retry on NOT_FOUND and Timeout.
560
561 Returns False to raise the exception from readurl, True to retry.
562 """
563 if not isinstance(exc, UrlError):
564 return False
565 if exc.code == NOT_FOUND:
566 return True
567 if exc.cause and isinstance(exc.cause, requests.Timeout):
568 return True
569 return False
570
552# vi: ts=4 expandtab571# vi: ts=4 expandtab
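
[Editor's sketch] retry_on_url_exc is the new readurl exception_cb that keeps retrying on HTTP 404 and on requests.Timeout while letting any other failure raise. A hedged usage sketch following the Azure IMDS call in this branch (only meaningful on an Azure VM; URL and header values as used there):

    from cloudinit.url_helper import readurl, retry_on_url_exc

    # Retry the IMDS endpoint up to 3 times, continuing past 404s and
    # timeouts; any other error surfaces immediately via readurl.
    response = readurl(
        'http://169.254.169.254/metadata/instance?api-version=2017-12-01',
        headers={'Metadata': 'true'}, timeout=1, retries=3,
        exception_cb=retry_on_url_exc)
    print(response.contents)
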
diff --git a/cloudinit/util.py b/cloudinit/util.py
index c67d6be..7800f7b 100644
--- a/cloudinit/util.py
+++ b/cloudinit/util.py
@@ -615,8 +615,8 @@ def get_linux_distro():
615 distro_name = os_release.get('ID', '')615 distro_name = os_release.get('ID', '')
616 distro_version = os_release.get('VERSION_ID', '')616 distro_version = os_release.get('VERSION_ID', '')
617 if 'sles' in distro_name or 'suse' in distro_name:617 if 'sles' in distro_name or 'suse' in distro_name:
618 # RELEASE_BLOCKER: We will drop this sles ivergent behavior in618 # RELEASE_BLOCKER: We will drop this sles divergent behavior in
619 # before 18.4 so that get_linux_distro returns a named tuple619 # the future so that get_linux_distro returns a named tuple
620 # which will include both version codename and architecture620 # which will include both version codename and architecture
621 # on all distributions.621 # on all distributions.
622 flavor = platform.machine()622 flavor = platform.machine()
@@ -668,7 +668,8 @@ def system_info():
668 var = 'ubuntu'668 var = 'ubuntu'
669 elif linux_dist == 'redhat':669 elif linux_dist == 'redhat':
670 var = 'rhel'670 var = 'rhel'
671 elif linux_dist in ('opensuse', 'sles'):671 elif linux_dist in (
672 'opensuse', 'opensuse-tumbleweed', 'opensuse-leap', 'sles'):
672 var = 'suse'673 var = 'suse'
673 else:674 else:
674 var = 'linux'675 var = 'linux'
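
[Editor's sketch] The system_info change simply widens the set of distro IDs that map to the 'suse' variant. A one-function sketch covering only the branches visible in this hunk (IDs come from /etc/os-release via get_linux_distro):

    def variant_for(distro_id):
        """Collapse an os-release ID into the variant name used by templates."""
        if distro_id == 'ubuntu':
            return 'ubuntu'
        if distro_id == 'redhat':
            return 'rhel'
        if distro_id in ('opensuse', 'opensuse-tumbleweed', 'opensuse-leap',
                         'sles'):
            return 'suse'
        return 'linux'
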
diff --git a/debian/changelog b/debian/changelog
index 117fd16..a85c8cc 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,31 @@
1cloud-init (18.4-22-g6062595b-0ubuntu1) disco; urgency=medium
2
3 * New upstream snapshot.
4 - azure: retry imds polling on requests.Timeout (LP: #1800223)
5 - azure: Accept variation in error msg from mount for ntfs volumes
6 [Jason Zions] (LP: #1799338)
7 - azure: fix regression introduced when persisting ephemeral dhcp lease
8 [Aswin Rajamannar]
9 - azure: add udev rules to create cloud-init Gen2 disk name symlinks
10 (LP: #1797480)
11 - tests: ec2 mock missing httpretty user-data and instance-identity routes
12 - azure: remove /etc/netplan/90-hotplug-azure.yaml when net from IMDS
13 - azure: report ready to fabric after reprovision and reduce logging
14 [Aswin Rajamannar] (LP: #1799594)
15 - query: better error when missing read permission on instance-data
16 - instance-data: fallback to instance-data.json if sensitive is absent.
17 (LP: #1798189)
18 - docs: remove colon from network v1 config example. [Tomer Cohen]
19 - Add cloud-id binary to packages for SUSE [Jason Zions]
20 - systemd: On SUSE ensure cloud-init.service runs before wicked
21 [Robert Schweikert] (LP: #1799709)
22 - update detection of openSUSE variants [Robert Schweikert]
23 - azure: Add apply_network_config option to disable network from IMDS
24 (LP: #1798424)
25 - Correct spelling in an error message (udevadm). [Katie McLaughlin]
26
27 -- Chad Smith <chad.smith@canonical.com> Mon, 12 Nov 2018 20:33:12 -0700
28
1cloud-init (18.4-7-g4652b196-0ubuntu1) cosmic; urgency=medium29cloud-init (18.4-7-g4652b196-0ubuntu1) cosmic; urgency=medium
230
3 * New upstream snapshot.31 * New upstream snapshot.
diff --git a/doc/rtd/topics/datasources/azure.rst b/doc/rtd/topics/datasources/azure.rst
index 559011e..f73c369 100644
--- a/doc/rtd/topics/datasources/azure.rst
+++ b/doc/rtd/topics/datasources/azure.rst
@@ -57,6 +57,52 @@ in order to use waagent.conf with cloud-init, the following settings are recomme
57 ResourceDisk.MountPoint=/mnt57 ResourceDisk.MountPoint=/mnt
5858
5959
60Configuration
61-------------
62The following configuration can be set for the datasource in system
63configuration (in `/etc/cloud/cloud.cfg` or `/etc/cloud/cloud.cfg.d/`).
64
65The settings that may be configured are:
66
67 * **agent_command**: Either __builtin__ (default) or a command to run to getcw
68 metadata. If __builtin__, get metadata from walinuxagent. Otherwise run the
69 provided command to obtain metadata.
70 * **apply_network_config**: Boolean set to True to use network configuration
71 described by Azure's IMDS endpoint instead of fallback network config of
72 dhcp on eth0. Default is True. For Ubuntu 16.04 or earlier, default is False.
73 * **data_dir**: Path used to read metadata files and write crawled data.
74 * **dhclient_lease_file**: The fallback lease file to source when looking for
75 custom DHCP option 245 from Azure fabric.
76 * **disk_aliases**: A dictionary defining which device paths should be
77 interpreted as ephemeral images. See cc_disk_setup module for more info.
78 * **hostname_bounce**: A dictionary Azure hostname bounce behavior to react to
79 metadata changes.
80 * **hostname_bounce**: A dictionary Azure hostname bounce behavior to react to
81 metadata changes. Azure will throttle ifup/down in some cases after metadata
82 has been updated to inform dhcp server about updated hostnames.
83 * **set_hostname**: Boolean set to True when we want Azure to set the hostname
84 based on metadata.
85
86An example configuration with the default values is provided below:
87
88.. sourcecode:: yaml
89
90 datasource:
91 Azure:
92 agent_command: __builtin__
93 apply_network_config: true
94 data_dir: /var/lib/waagent
95 dhclient_lease_file: /var/lib/dhcp/dhclient.eth0.leases
96 disk_aliases:
97 ephemeral0: /dev/disk/cloud/azure_resource
98 hostname_bounce:
99 interface: eth0
100 command: builtin
101 policy: true
102 hostname_command: hostname
103 set_hostname: true
104
105
60Userdata106Userdata
61--------107--------
62Userdata is provided to cloud-init inside the ovf-env.xml file. Cloud-init108Userdata is provided to cloud-init inside the ovf-env.xml file. Cloud-init
diff --git a/packages/redhat/cloud-init.spec.in b/packages/redhat/cloud-init.spec.in
index a3a6d1e..6b2022b 100644
--- a/packages/redhat/cloud-init.spec.in
+++ b/packages/redhat/cloud-init.spec.in
@@ -191,6 +191,7 @@ fi
191191
192# Program binaries192# Program binaries
193%{_bindir}/cloud-init*193%{_bindir}/cloud-init*
194%{_bindir}/cloud-id*
194195
195# Docs196# Docs
196%doc LICENSE ChangeLog TODO.rst requirements.txt197%doc LICENSE ChangeLog TODO.rst requirements.txt
diff --git a/packages/suse/cloud-init.spec.in b/packages/suse/cloud-init.spec.in
index e781d74..26894b3 100644
--- a/packages/suse/cloud-init.spec.in
+++ b/packages/suse/cloud-init.spec.in
@@ -93,6 +93,7 @@ version_pys=$(cd "%{buildroot}" && find . -name version.py -type f)
9393
94# Program binaries94# Program binaries
95%{_bindir}/cloud-init*95%{_bindir}/cloud-init*
96%{_bindir}/cloud-id*
9697
97# systemd files98# systemd files
98/usr/lib/systemd/system-generators/*99/usr/lib/systemd/system-generators/*
diff --git a/systemd/cloud-init.service.tmpl b/systemd/cloud-init.service.tmpl
index b92e8ab..5cb0037 100644
--- a/systemd/cloud-init.service.tmpl
+++ b/systemd/cloud-init.service.tmpl
@@ -14,8 +14,7 @@ After=networking.service
14After=network.service14After=network.service
15{% endif %}15{% endif %}
16{% if variant in ["suse"] %}16{% if variant in ["suse"] %}
17Requires=wicked.service17Before=wicked.service
18After=wicked.service
19# setting hostname via hostnamectl depends on dbus, which otherwise18# setting hostname via hostnamectl depends on dbus, which otherwise
20# would not be guaranteed at this point.19# would not be guaranteed at this point.
21After=dbus.service20After=dbus.service
diff --git a/tests/unittests/test_builtin_handlers.py b/tests/unittests/test_builtin_handlers.py
index abe820e..b92ffc7 100644
--- a/tests/unittests/test_builtin_handlers.py
+++ b/tests/unittests/test_builtin_handlers.py
@@ -3,6 +3,7 @@
3"""Tests of the built-in user data handlers."""3"""Tests of the built-in user data handlers."""
44
5import copy5import copy
6import errno
6import os7import os
7import shutil8import shutil
8import tempfile9import tempfile
@@ -202,6 +203,30 @@ class TestJinjaTemplatePartHandler(CiTestCase):
202 os.path.exists(script_file),203 os.path.exists(script_file),
203 'Unexpected file created %s' % script_file)204 'Unexpected file created %s' % script_file)
204205
206 def test_jinja_template_handle_errors_on_unreadable_instance_data(self):
207 """If instance-data is unreadable, raise an error from handle_part."""
208 script_handler = ShellScriptPartHandler(self.paths)
209 instance_json = os.path.join(self.run_dir, 'instance-data.json')
210 util.write_file(instance_json, util.json_dumps({}))
211 h = JinjaTemplatePartHandler(
212 self.paths, sub_handlers=[script_handler])
213 with mock.patch(self.mpath + 'load_file') as m_load:
214 with self.assertRaises(RuntimeError) as context_manager:
215 m_load.side_effect = OSError(errno.EACCES, 'Not allowed')
216 h.handle_part(
217 data='data', ctype="!" + handlers.CONTENT_START,
218 filename='part01',
219 payload='## template: jinja \n#!/bin/bash\necho himom',
220 frequency='freq', headers='headers')
221 script_file = os.path.join(script_handler.script_dir, 'part01')
222 self.assertEqual(
223 'Cannot render jinja template vars. No read permission on'
224 " '{rdir}/instance-data.json'. Try sudo".format(rdir=self.run_dir),
225 str(context_manager.exception))
226 self.assertFalse(
227 os.path.exists(script_file),
228 'Unexpected file created %s' % script_file)
229
205 @skipUnlessJinja()230 @skipUnlessJinja()
206 def test_jinja_template_handle_renders_jinja_content(self):231 def test_jinja_template_handle_renders_jinja_content(self):
207 """When present, render jinja variables from instance-data.json."""232 """When present, render jinja variables from instance-data.json."""
diff --git a/tests/unittests/test_datasource/test_azure.py b/tests/unittests/test_datasource/test_azure.py
index 0f4b7bf..56484b2 100644
--- a/tests/unittests/test_datasource/test_azure.py
+++ b/tests/unittests/test_datasource/test_azure.py
@@ -17,6 +17,7 @@ import crypt
17import httpretty17import httpretty
18import json18import json
19import os19import os
20import requests
20import stat21import stat
21import xml.etree.ElementTree as ET22import xml.etree.ElementTree as ET
22import yaml23import yaml
@@ -184,6 +185,35 @@ class TestGetMetadataFromIMDS(HttprettyTestCase):
184 "Crawl of Azure Instance Metadata Service (IMDS) took", # log_time185 "Crawl of Azure Instance Metadata Service (IMDS) took", # log_time
185 self.logs.getvalue())186 self.logs.getvalue())
186187
188 @mock.patch('requests.Session.request')
189 @mock.patch('cloudinit.url_helper.time.sleep')
190 @mock.patch(MOCKPATH + 'net.is_up')
191 def test_get_metadata_from_imds_retries_on_timeout(
192 self, m_net_is_up, m_sleep, m_request):
193 """Retry IMDS network metadata on timeout errors."""
194
195 self.attempt = 0
196 m_request.side_effect = requests.Timeout('Fake Connection Timeout')
197
198 def retry_callback(request, uri, headers):
199 self.attempt += 1
200 raise requests.Timeout('Fake connection timeout')
201
202 httpretty.register_uri(
203 httpretty.GET,
+            dsaz.IMDS_URL + 'instance?api-version=2017-12-01',
+            body=retry_callback)
+
+        m_net_is_up.return_value = True  # skips dhcp
+
+        self.assertEqual({}, dsaz.get_metadata_from_imds('eth9', retries=3))
+
+        m_net_is_up.assert_called_with('eth9')
+        self.assertEqual([mock.call(1)]*3, m_sleep.call_args_list)
+        self.assertIn(
+            "Crawl of Azure Instance Metadata Service (IMDS) took",  # log_time
+            self.logs.getvalue())
+
 
 class TestAzureDataSource(CiTestCase):
 
@@ -256,7 +286,8 @@ scbus-1 on xpt0 bus 0
         ])
         return dsaz
 
-    def _get_ds(self, data, agent_command=None, distro=None):
+    def _get_ds(self, data, agent_command=None, distro=None,
+                apply_network=None):
 
         def dsdevs():
             return data.get('dsdevs', [])
@@ -312,6 +343,8 @@ scbus-1 on xpt0 bus 0
             data.get('sys_cfg', {}), distro=distro, paths=self.paths)
         if agent_command is not None:
             dsrc.ds_cfg['agent_command'] = agent_command
+        if apply_network is not None:
+            dsrc.ds_cfg['apply_network_config'] = apply_network
 
         return dsrc
 
@@ -434,14 +467,26 @@ fdescfs /dev/fd fdescfs rw 0 0
 
     def test_get_data_on_ubuntu_will_remove_network_scripts(self):
         """get_data will remove ubuntu net scripts on Ubuntu distro."""
+        sys_cfg = {'datasource': {'Azure': {'apply_network_config': True}}}
        odata = {'HostName': "myhost", 'UserName': "myuser"}
         data = {'ovfcontent': construct_valid_ovf_env(data=odata),
-                'sys_cfg': {}}
+                'sys_cfg': sys_cfg}
 
         dsrc = self._get_ds(data, distro='ubuntu')
         dsrc.get_data()
         self.m_remove_ubuntu_network_scripts.assert_called_once_with()
 
+    def test_get_data_on_ubuntu_will_not_remove_network_scripts_disabled(self):
+        """When apply_network_config false, do not remove scripts on Ubuntu."""
+        sys_cfg = {'datasource': {'Azure': {'apply_network_config': False}}}
+        odata = {'HostName': "myhost", 'UserName': "myuser"}
+        data = {'ovfcontent': construct_valid_ovf_env(data=odata),
+                'sys_cfg': sys_cfg}
+
+        dsrc = self._get_ds(data, distro='ubuntu')
+        dsrc.get_data()
+        self.m_remove_ubuntu_network_scripts.assert_not_called()
+
     def test_crawl_metadata_returns_structured_data_and_caches_nothing(self):
         """Return all structured metadata and cache no class attributes."""
         yaml_cfg = "{agent_command: my_command}\n"
@@ -498,6 +543,58 @@ fdescfs /dev/fd fdescfs rw 0 0
             dsrc.crawl_metadata()
         self.assertEqual(str(cm.exception), error_msg)
 
+    @mock.patch('cloudinit.sources.DataSourceAzure.EphemeralDHCPv4')
+    @mock.patch('cloudinit.sources.DataSourceAzure.util.write_file')
+    @mock.patch(
+        'cloudinit.sources.DataSourceAzure.DataSourceAzure._report_ready')
+    @mock.patch('cloudinit.sources.DataSourceAzure.DataSourceAzure._poll_imds')
+    def test_crawl_metadata_on_reprovision_reports_ready(
+            self, poll_imds_func,
+            report_ready_func,
+            m_write, m_dhcp):
+        """If reprovisioning, report ready at the end"""
+        ovfenv = construct_valid_ovf_env(
+            platform_settings={"PreprovisionedVm": "True"})
+
+        data = {'ovfcontent': ovfenv,
+                'sys_cfg': {}}
+        dsrc = self._get_ds(data)
+        poll_imds_func.return_value = ovfenv
+        dsrc.crawl_metadata()
+        self.assertEqual(1, report_ready_func.call_count)
+
+    @mock.patch('cloudinit.sources.DataSourceAzure.util.write_file')
+    @mock.patch(
+        'cloudinit.sources.DataSourceAzure.DataSourceAzure._report_ready')
+    @mock.patch('cloudinit.net.dhcp.EphemeralIPv4Network')
+    @mock.patch('cloudinit.net.dhcp.maybe_perform_dhcp_discovery')
+    @mock.patch('cloudinit.sources.DataSourceAzure.readurl')
+    def test_crawl_metadata_on_reprovision_reports_ready_using_lease(
+            self, m_readurl, m_dhcp,
+            m_net, report_ready_func,
+            m_write):
+        """If reprovisioning, report ready using the obtained lease"""
+        ovfenv = construct_valid_ovf_env(
+            platform_settings={"PreprovisionedVm": "True"})
+
+        data = {'ovfcontent': ovfenv,
+                'sys_cfg': {}}
+        dsrc = self._get_ds(data)
+
+        lease = {
+            'interface': 'eth9', 'fixed-address': '192.168.2.9',
+            'routers': '192.168.2.1', 'subnet-mask': '255.255.255.0',
+            'unknown-245': '624c3620'}
+        m_dhcp.return_value = [lease]
+
+        reprovision_ovfenv = construct_valid_ovf_env()
+        m_readurl.return_value = url_helper.StringResponse(
+            reprovision_ovfenv.encode('utf-8'))
+
+        dsrc.crawl_metadata()
+        self.assertEqual(2, report_ready_func.call_count)
+        report_ready_func.assert_called_with(lease=lease)
+
     def test_waagent_d_has_0700_perms(self):
         # we expect /var/lib/waagent to be created 0700
         dsrc = self._get_ds({'ovfcontent': construct_valid_ovf_env()})
@@ -523,8 +620,10 @@ fdescfs /dev/fd fdescfs rw 0 0
 
     def test_network_config_set_from_imds(self):
         """Datasource.network_config returns IMDS network data."""
+        sys_cfg = {'datasource': {'Azure': {'apply_network_config': True}}}
         odata = {}
-        data = {'ovfcontent': construct_valid_ovf_env(data=odata)}
+        data = {'ovfcontent': construct_valid_ovf_env(data=odata),
+                'sys_cfg': sys_cfg}
         expected_network_config = {
             'ethernets': {
                 'eth0': {'set-name': 'eth0',
@@ -803,9 +902,10 @@ fdescfs /dev/fd fdescfs rw 0 0
     @mock.patch('cloudinit.net.generate_fallback_config')
     def test_imds_network_config(self, mock_fallback):
         """Network config is generated from IMDS network data when present."""
+        sys_cfg = {'datasource': {'Azure': {'apply_network_config': True}}}
         odata = {'HostName': "myhost", 'UserName': "myuser"}
         data = {'ovfcontent': construct_valid_ovf_env(data=odata),
-                'sys_cfg': {}}
+                'sys_cfg': sys_cfg}
 
         dsrc = self._get_ds(data)
         ret = dsrc.get_data()
@@ -825,6 +925,36 @@ fdescfs /dev/fd fdescfs rw 0 0
     @mock.patch('cloudinit.net.get_devicelist')
     @mock.patch('cloudinit.net.device_driver')
     @mock.patch('cloudinit.net.generate_fallback_config')
+    def test_imds_network_ignored_when_apply_network_config_false(
+            self, mock_fallback, mock_dd, mock_devlist, mock_get_mac):
+        """When apply_network_config is False, use fallback instead of IMDS."""
+        sys_cfg = {'datasource': {'Azure': {'apply_network_config': False}}}
+        odata = {'HostName': "myhost", 'UserName': "myuser"}
+        data = {'ovfcontent': construct_valid_ovf_env(data=odata),
+                'sys_cfg': sys_cfg}
+        fallback_config = {
+            'version': 1,
+            'config': [{
+                'type': 'physical', 'name': 'eth0',
+                'mac_address': '00:11:22:33:44:55',
+                'params': {'driver': 'hv_netsvc'},
+                'subnets': [{'type': 'dhcp'}],
+            }]
+        }
+        mock_fallback.return_value = fallback_config
+
+        mock_devlist.return_value = ['eth0']
+        mock_dd.return_value = ['hv_netsvc']
+        mock_get_mac.return_value = '00:11:22:33:44:55'
+
+        dsrc = self._get_ds(data)
+        self.assertTrue(dsrc.get_data())
+        self.assertEqual(dsrc.network_config, fallback_config)
+
+    @mock.patch('cloudinit.net.get_interface_mac')
+    @mock.patch('cloudinit.net.get_devicelist')
+    @mock.patch('cloudinit.net.device_driver')
+    @mock.patch('cloudinit.net.generate_fallback_config')
     def test_fallback_network_config(self, mock_fallback, mock_dd,
                                      mock_devlist, mock_get_mac):
         """On absent IMDS network data, generate network fallback config."""
@@ -1411,21 +1541,20 @@ class TestCanDevBeReformatted(CiTestCase):
                     '/dev/sda1': {'num': 1, 'fs': 'ntfs', 'files': []}
                 }}})
 
-        err = ("Unexpected error while running command.\n",
-               "Command: ['mount', '-o', 'ro,sync', '-t', 'auto', ",
-               "'/dev/sda1', '/fake-tmp/dir']\n"
-               "Exit code: 32\n"
-               "Reason: -\n"
-               "Stdout: -\n"
-               "Stderr: mount: unknown filesystem type 'ntfs'")
-        self.m_mount_cb.side_effect = MountFailedError(
-            'Failed mounting %s to %s due to: %s' %
-            ('/dev/sda', '/fake-tmp/dir', err))
-
-        value, msg = dsaz.can_dev_be_reformatted('/dev/sda',
-                                                 preserve_ntfs=False)
-        self.assertTrue(value)
-        self.assertIn('cannot mount NTFS, assuming', msg)
+        error_msgs = [
+            "Stderr: mount: unknown filesystem type 'ntfs'",  # RHEL
+            "Stderr: mount: /dev/sdb1: unknown filesystem type 'ntfs'"  # SLES
+        ]
+
+        for err_msg in error_msgs:
+            self.m_mount_cb.side_effect = MountFailedError(
+                "Failed mounting %s to %s due to: \nUnexpected.\n%s" %
+                ('/dev/sda', '/fake-tmp/dir', err_msg))
+
+            value, msg = dsaz.can_dev_be_reformatted('/dev/sda',
+                                                     preserve_ntfs=False)
+            self.assertTrue(value)
+            self.assertIn('cannot mount NTFS, assuming', msg)
 
     def test_never_destroy_ntfs_config_false(self):
         """Normally formattable situation with never_destroy_ntfs set."""
diff --git a/tests/unittests/test_datasource/test_ec2.py b/tests/unittests/test_datasource/test_ec2.py
index 9f81255..1a5956d 100644
--- a/tests/unittests/test_datasource/test_ec2.py
+++ b/tests/unittests/test_datasource/test_ec2.py
@@ -211,9 +211,9 @@ class TestEc2(test_helpers.HttprettyTestCase):
         self.metadata_addr = self.datasource.metadata_urls[0]
         self.tmp = self.tmp_dir()
 
-    def data_url(self, version):
+    def data_url(self, version, data_item='meta-data'):
         """Return a metadata url based on the version provided."""
-        return '/'.join([self.metadata_addr, version, 'meta-data', ''])
+        return '/'.join([self.metadata_addr, version, data_item])
 
     def _patch_add_cleanup(self, mpath, *args, **kwargs):
         p = mock.patch(mpath, *args, **kwargs)
@@ -238,10 +238,18 @@ class TestEc2(test_helpers.HttprettyTestCase):
         all_versions = (
             [ds.min_metadata_version] + ds.extended_metadata_versions)
         for version in all_versions:
-            metadata_url = self.data_url(version)
+            metadata_url = self.data_url(version) + '/'
             if version == md_version:
                 # Register all metadata for desired version
-                register_mock_metaserver(metadata_url, md)
+                register_mock_metaserver(
+                    metadata_url, md.get('md', DEFAULT_METADATA))
+                userdata_url = self.data_url(
+                    version, data_item='user-data')
+                register_mock_metaserver(userdata_url, md.get('ud', ''))
+                identity_url = self.data_url(
+                    version, data_item='dynamic/instance-identity')
+                register_mock_metaserver(
+                    identity_url, md.get('id', DYNAMIC_METADATA))
             else:
                 instance_id_url = metadata_url + 'instance-id'
                 if version == ds.min_metadata_version:
@@ -261,7 +269,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data=self.valid_platform_data,
             sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
-            md=DEFAULT_METADATA)
+            md={'md': DEFAULT_METADATA})
         find_fallback_path = (
             'cloudinit.sources.DataSourceEc2.net.find_fallback_nic')
         with mock.patch(find_fallback_path) as m_find_fallback:
@@ -293,7 +301,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data=self.valid_platform_data,
             sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
-            md=DEFAULT_METADATA)
+            md={'md': DEFAULT_METADATA})
         find_fallback_path = (
             'cloudinit.sources.DataSourceEc2.net.find_fallback_nic')
         with mock.patch(find_fallback_path) as m_find_fallback:
@@ -322,7 +330,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data=self.valid_platform_data,
             sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
-            md=DEFAULT_METADATA)
+            md={'md': DEFAULT_METADATA})
         ds._network_config = {'cached': 'data'}
         self.assertEqual({'cached': 'data'}, ds.network_config)
 
@@ -338,7 +346,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data=self.valid_platform_data,
             sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
-            md=old_metadata)
+            md={'md': old_metadata})
         self.assertTrue(ds.get_data())
         # Provide new revision of metadata that contains network data
         register_mock_metaserver(
@@ -372,7 +380,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data=self.valid_platform_data,
             sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
-            md=DEFAULT_METADATA)
+            md={'md': DEFAULT_METADATA})
         # Mock 404s on all versions except latest
         all_versions = (
             [ds.min_metadata_version] + ds.extended_metadata_versions)
@@ -399,7 +407,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data=self.valid_platform_data,
             sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
-            md=DEFAULT_METADATA)
+            md={'md': DEFAULT_METADATA})
         ret = ds.get_data()
         self.assertTrue(ret)
         self.assertEqual(0, m_dhcp.call_count)
@@ -412,7 +420,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data=self.valid_platform_data,
             sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
-            md=DEFAULT_METADATA)
+            md={'md': DEFAULT_METADATA})
         ret = ds.get_data()
         self.assertTrue(ret)
 
@@ -422,7 +430,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data={'uuid': uuid, 'uuid_source': 'dmi', 'serial': ''},
             sys_cfg={'datasource': {'Ec2': {'strict_id': True}}},
-            md=DEFAULT_METADATA)
+            md={'md': DEFAULT_METADATA})
         ret = ds.get_data()
         self.assertFalse(ret)
 
@@ -432,7 +440,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data={'uuid': uuid, 'uuid_source': 'dmi', 'serial': ''},
             sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
-            md=DEFAULT_METADATA)
+            md={'md': DEFAULT_METADATA})
         ret = ds.get_data()
         self.assertTrue(ret)
 
@@ -442,7 +450,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data=self.valid_platform_data,
             sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
-            md=DEFAULT_METADATA)
+            md={'md': DEFAULT_METADATA})
         platform_attrs = [
             attr for attr in ec2.CloudNames.__dict__.keys()
             if not attr.startswith('__')]
@@ -469,7 +477,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data=self.valid_platform_data,
             sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
-            md=DEFAULT_METADATA)
+            md={'md': DEFAULT_METADATA})
         ret = ds.get_data()
         self.assertFalse(ret)
         self.assertIn(
@@ -499,7 +507,7 @@ class TestEc2(test_helpers.HttprettyTestCase):
         ds = self._setup_ds(
             platform_data=self.valid_platform_data,
             sys_cfg={'datasource': {'Ec2': {'strict_id': False}}},
-            md=DEFAULT_METADATA)
+            md={'md': DEFAULT_METADATA})
 
         ret = ds.get_data()
         self.assertTrue(ret)
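
For reference, a quick sketch of the URLs the reworked data_url() helper above produces, assuming the usual EC2 metadata address and an example API version:

    # Sketch only: same join logic as the data_url() change above.
    metadata_addr = 'http://169.254.169.254'  # example metadata address

    def data_url(version, data_item='meta-data'):
        return '/'.join([metadata_addr, version, data_item])

    print(data_url('2016-09-02'))                         # .../2016-09-02/meta-data
    print(data_url('2016-09-02', data_item='user-data'))  # .../2016-09-02/user-data
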
diff --git a/udev/66-azure-ephemeral.rules b/udev/66-azure-ephemeral.rules
index b9c5c3e..3032f7e 100644
--- a/udev/66-azure-ephemeral.rules
+++ b/udev/66-azure-ephemeral.rules
@@ -4,10 +4,26 @@ SUBSYSTEM!="block", GOTO="cloud_init_end"
 ATTRS{ID_VENDOR}!="Msft", GOTO="cloud_init_end"
 ATTRS{ID_MODEL}!="Virtual_Disk", GOTO="cloud_init_end"
 
-# Root has a GUID of 0000 as the second value
+# Root has a GUID of 0000 as the second value on Gen1 instances
 # The resource/resource has GUID of 0001 as the second value
 ATTRS{device_id}=="?00000000-0000-*", ENV{fabric_name}="azure_root", GOTO="ci_azure_names"
 ATTRS{device_id}=="?00000000-0001-*", ENV{fabric_name}="azure_resource", GOTO="ci_azure_names"
+
+# Azure well known SCSI controllers on Gen2 instances
+ATTRS{device_id}=="{f8b3781a-1e82-4818-a1c3-63d806ec15bb}", ENV{fabric_scsi_controller}="scsi0", GOTO="azure_datadisk"
+# Do not create symlinks for scsi[1-3] or unmatched device_ids
+ATTRS{device_id}=="{f8b3781b-1e82-4818-a1c3-63d806ec15bb}", ENV{fabric_scsi_controller}="scsi1", GOTO="cloud_init_end"
+ATTRS{device_id}=="{f8b3781c-1e82-4818-a1c3-63d806ec15bb}", ENV{fabric_scsi_controller}="scsi2", GOTO="cloud_init_end"
+ATTRS{device_id}=="{f8b3781d-1e82-4818-a1c3-63d806ec15bb}", ENV{fabric_scsi_controller}="scsi3", GOTO="cloud_init_end"
+GOTO="cloud_init_end"
+
+# Map scsi#/lun# fabric_name to azure_root|resource on Gen2 instances
+LABEL="azure_datadisk"
+ENV{DEVTYPE}=="partition", PROGRAM="/bin/sh -c 'readlink /sys/class/block/%k/../device|cut -d: -f4'", ENV{fabric_name}="$env{fabric_scsi_controller}/lun$result"
+ENV{DEVTYPE}=="disk", PROGRAM="/bin/sh -c 'readlink /sys/class/block/%k/device|cut -d: -f4'", ENV{fabric_name}="$env{fabric_scsi_controller}/lun$result"
+
+ENV{fabric_name}=="scsi0/lun0", ENV{fabric_name}="azure_root", GOTO="ci_azure_names"
+ENV{fabric_name}=="scsi0/lun1", ENV{fabric_name}="azure_resource", GOTO="ci_azure_names"
 GOTO="cloud_init_end"
 
 # Create the symlinks
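
The Gen2 rules above derive the LUN from the SCSI address behind the block device (host:bus:target:lun), which is what the readlink|cut PROGRAM does. A minimal Python sketch of the same lookup, assuming a Linux sysfs layout and an example device name:

    # Sketch only: equivalent of readlink /sys/class/block/<dev>/device | cut -d: -f4
    import os

    def lun_for(device='sdb'):  # 'sdb' is an example device name
        scsi_addr = os.path.basename(
            os.readlink('/sys/class/block/%s/device' % device))
        return scsi_addr.split(':')[3]  # host:bus:target:lun -> lun

    # e.g. scsi0/lun1 is then tagged azure_resource by the rules above.
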
