Merge ~chad.smith/cloud-init:ubuntu/artful into cloud-init:ubuntu/artful
- Git
- lp:~chad.smith/cloud-init
- ubuntu/artful
- Merge into ubuntu/artful
Proposed by
Chad Smith
Status: Merged
Merged at revision: a9968540aa273bee10b0ca040133c01aa3459793
Proposed branch: ~chad.smith/cloud-init:ubuntu/artful
Merge into: cloud-init:ubuntu/artful
Diff against target: |
848 lines (+260/-107) 21 files modified
cloudinit/config/cc_lxd.py (+1/-1) cloudinit/config/cc_ntp.py (+3/-1) cloudinit/config/cc_resizefs.py (+13/-30) cloudinit/config/cc_users_groups.py (+2/-1) cloudinit/config/schema.py (+1/-1) debian/changelog (+16/-0) doc/examples/cloud-config-user-groups.txt (+3/-3) tests/cloud_tests/testcases/__init__.py (+7/-0) tests/cloud_tests/testcases/base.py (+8/-4) tests/cloud_tests/testcases/examples/including_user_groups.py (+6/-0) tests/cloud_tests/testcases/examples/including_user_groups.yaml (+5/-2) tests/cloud_tests/testcases/main/command_output_simple.py (+16/-0) tests/cloud_tests/testcases/modules/ntp.yaml (+2/-2) tests/cloud_tests/testcases/modules/user_groups.py (+6/-0) tests/cloud_tests/testcases/modules/user_groups.yaml (+5/-2) tests/unittests/test_handler/test_handler_lxd.py (+8/-8) tests/unittests/test_handler/test_handler_ntp.py (+12/-11) tests/unittests/test_handler/test_handler_resizefs.py (+57/-34) tests/unittests/test_handler/test_schema.py (+36/-1) tools/read-dependencies (+36/-5) tools/run-centos (+17/-1) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status
---|---|---|---
Server Team CI bot | continuous-integration | | Approve
Scott Moser | | | Pending
Review via email: mp+332673@code.launchpad.net
Commit message
Description of the change
Upstream snapshot into Artful for SRU
To post a comment you must log in.
Revision history for this message
Server Team CI bot (server-team-bot) wrote:
review:
Approve
(continuous-integration)
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/cloudinit/config/cc_lxd.py b/cloudinit/config/cc_lxd.py | |||
2 | index e6262f8..09374d2 100644 | |||
3 | --- a/cloudinit/config/cc_lxd.py | |||
4 | +++ b/cloudinit/config/cc_lxd.py | |||
5 | @@ -72,7 +72,7 @@ def handle(name, cfg, cloud, log, args): | |||
6 | 72 | type(init_cfg)) | 72 | type(init_cfg)) |
7 | 73 | init_cfg = {} | 73 | init_cfg = {} |
8 | 74 | 74 | ||
10 | 75 | bridge_cfg = lxd_cfg.get('bridge') | 75 | bridge_cfg = lxd_cfg.get('bridge', {}) |
11 | 76 | if not isinstance(bridge_cfg, dict): | 76 | if not isinstance(bridge_cfg, dict): |
12 | 77 | log.warn("lxd/bridge config must be a dictionary. found a '%s'", | 77 | log.warn("lxd/bridge config must be a dictionary. found a '%s'", |
13 | 78 | type(bridge_cfg)) | 78 | type(bridge_cfg)) |
14 | diff --git a/cloudinit/config/cc_ntp.py b/cloudinit/config/cc_ntp.py | |||
15 | index 15ae1ec..d43d060 100644 | |||
16 | --- a/cloudinit/config/cc_ntp.py | |||
17 | +++ b/cloudinit/config/cc_ntp.py | |||
18 | @@ -100,7 +100,9 @@ def handle(name, cfg, cloud, log, _args): | |||
19 | 100 | LOG.debug( | 100 | LOG.debug( |
20 | 101 | "Skipping module named %s, not present or disabled by cfg", name) | 101 | "Skipping module named %s, not present or disabled by cfg", name) |
21 | 102 | return | 102 | return |
23 | 103 | ntp_cfg = cfg.get('ntp', {}) | 103 | ntp_cfg = cfg['ntp'] |
24 | 104 | if ntp_cfg is None: | ||
25 | 105 | ntp_cfg = {} # Allow empty config which will install the package | ||
26 | 104 | 106 | ||
27 | 105 | # TODO drop this when validate_cloudconfig_schema is strict=True | 107 | # TODO drop this when validate_cloudconfig_schema is strict=True |
28 | 106 | if not isinstance(ntp_cfg, (dict)): | 108 | if not isinstance(ntp_cfg, (dict)): |
29 | diff --git a/cloudinit/config/cc_resizefs.py b/cloudinit/config/cc_resizefs.py | |||
30 | index f774baa..0d282e6 100644 | |||
31 | --- a/cloudinit/config/cc_resizefs.py | |||
32 | +++ b/cloudinit/config/cc_resizefs.py | |||
33 | @@ -145,25 +145,6 @@ RESIZE_FS_PRECHECK_CMDS = { | |||
34 | 145 | } | 145 | } |
35 | 146 | 146 | ||
36 | 147 | 147 | ||
37 | 148 | def rootdev_from_cmdline(cmdline): | ||
38 | 149 | found = None | ||
39 | 150 | for tok in cmdline.split(): | ||
40 | 151 | if tok.startswith("root="): | ||
41 | 152 | found = tok[5:] | ||
42 | 153 | break | ||
43 | 154 | if found is None: | ||
44 | 155 | return None | ||
45 | 156 | |||
46 | 157 | if found.startswith("/dev/"): | ||
47 | 158 | return found | ||
48 | 159 | if found.startswith("LABEL="): | ||
49 | 160 | return "/dev/disk/by-label/" + found[len("LABEL="):] | ||
50 | 161 | if found.startswith("UUID="): | ||
51 | 162 | return "/dev/disk/by-uuid/" + found[len("UUID="):] | ||
52 | 163 | |||
53 | 164 | return "/dev/" + found | ||
54 | 165 | |||
55 | 166 | |||
56 | 167 | def can_skip_resize(fs_type, resize_what, devpth): | 148 | def can_skip_resize(fs_type, resize_what, devpth): |
57 | 168 | fstype_lc = fs_type.lower() | 149 | fstype_lc = fs_type.lower() |
58 | 169 | for i, func in RESIZE_FS_PRECHECK_CMDS.items(): | 150 | for i, func in RESIZE_FS_PRECHECK_CMDS.items(): |
59 | @@ -172,14 +153,15 @@ def can_skip_resize(fs_type, resize_what, devpth): | |||
60 | 172 | return False | 153 | return False |
61 | 173 | 154 | ||
62 | 174 | 155 | ||
65 | 175 | def is_device_path_writable_block(devpath, info, log): | 156 | def maybe_get_writable_device_path(devpath, info, log): |
66 | 176 | """Return True if devpath is a writable block device. | 157 | """Return updated devpath if the devpath is a writable block device. |
67 | 177 | 158 | ||
69 | 178 | @param devpath: Path to the root device we want to resize. | 159 | @param devpath: Requested path to the root device we want to resize. |
70 | 179 | @param info: String representing information about the requested device. | 160 | @param info: String representing information about the requested device. |
71 | 180 | @param log: Logger to which logs will be added upon error. | 161 | @param log: Logger to which logs will be added upon error. |
72 | 181 | 162 | ||
74 | 182 | @returns Boolean True if block device is writable | 163 | @returns devpath or updated devpath per kernel commandline if the device |
75 | 164 | path is a writable block device, returns None otherwise. | ||
76 | 183 | """ | 165 | """ |
77 | 184 | container = util.is_container() | 166 | container = util.is_container() |
78 | 185 | 167 | ||
79 | @@ -189,12 +171,12 @@ def is_device_path_writable_block(devpath, info, log): | |||
80 | 189 | devpath = util.rootdev_from_cmdline(util.get_cmdline()) | 171 | devpath = util.rootdev_from_cmdline(util.get_cmdline()) |
81 | 190 | if devpath is None: | 172 | if devpath is None: |
82 | 191 | log.warn("Unable to find device '/dev/root'") | 173 | log.warn("Unable to find device '/dev/root'") |
84 | 192 | return False | 174 | return None |
85 | 193 | log.debug("Converted /dev/root to '%s' per kernel cmdline", devpath) | 175 | log.debug("Converted /dev/root to '%s' per kernel cmdline", devpath) |
86 | 194 | 176 | ||
87 | 195 | if devpath == 'overlayroot': | 177 | if devpath == 'overlayroot': |
88 | 196 | log.debug("Not attempting to resize devpath '%s': %s", devpath, info) | 178 | log.debug("Not attempting to resize devpath '%s': %s", devpath, info) |
90 | 197 | return False | 179 | return None |
91 | 198 | 180 | ||
92 | 199 | try: | 181 | try: |
93 | 200 | statret = os.stat(devpath) | 182 | statret = os.stat(devpath) |
94 | @@ -207,7 +189,7 @@ def is_device_path_writable_block(devpath, info, log): | |||
95 | 207 | devpath, info) | 189 | devpath, info) |
96 | 208 | else: | 190 | else: |
97 | 209 | raise exc | 191 | raise exc |
99 | 210 | return False | 192 | return None |
100 | 211 | 193 | ||
101 | 212 | if not stat.S_ISBLK(statret.st_mode) and not stat.S_ISCHR(statret.st_mode): | 194 | if not stat.S_ISBLK(statret.st_mode) and not stat.S_ISCHR(statret.st_mode): |
102 | 213 | if container: | 195 | if container: |
103 | @@ -216,8 +198,8 @@ def is_device_path_writable_block(devpath, info, log): | |||
104 | 216 | else: | 198 | else: |
105 | 217 | log.warn("device '%s' not a block device. cannot resize: %s" % | 199 | log.warn("device '%s' not a block device. cannot resize: %s" % |
106 | 218 | (devpath, info)) | 200 | (devpath, info)) |
109 | 219 | return False | 201 | return None |
110 | 220 | return True | 202 | return devpath # The writable block devpath |
111 | 221 | 203 | ||
112 | 222 | 204 | ||
113 | 223 | def handle(name, cfg, _cloud, log, args): | 205 | def handle(name, cfg, _cloud, log, args): |
114 | @@ -242,8 +224,9 @@ def handle(name, cfg, _cloud, log, args): | |||
115 | 242 | info = "dev=%s mnt_point=%s path=%s" % (devpth, mount_point, resize_what) | 224 | info = "dev=%s mnt_point=%s path=%s" % (devpth, mount_point, resize_what) |
116 | 243 | log.debug("resize_info: %s" % info) | 225 | log.debug("resize_info: %s" % info) |
117 | 244 | 226 | ||
120 | 245 | if not is_device_path_writable_block(devpth, info, log): | 227 | devpth = maybe_get_writable_device_path(devpth, info, log) |
121 | 246 | return | 228 | if not devpth: |
122 | 229 | return # devpath was not a writable block device | ||
123 | 247 | 230 | ||
124 | 248 | resizer = None | 231 | resizer = None |
125 | 249 | if can_skip_resize(fs_type, resize_what, devpth): | 232 | if can_skip_resize(fs_type, resize_what, devpth): |
126 | diff --git a/cloudinit/config/cc_users_groups.py b/cloudinit/config/cc_users_groups.py | |||
127 | index b80d1d3..f363000 100644 | |||
128 | --- a/cloudinit/config/cc_users_groups.py | |||
129 | +++ b/cloudinit/config/cc_users_groups.py | |||
130 | @@ -15,7 +15,8 @@ options, see the ``Including users and groups`` config example. | |||
131 | 15 | Groups to add to the system can be specified as a list under the ``groups`` | 15 | Groups to add to the system can be specified as a list under the ``groups`` |
132 | 16 | key. Each entry in the list should either contain a the group name as a string, | 16 | key. Each entry in the list should either contain a the group name as a string, |
133 | 17 | or a dictionary with the group name as the key and a list of users who should | 17 | or a dictionary with the group name as the key and a list of users who should |
135 | 18 | be members of the group as the value. | 18 | be members of the group as the value. **Note**: Groups are added before users, |
136 | 19 | so any users in a group list must already exist on the system. | ||
137 | 19 | 20 | ||
138 | 20 | The ``users`` config key takes a list of users to configure. The first entry in | 21 | The ``users`` config key takes a list of users to configure. The first entry in |
139 | 21 | this list is used as the default user for the system. To preserve the standard | 22 | this list is used as the default user for the system. To preserve the standard |
140 | diff --git a/cloudinit/config/schema.py b/cloudinit/config/schema.py | |||
141 | index bb291ff..ca7d0d5 100644 | |||
142 | --- a/cloudinit/config/schema.py | |||
143 | +++ b/cloudinit/config/schema.py | |||
144 | @@ -74,7 +74,7 @@ def validate_cloudconfig_schema(config, schema, strict=False): | |||
145 | 74 | try: | 74 | try: |
146 | 75 | from jsonschema import Draft4Validator, FormatChecker | 75 | from jsonschema import Draft4Validator, FormatChecker |
147 | 76 | except ImportError: | 76 | except ImportError: |
149 | 77 | logging.warning( | 77 | logging.debug( |
150 | 78 | 'Ignoring schema validation. python-jsonschema is not present') | 78 | 'Ignoring schema validation. python-jsonschema is not present') |
151 | 79 | return | 79 | return |
152 | 80 | validator = Draft4Validator(schema, format_checker=FormatChecker()) | 80 | validator = Draft4Validator(schema, format_checker=FormatChecker()) |
153 | diff --git a/debian/changelog b/debian/changelog | |||
154 | index 26d1d45..5f0c1ce 100644 | |||
155 | --- a/debian/changelog | |||
156 | +++ b/debian/changelog | |||
157 | @@ -1,3 +1,19 @@ | |||
158 | 1 | cloud-init (17.1-25-g17a15f9e-0ubuntu1~17.10.1) artful-proposed; urgency=medium | ||
159 | 2 | |||
160 | 3 | * New upstream snapshot. | ||
161 | 4 | - resizefs: Fix regression when system booted with root=PARTUUID= | ||
162 | 5 | (LP: #1725067) | ||
163 | 6 | - tools: make yum package installation more reliable | ||
164 | 7 | - citest: fix remaining warnings raised by integration tests. | ||
165 | 8 | - citest: show the class actual class name in results. | ||
166 | 9 | - ntp: fix config module schema to allow empty ntp config | ||
167 | 10 | (LP: #1724951) | ||
168 | 11 | - tools: disable fastestmirror if using proxy [Joshua Powers] | ||
169 | 12 | - schema: Log debug instead of warning when jsonschema is not available. | ||
170 | 13 | (LP: #1724354) | ||
171 | 14 | |||
172 | 15 | -- Chad Smith <chad.smith@canonical.com> Mon, 23 Oct 2017 15:07:35 -0600 | ||
173 | 16 | |||
174 | 1 | cloud-init (17.1-18-gd4f70470-0ubuntu1) artful; urgency=medium | 17 | cloud-init (17.1-18-gd4f70470-0ubuntu1) artful; urgency=medium |
175 | 2 | 18 | ||
176 | 3 | * New upstream snapshot. | 19 | * New upstream snapshot. |
177 | diff --git a/doc/examples/cloud-config-user-groups.txt b/doc/examples/cloud-config-user-groups.txt | |||
178 | index 9c5202f..0554d1f 100644 | |||
179 | --- a/doc/examples/cloud-config-user-groups.txt | |||
180 | +++ b/doc/examples/cloud-config-user-groups.txt | |||
181 | @@ -1,8 +1,8 @@ | |||
182 | 1 | # Add groups to the system | 1 | # Add groups to the system |
185 | 2 | # The following example adds the ubuntu group with members foo and bar and | 2 | # The following example adds the ubuntu group with members 'root' and 'sys' |
186 | 3 | # the group cloud-users. | 3 | # and the empty group cloud-users. |
187 | 4 | groups: | 4 | groups: |
189 | 5 | - ubuntu: [foo,bar] | 5 | - ubuntu: [root,sys] |
190 | 6 | - cloud-users | 6 | - cloud-users |
191 | 7 | 7 | ||
192 | 8 | # Add users to the system. Users are added after groups are added. | 8 | # Add users to the system. Users are added after groups are added. |
193 | diff --git a/tests/cloud_tests/testcases/__init__.py b/tests/cloud_tests/testcases/__init__.py | |||
194 | index 47217ce..a29a092 100644 | |||
195 | --- a/tests/cloud_tests/testcases/__init__.py | |||
196 | +++ b/tests/cloud_tests/testcases/__init__.py | |||
197 | @@ -5,6 +5,7 @@ | |||
198 | 5 | import importlib | 5 | import importlib |
199 | 6 | import inspect | 6 | import inspect |
200 | 7 | import unittest | 7 | import unittest |
201 | 8 | from unittest.util import strclass | ||
202 | 8 | 9 | ||
203 | 9 | from tests.cloud_tests import config | 10 | from tests.cloud_tests import config |
204 | 10 | from tests.cloud_tests.testcases.base import CloudTestCase as base_test | 11 | from tests.cloud_tests.testcases.base import CloudTestCase as base_test |
205 | @@ -37,6 +38,12 @@ def get_suite(test_name, data, conf): | |||
206 | 37 | 38 | ||
207 | 38 | class tmp(test_class): | 39 | class tmp(test_class): |
208 | 39 | 40 | ||
209 | 41 | _realclass = test_class | ||
210 | 42 | |||
211 | 43 | def __str__(self): | ||
212 | 44 | return "%s (%s)" % (self._testMethodName, | ||
213 | 45 | strclass(self._realclass)) | ||
214 | 46 | |||
215 | 40 | @classmethod | 47 | @classmethod |
216 | 41 | def setUpClass(cls): | 48 | def setUpClass(cls): |
217 | 42 | cls.data = data | 49 | cls.data = data |
218 | diff --git a/tests/cloud_tests/testcases/base.py b/tests/cloud_tests/testcases/base.py | |||
219 | index bb545ab..1706f59 100644 | |||
220 | --- a/tests/cloud_tests/testcases/base.py | |||
221 | +++ b/tests/cloud_tests/testcases/base.py | |||
222 | @@ -16,10 +16,6 @@ class CloudTestCase(unittest.TestCase): | |||
223 | 16 | conf = None | 16 | conf = None |
224 | 17 | _cloud_config = None | 17 | _cloud_config = None |
225 | 18 | 18 | ||
226 | 19 | def shortDescription(self): | ||
227 | 20 | """Prevent nose from using docstrings.""" | ||
228 | 21 | return None | ||
229 | 22 | |||
230 | 23 | @property | 19 | @property |
231 | 24 | def cloud_config(self): | 20 | def cloud_config(self): |
232 | 25 | """Get the cloud-config used by the test.""" | 21 | """Get the cloud-config used by the test.""" |
233 | @@ -72,6 +68,14 @@ class CloudTestCase(unittest.TestCase): | |||
234 | 72 | result = self.get_status_data(self.get_data_file('result.json')) | 68 | result = self.get_status_data(self.get_data_file('result.json')) |
235 | 73 | self.assertEqual(len(result['errors']), 0) | 69 | self.assertEqual(len(result['errors']), 0) |
236 | 74 | 70 | ||
237 | 71 | def test_no_warnings_in_log(self): | ||
238 | 72 | """Warnings should not be found in the log.""" | ||
239 | 73 | self.assertEqual( | ||
240 | 74 | [], | ||
241 | 75 | [l for l in self.get_data_file('cloud-init.log').splitlines() | ||
242 | 76 | if 'WARN' in l], | ||
243 | 77 | msg="'WARN' found inside cloud-init.log") | ||
244 | 78 | |||
245 | 75 | 79 | ||
246 | 76 | class PasswordListTest(CloudTestCase): | 80 | class PasswordListTest(CloudTestCase): |
247 | 77 | """Base password test case class.""" | 81 | """Base password test case class.""" |
248 | diff --git a/tests/cloud_tests/testcases/examples/including_user_groups.py b/tests/cloud_tests/testcases/examples/including_user_groups.py | |||
249 | index 67af527..93b7a82 100644 | |||
250 | --- a/tests/cloud_tests/testcases/examples/including_user_groups.py | |||
251 | +++ b/tests/cloud_tests/testcases/examples/including_user_groups.py | |||
252 | @@ -40,4 +40,10 @@ class TestUserGroups(base.CloudTestCase): | |||
253 | 40 | out = self.get_data_file('user_cloudy') | 40 | out = self.get_data_file('user_cloudy') |
254 | 41 | self.assertRegex(out, r'cloudy:x:[0-9]{3,4}:') | 41 | self.assertRegex(out, r'cloudy:x:[0-9]{3,4}:') |
255 | 42 | 42 | ||
256 | 43 | def test_user_root_in_secret(self): | ||
257 | 44 | """Test root user is in 'secret' group.""" | ||
258 | 45 | user, _, groups = self.get_data_file('root_groups').partition(":") | ||
259 | 46 | self.assertIn("secret", groups.split(), | ||
260 | 47 | msg="User root is not in group 'secret'") | ||
261 | 48 | |||
262 | 43 | # vi: ts=4 expandtab | 49 | # vi: ts=4 expandtab |
263 | diff --git a/tests/cloud_tests/testcases/examples/including_user_groups.yaml b/tests/cloud_tests/testcases/examples/including_user_groups.yaml | |||
264 | index 0aa7ad2..469d03c 100644 | |||
265 | --- a/tests/cloud_tests/testcases/examples/including_user_groups.yaml | |||
266 | +++ b/tests/cloud_tests/testcases/examples/including_user_groups.yaml | |||
267 | @@ -8,7 +8,7 @@ cloud_config: | | |||
268 | 8 | #cloud-config | 8 | #cloud-config |
269 | 9 | # Add groups to the system | 9 | # Add groups to the system |
270 | 10 | groups: | 10 | groups: |
272 | 11 | - secret: [foobar,barfoo] | 11 | - secret: [root] |
273 | 12 | - cloud-users | 12 | - cloud-users |
274 | 13 | 13 | ||
275 | 14 | # Add users to the system. Users are added after groups are added. | 14 | # Add users to the system. Users are added after groups are added. |
276 | @@ -24,7 +24,7 @@ cloud_config: | | |||
277 | 24 | - name: barfoo | 24 | - name: barfoo |
278 | 25 | gecos: Bar B. Foo | 25 | gecos: Bar B. Foo |
279 | 26 | sudo: ALL=(ALL) NOPASSWD:ALL | 26 | sudo: ALL=(ALL) NOPASSWD:ALL |
281 | 27 | groups: cloud-users | 27 | groups: [cloud-users, secret] |
282 | 28 | lock_passwd: true | 28 | lock_passwd: true |
283 | 29 | - name: cloudy | 29 | - name: cloudy |
284 | 30 | gecos: Magic Cloud App Daemon User | 30 | gecos: Magic Cloud App Daemon User |
285 | @@ -49,5 +49,8 @@ collect_scripts: | |||
286 | 49 | user_cloudy: | | 49 | user_cloudy: | |
287 | 50 | #!/bin/bash | 50 | #!/bin/bash |
288 | 51 | getent passwd cloudy | 51 | getent passwd cloudy |
289 | 52 | root_groups: | | ||
290 | 53 | #!/bin/bash | ||
291 | 54 | groups root | ||
292 | 52 | 55 | ||
293 | 53 | # vi: ts=4 expandtab | 56 | # vi: ts=4 expandtab |
294 | diff --git a/tests/cloud_tests/testcases/main/command_output_simple.py b/tests/cloud_tests/testcases/main/command_output_simple.py | |||
295 | index fe4c767..857881c 100644 | |||
296 | --- a/tests/cloud_tests/testcases/main/command_output_simple.py | |||
297 | +++ b/tests/cloud_tests/testcases/main/command_output_simple.py | |||
298 | @@ -15,4 +15,20 @@ class TestCommandOutputSimple(base.CloudTestCase): | |||
299 | 15 | data.splitlines()[-1].strip()) | 15 | data.splitlines()[-1].strip()) |
300 | 16 | # TODO: need to test that all stages redirected here | 16 | # TODO: need to test that all stages redirected here |
301 | 17 | 17 | ||
302 | 18 | def test_no_warnings_in_log(self): | ||
303 | 19 | """Warnings should not be found in the log. | ||
304 | 20 | |||
305 | 21 | This class redirected stderr and stdout, so it expects to find | ||
306 | 22 | a warning in cloud-init.log to that effect.""" | ||
307 | 23 | redirect_msg = 'Stdout, stderr changing to' | ||
308 | 24 | warnings = [ | ||
309 | 25 | l for l in self.get_data_file('cloud-init.log').splitlines() | ||
310 | 26 | if 'WARN' in l] | ||
311 | 27 | self.assertEqual( | ||
312 | 28 | [], [w for w in warnings if redirect_msg not in w], | ||
313 | 29 | msg="'WARN' found inside cloud-init.log") | ||
314 | 30 | self.assertEqual( | ||
315 | 31 | 1, len(warnings), | ||
316 | 32 | msg="Did not find %s in cloud-init.log" % redirect_msg) | ||
317 | 33 | |||
318 | 18 | # vi: ts=4 expandtab | 34 | # vi: ts=4 expandtab |
319 | diff --git a/tests/cloud_tests/testcases/modules/ntp.yaml b/tests/cloud_tests/testcases/modules/ntp.yaml | |||
320 | index fbef431..2530d72 100644 | |||
321 | --- a/tests/cloud_tests/testcases/modules/ntp.yaml | |||
322 | +++ b/tests/cloud_tests/testcases/modules/ntp.yaml | |||
323 | @@ -4,8 +4,8 @@ | |||
324 | 4 | cloud_config: | | 4 | cloud_config: | |
325 | 5 | #cloud-config | 5 | #cloud-config |
326 | 6 | ntp: | 6 | ntp: |
329 | 7 | pools: {} | 7 | pools: [] |
330 | 8 | servers: {} | 8 | servers: [] |
331 | 9 | collect_scripts: | 9 | collect_scripts: |
332 | 10 | ntp_installed: | | 10 | ntp_installed: | |
333 | 11 | #!/bin/bash | 11 | #!/bin/bash |
334 | diff --git a/tests/cloud_tests/testcases/modules/user_groups.py b/tests/cloud_tests/testcases/modules/user_groups.py | |||
335 | index 67af527..93b7a82 100644 | |||
336 | --- a/tests/cloud_tests/testcases/modules/user_groups.py | |||
337 | +++ b/tests/cloud_tests/testcases/modules/user_groups.py | |||
338 | @@ -40,4 +40,10 @@ class TestUserGroups(base.CloudTestCase): | |||
339 | 40 | out = self.get_data_file('user_cloudy') | 40 | out = self.get_data_file('user_cloudy') |
340 | 41 | self.assertRegex(out, r'cloudy:x:[0-9]{3,4}:') | 41 | self.assertRegex(out, r'cloudy:x:[0-9]{3,4}:') |
341 | 42 | 42 | ||
342 | 43 | def test_user_root_in_secret(self): | ||
343 | 44 | """Test root user is in 'secret' group.""" | ||
344 | 45 | user, _, groups = self.get_data_file('root_groups').partition(":") | ||
345 | 46 | self.assertIn("secret", groups.split(), | ||
346 | 47 | msg="User root is not in group 'secret'") | ||
347 | 48 | |||
348 | 43 | # vi: ts=4 expandtab | 49 | # vi: ts=4 expandtab |
349 | diff --git a/tests/cloud_tests/testcases/modules/user_groups.yaml b/tests/cloud_tests/testcases/modules/user_groups.yaml | |||
350 | index 71cc9da..22b5d70 100644 | |||
351 | --- a/tests/cloud_tests/testcases/modules/user_groups.yaml | |||
352 | +++ b/tests/cloud_tests/testcases/modules/user_groups.yaml | |||
353 | @@ -7,7 +7,7 @@ cloud_config: | | |||
354 | 7 | #cloud-config | 7 | #cloud-config |
355 | 8 | # Add groups to the system | 8 | # Add groups to the system |
356 | 9 | groups: | 9 | groups: |
358 | 10 | - secret: [foobar,barfoo] | 10 | - secret: [root] |
359 | 11 | - cloud-users | 11 | - cloud-users |
360 | 12 | 12 | ||
361 | 13 | # Add users to the system. Users are added after groups are added. | 13 | # Add users to the system. Users are added after groups are added. |
362 | @@ -23,7 +23,7 @@ cloud_config: | | |||
363 | 23 | - name: barfoo | 23 | - name: barfoo |
364 | 24 | gecos: Bar B. Foo | 24 | gecos: Bar B. Foo |
365 | 25 | sudo: ALL=(ALL) NOPASSWD:ALL | 25 | sudo: ALL=(ALL) NOPASSWD:ALL |
367 | 26 | groups: cloud-users | 26 | groups: [cloud-users, secret] |
368 | 27 | lock_passwd: true | 27 | lock_passwd: true |
369 | 28 | - name: cloudy | 28 | - name: cloudy |
370 | 29 | gecos: Magic Cloud App Daemon User | 29 | gecos: Magic Cloud App Daemon User |
371 | @@ -48,5 +48,8 @@ collect_scripts: | |||
372 | 48 | user_cloudy: | | 48 | user_cloudy: | |
373 | 49 | #!/bin/bash | 49 | #!/bin/bash |
374 | 50 | getent passwd cloudy | 50 | getent passwd cloudy |
375 | 51 | root_groups: | | ||
376 | 52 | #!/bin/bash | ||
377 | 53 | groups root | ||
378 | 51 | 54 | ||
379 | 52 | # vi: ts=4 expandtab | 55 | # vi: ts=4 expandtab |
380 | diff --git a/tests/unittests/test_handler/test_handler_lxd.py b/tests/unittests/test_handler/test_handler_lxd.py | |||
381 | index f132a77..e0d9ab6 100644 | |||
382 | --- a/tests/unittests/test_handler/test_handler_lxd.py | |||
383 | +++ b/tests/unittests/test_handler/test_handler_lxd.py | |||
384 | @@ -5,17 +5,16 @@ from cloudinit.sources import DataSourceNoCloud | |||
385 | 5 | from cloudinit import (distros, helpers, cloud) | 5 | from cloudinit import (distros, helpers, cloud) |
386 | 6 | from cloudinit.tests import helpers as t_help | 6 | from cloudinit.tests import helpers as t_help |
387 | 7 | 7 | ||
388 | 8 | import logging | ||
389 | 9 | |||
390 | 10 | try: | 8 | try: |
391 | 11 | from unittest import mock | 9 | from unittest import mock |
392 | 12 | except ImportError: | 10 | except ImportError: |
393 | 13 | import mock | 11 | import mock |
394 | 14 | 12 | ||
395 | 15 | LOG = logging.getLogger(__name__) | ||
396 | 16 | 13 | ||
397 | 14 | class TestLxd(t_help.CiTestCase): | ||
398 | 15 | |||
399 | 16 | with_logs = True | ||
400 | 17 | 17 | ||
401 | 18 | class TestLxd(t_help.TestCase): | ||
402 | 19 | lxd_cfg = { | 18 | lxd_cfg = { |
403 | 20 | 'lxd': { | 19 | 'lxd': { |
404 | 21 | 'init': { | 20 | 'init': { |
405 | @@ -41,7 +40,7 @@ class TestLxd(t_help.TestCase): | |||
406 | 41 | def test_lxd_init(self, mock_util): | 40 | def test_lxd_init(self, mock_util): |
407 | 42 | cc = self._get_cloud('ubuntu') | 41 | cc = self._get_cloud('ubuntu') |
408 | 43 | mock_util.which.return_value = True | 42 | mock_util.which.return_value = True |
410 | 44 | cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, LOG, []) | 43 | cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, self.logger, []) |
411 | 45 | self.assertTrue(mock_util.which.called) | 44 | self.assertTrue(mock_util.which.called) |
412 | 46 | init_call = mock_util.subp.call_args_list[0][0][0] | 45 | init_call = mock_util.subp.call_args_list[0][0][0] |
413 | 47 | self.assertEqual(init_call, | 46 | self.assertEqual(init_call, |
414 | @@ -55,7 +54,8 @@ class TestLxd(t_help.TestCase): | |||
415 | 55 | cc = self._get_cloud('ubuntu') | 54 | cc = self._get_cloud('ubuntu') |
416 | 56 | cc.distro = mock.MagicMock() | 55 | cc.distro = mock.MagicMock() |
417 | 57 | mock_util.which.return_value = None | 56 | mock_util.which.return_value = None |
419 | 58 | cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, LOG, []) | 57 | cc_lxd.handle('cc_lxd', self.lxd_cfg, cc, self.logger, []) |
420 | 58 | self.assertNotIn('WARN', self.logs.getvalue()) | ||
421 | 59 | self.assertTrue(cc.distro.install_packages.called) | 59 | self.assertTrue(cc.distro.install_packages.called) |
422 | 60 | install_pkg = cc.distro.install_packages.call_args_list[0][0][0] | 60 | install_pkg = cc.distro.install_packages.call_args_list[0][0][0] |
423 | 61 | self.assertEqual(sorted(install_pkg), ['lxd', 'zfs']) | 61 | self.assertEqual(sorted(install_pkg), ['lxd', 'zfs']) |
424 | @@ -64,7 +64,7 @@ class TestLxd(t_help.TestCase): | |||
425 | 64 | def test_no_init_does_nothing(self, mock_util): | 64 | def test_no_init_does_nothing(self, mock_util): |
426 | 65 | cc = self._get_cloud('ubuntu') | 65 | cc = self._get_cloud('ubuntu') |
427 | 66 | cc.distro = mock.MagicMock() | 66 | cc.distro = mock.MagicMock() |
429 | 67 | cc_lxd.handle('cc_lxd', {'lxd': {}}, cc, LOG, []) | 67 | cc_lxd.handle('cc_lxd', {'lxd': {}}, cc, self.logger, []) |
430 | 68 | self.assertFalse(cc.distro.install_packages.called) | 68 | self.assertFalse(cc.distro.install_packages.called) |
431 | 69 | self.assertFalse(mock_util.subp.called) | 69 | self.assertFalse(mock_util.subp.called) |
432 | 70 | 70 | ||
433 | @@ -72,7 +72,7 @@ class TestLxd(t_help.TestCase): | |||
434 | 72 | def test_no_lxd_does_nothing(self, mock_util): | 72 | def test_no_lxd_does_nothing(self, mock_util): |
435 | 73 | cc = self._get_cloud('ubuntu') | 73 | cc = self._get_cloud('ubuntu') |
436 | 74 | cc.distro = mock.MagicMock() | 74 | cc.distro = mock.MagicMock() |
438 | 75 | cc_lxd.handle('cc_lxd', {'package_update': True}, cc, LOG, []) | 75 | cc_lxd.handle('cc_lxd', {'package_update': True}, cc, self.logger, []) |
439 | 76 | self.assertFalse(cc.distro.install_packages.called) | 76 | self.assertFalse(cc.distro.install_packages.called) |
440 | 77 | self.assertFalse(mock_util.subp.called) | 77 | self.assertFalse(mock_util.subp.called) |
441 | 78 | 78 | ||
442 | diff --git a/tests/unittests/test_handler/test_handler_ntp.py b/tests/unittests/test_handler/test_handler_ntp.py | |||
443 | index 4f29124..3abe578 100644 | |||
444 | --- a/tests/unittests/test_handler/test_handler_ntp.py | |||
445 | +++ b/tests/unittests/test_handler/test_handler_ntp.py | |||
446 | @@ -293,23 +293,24 @@ class TestNtp(FilesystemMockingTestCase): | |||
447 | 293 | 293 | ||
448 | 294 | def test_ntp_handler_schema_validation_allows_empty_ntp_config(self): | 294 | def test_ntp_handler_schema_validation_allows_empty_ntp_config(self): |
449 | 295 | """Ntp schema validation allows for an empty ntp: configuration.""" | 295 | """Ntp schema validation allows for an empty ntp: configuration.""" |
451 | 296 | invalid_config = {'ntp': {}} | 296 | valid_empty_configs = [{'ntp': {}}, {'ntp': None}] |
452 | 297 | distro = 'ubuntu' | 297 | distro = 'ubuntu' |
453 | 298 | cc = self._get_cloud(distro) | 298 | cc = self._get_cloud(distro) |
454 | 299 | ntp_conf = os.path.join(self.new_root, 'ntp.conf') | 299 | ntp_conf = os.path.join(self.new_root, 'ntp.conf') |
455 | 300 | with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream: | 300 | with open('{0}.tmpl'.format(ntp_conf), 'wb') as stream: |
456 | 301 | stream.write(NTP_TEMPLATE) | 301 | stream.write(NTP_TEMPLATE) |
459 | 302 | with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf): | 302 | for valid_empty_config in valid_empty_configs: |
460 | 303 | cc_ntp.handle('cc_ntp', invalid_config, cc, None, []) | 303 | with mock.patch('cloudinit.config.cc_ntp.NTP_CONF', ntp_conf): |
461 | 304 | cc_ntp.handle('cc_ntp', valid_empty_config, cc, None, []) | ||
462 | 305 | with open(ntp_conf) as stream: | ||
463 | 306 | content = stream.read() | ||
464 | 307 | default_pools = [ | ||
465 | 308 | "{0}.{1}.pool.ntp.org".format(x, distro) | ||
466 | 309 | for x in range(0, cc_ntp.NR_POOL_SERVERS)] | ||
467 | 310 | self.assertEqual( | ||
468 | 311 | "servers []\npools {0}\n".format(default_pools), | ||
469 | 312 | content) | ||
470 | 304 | self.assertNotIn('Invalid config:', self.logs.getvalue()) | 313 | self.assertNotIn('Invalid config:', self.logs.getvalue()) |
471 | 305 | with open(ntp_conf) as stream: | ||
472 | 306 | content = stream.read() | ||
473 | 307 | default_pools = [ | ||
474 | 308 | "{0}.{1}.pool.ntp.org".format(x, distro) | ||
475 | 309 | for x in range(0, cc_ntp.NR_POOL_SERVERS)] | ||
476 | 310 | self.assertEqual( | ||
477 | 311 | "servers []\npools {0}\n".format(default_pools), | ||
478 | 312 | content) | ||
479 | 313 | 314 | ||
480 | 314 | @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency") | 315 | @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency") |
481 | 315 | def test_ntp_handler_schema_validation_warns_non_string_item_type(self): | 316 | def test_ntp_handler_schema_validation_warns_non_string_item_type(self): |
482 | diff --git a/tests/unittests/test_handler/test_handler_resizefs.py b/tests/unittests/test_handler/test_handler_resizefs.py | |||
483 | index 3e5d436..29d5574 100644 | |||
484 | --- a/tests/unittests/test_handler/test_handler_resizefs.py | |||
485 | +++ b/tests/unittests/test_handler/test_handler_resizefs.py | |||
486 | @@ -1,9 +1,9 @@ | |||
487 | 1 | # This file is part of cloud-init. See LICENSE file for license information. | 1 | # This file is part of cloud-init. See LICENSE file for license information. |
488 | 2 | 2 | ||
489 | 3 | from cloudinit.config.cc_resizefs import ( | 3 | from cloudinit.config.cc_resizefs import ( |
492 | 4 | can_skip_resize, handle, is_device_path_writable_block, | 4 | can_skip_resize, handle, maybe_get_writable_device_path) |
491 | 5 | rootdev_from_cmdline) | ||
493 | 6 | 5 | ||
494 | 6 | from collections import namedtuple | ||
495 | 7 | import logging | 7 | import logging |
496 | 8 | import textwrap | 8 | import textwrap |
497 | 9 | 9 | ||
498 | @@ -138,47 +138,48 @@ class TestRootDevFromCmdline(CiTestCase): | |||
499 | 138 | invalid_cases = [ | 138 | invalid_cases = [ |
500 | 139 | 'BOOT_IMAGE=/adsf asdfa werasef root adf', 'BOOT_IMAGE=/adsf', ''] | 139 | 'BOOT_IMAGE=/adsf asdfa werasef root adf', 'BOOT_IMAGE=/adsf', ''] |
501 | 140 | for case in invalid_cases: | 140 | for case in invalid_cases: |
503 | 141 | self.assertIsNone(rootdev_from_cmdline(case)) | 141 | self.assertIsNone(util.rootdev_from_cmdline(case)) |
504 | 142 | 142 | ||
505 | 143 | def test_rootdev_from_cmdline_with_root_startswith_dev(self): | 143 | def test_rootdev_from_cmdline_with_root_startswith_dev(self): |
506 | 144 | """Return the cmdline root when the path starts with /dev.""" | 144 | """Return the cmdline root when the path starts with /dev.""" |
507 | 145 | self.assertEqual( | 145 | self.assertEqual( |
509 | 146 | '/dev/this', rootdev_from_cmdline('asdf root=/dev/this')) | 146 | '/dev/this', util.rootdev_from_cmdline('asdf root=/dev/this')) |
510 | 147 | 147 | ||
511 | 148 | def test_rootdev_from_cmdline_with_root_without_dev_prefix(self): | 148 | def test_rootdev_from_cmdline_with_root_without_dev_prefix(self): |
512 | 149 | """Add /dev prefix to cmdline root when the path lacks the prefix.""" | 149 | """Add /dev prefix to cmdline root when the path lacks the prefix.""" |
514 | 150 | self.assertEqual('/dev/this', rootdev_from_cmdline('asdf root=this')) | 150 | self.assertEqual( |
515 | 151 | '/dev/this', util.rootdev_from_cmdline('asdf root=this')) | ||
516 | 151 | 152 | ||
517 | 152 | def test_rootdev_from_cmdline_with_root_with_label(self): | 153 | def test_rootdev_from_cmdline_with_root_with_label(self): |
518 | 153 | """When cmdline root contains a LABEL, our root is disk/by-label.""" | 154 | """When cmdline root contains a LABEL, our root is disk/by-label.""" |
519 | 154 | self.assertEqual( | 155 | self.assertEqual( |
520 | 155 | '/dev/disk/by-label/unique', | 156 | '/dev/disk/by-label/unique', |
522 | 156 | rootdev_from_cmdline('asdf root=LABEL=unique')) | 157 | util.rootdev_from_cmdline('asdf root=LABEL=unique')) |
523 | 157 | 158 | ||
524 | 158 | def test_rootdev_from_cmdline_with_root_with_uuid(self): | 159 | def test_rootdev_from_cmdline_with_root_with_uuid(self): |
525 | 159 | """When cmdline root contains a UUID, our root is disk/by-uuid.""" | 160 | """When cmdline root contains a UUID, our root is disk/by-uuid.""" |
526 | 160 | self.assertEqual( | 161 | self.assertEqual( |
527 | 161 | '/dev/disk/by-uuid/adsfdsaf-adsf', | 162 | '/dev/disk/by-uuid/adsfdsaf-adsf', |
529 | 162 | rootdev_from_cmdline('asdf root=UUID=adsfdsaf-adsf')) | 163 | util.rootdev_from_cmdline('asdf root=UUID=adsfdsaf-adsf')) |
530 | 163 | 164 | ||
531 | 164 | 165 | ||
533 | 165 | class TestIsDevicePathWritableBlock(CiTestCase): | 166 | class TestMaybeGetDevicePathAsWritableBlock(CiTestCase): |
534 | 166 | 167 | ||
535 | 167 | with_logs = True | 168 | with_logs = True |
536 | 168 | 169 | ||
538 | 169 | def test_is_device_path_writable_block_false_on_overlayroot(self): | 170 | def test_maybe_get_writable_device_path_none_on_overlayroot(self): |
539 | 170 | """When devpath is overlayroot (on MAAS), is_dev_writable is False.""" | 171 | """When devpath is overlayroot (on MAAS), is_dev_writable is False.""" |
540 | 171 | info = 'does not matter' | 172 | info = 'does not matter' |
542 | 172 | is_writable = wrap_and_call( | 173 | devpath = wrap_and_call( |
543 | 173 | 'cloudinit.config.cc_resizefs.util', | 174 | 'cloudinit.config.cc_resizefs.util', |
544 | 174 | {'is_container': {'return_value': False}}, | 175 | {'is_container': {'return_value': False}}, |
547 | 175 | is_device_path_writable_block, 'overlayroot', info, LOG) | 176 | maybe_get_writable_device_path, 'overlayroot', info, LOG) |
548 | 176 | self.assertFalse(is_writable) | 177 | self.assertIsNone(devpath) |
549 | 177 | self.assertIn( | 178 | self.assertIn( |
550 | 178 | "Not attempting to resize devpath 'overlayroot'", | 179 | "Not attempting to resize devpath 'overlayroot'", |
551 | 179 | self.logs.getvalue()) | 180 | self.logs.getvalue()) |
552 | 180 | 181 | ||
554 | 181 | def test_is_device_path_writable_block_warns_missing_cmdline_root(self): | 182 | def test_maybe_get_writable_device_path_warns_missing_cmdline_root(self): |
555 | 182 | """When root does not exist isn't in the cmdline, log warning.""" | 183 | """When root does not exist isn't in the cmdline, log warning.""" |
556 | 183 | info = 'does not matter' | 184 | info = 'does not matter' |
557 | 184 | 185 | ||
558 | @@ -190,43 +191,43 @@ class TestIsDevicePathWritableBlock(CiTestCase): | |||
559 | 190 | exists_mock_path = 'cloudinit.config.cc_resizefs.os.path.exists' | 191 | exists_mock_path = 'cloudinit.config.cc_resizefs.os.path.exists' |
560 | 191 | with mock.patch(exists_mock_path) as m_exists: | 192 | with mock.patch(exists_mock_path) as m_exists: |
561 | 192 | m_exists.return_value = False | 193 | m_exists.return_value = False |
563 | 193 | is_writable = wrap_and_call( | 194 | devpath = wrap_and_call( |
564 | 194 | 'cloudinit.config.cc_resizefs.util', | 195 | 'cloudinit.config.cc_resizefs.util', |
565 | 195 | {'is_container': {'return_value': False}, | 196 | {'is_container': {'return_value': False}, |
566 | 196 | 'get_mount_info': {'side_effect': fake_mount_info}, | 197 | 'get_mount_info': {'side_effect': fake_mount_info}, |
567 | 197 | 'get_cmdline': {'return_value': 'BOOT_IMAGE=/vmlinuz.efi'}}, | 198 | 'get_cmdline': {'return_value': 'BOOT_IMAGE=/vmlinuz.efi'}}, |
570 | 198 | is_device_path_writable_block, '/dev/root', info, LOG) | 199 | maybe_get_writable_device_path, '/dev/root', info, LOG) |
571 | 199 | self.assertFalse(is_writable) | 200 | self.assertIsNone(devpath) |
572 | 200 | logs = self.logs.getvalue() | 201 | logs = self.logs.getvalue() |
573 | 201 | self.assertIn("WARNING: Unable to find device '/dev/root'", logs) | 202 | self.assertIn("WARNING: Unable to find device '/dev/root'", logs) |
574 | 202 | 203 | ||
576 | 203 | def test_is_device_path_writable_block_does_not_exist(self): | 204 | def test_maybe_get_writable_device_path_does_not_exist(self): |
577 | 204 | """When devpath does not exist, a warning is logged.""" | 205 | """When devpath does not exist, a warning is logged.""" |
578 | 205 | info = 'dev=/I/dont/exist mnt_point=/ path=/dev/none' | 206 | info = 'dev=/I/dont/exist mnt_point=/ path=/dev/none' |
580 | 206 | is_writable = wrap_and_call( | 207 | devpath = wrap_and_call( |
581 | 207 | 'cloudinit.config.cc_resizefs.util', | 208 | 'cloudinit.config.cc_resizefs.util', |
582 | 208 | {'is_container': {'return_value': False}}, | 209 | {'is_container': {'return_value': False}}, |
585 | 209 | is_device_path_writable_block, '/I/dont/exist', info, LOG) | 210 | maybe_get_writable_device_path, '/I/dont/exist', info, LOG) |
586 | 210 | self.assertFalse(is_writable) | 211 | self.assertIsNone(devpath) |
587 | 211 | self.assertIn( | 212 | self.assertIn( |
588 | 212 | "WARNING: Device '/I/dont/exist' did not exist." | 213 | "WARNING: Device '/I/dont/exist' did not exist." |
589 | 213 | ' cannot resize: %s' % info, | 214 | ' cannot resize: %s' % info, |
590 | 214 | self.logs.getvalue()) | 215 | self.logs.getvalue()) |
591 | 215 | 216 | ||
593 | 216 | def test_is_device_path_writable_block_does_not_exist_in_container(self): | 217 | def test_maybe_get_writable_device_path_does_not_exist_in_container(self): |
594 | 217 | """When devpath does not exist in a container, log a debug message.""" | 218 | """When devpath does not exist in a container, log a debug message.""" |
595 | 218 | info = 'dev=/I/dont/exist mnt_point=/ path=/dev/none' | 219 | info = 'dev=/I/dont/exist mnt_point=/ path=/dev/none' |
597 | 219 | is_writable = wrap_and_call( | 220 | devpath = wrap_and_call( |
598 | 220 | 'cloudinit.config.cc_resizefs.util', | 221 | 'cloudinit.config.cc_resizefs.util', |
599 | 221 | {'is_container': {'return_value': True}}, | 222 | {'is_container': {'return_value': True}}, |
602 | 222 | is_device_path_writable_block, '/I/dont/exist', info, LOG) | 223 | maybe_get_writable_device_path, '/I/dont/exist', info, LOG) |
603 | 223 | self.assertFalse(is_writable) | 224 | self.assertIsNone(devpath) |
604 | 224 | self.assertIn( | 225 | self.assertIn( |
605 | 225 | "DEBUG: Device '/I/dont/exist' did not exist in container." | 226 | "DEBUG: Device '/I/dont/exist' did not exist in container." |
606 | 226 | ' cannot resize: %s' % info, | 227 | ' cannot resize: %s' % info, |
607 | 227 | self.logs.getvalue()) | 228 | self.logs.getvalue()) |
608 | 228 | 229 | ||
610 | 229 | def test_is_device_path_writable_block_raises_oserror(self): | 230 | def test_maybe_get_writable_device_path_raises_oserror(self): |
611 | 230 | """When unexpected OSError is raises by os.stat it is reraised.""" | 231 | """When unexpected OSError is raises by os.stat it is reraised.""" |
612 | 231 | info = 'dev=/I/dont/exist mnt_point=/ path=/dev/none' | 232 | info = 'dev=/I/dont/exist mnt_point=/ path=/dev/none' |
613 | 232 | with self.assertRaises(OSError) as context_manager: | 233 | with self.assertRaises(OSError) as context_manager: |
614 | @@ -234,41 +235,63 @@ class TestIsDevicePathWritableBlock(CiTestCase): | |||
615 | 234 | 'cloudinit.config.cc_resizefs', | 235 | 'cloudinit.config.cc_resizefs', |
616 | 235 | {'util.is_container': {'return_value': True}, | 236 | {'util.is_container': {'return_value': True}, |
617 | 236 | 'os.stat': {'side_effect': OSError('Something unexpected')}}, | 237 | 'os.stat': {'side_effect': OSError('Something unexpected')}}, |
619 | 237 | is_device_path_writable_block, '/I/dont/exist', info, LOG) | 238 | maybe_get_writable_device_path, '/I/dont/exist', info, LOG) |
620 | 238 | self.assertEqual( | 239 | self.assertEqual( |
621 | 239 | 'Something unexpected', str(context_manager.exception)) | 240 | 'Something unexpected', str(context_manager.exception)) |
622 | 240 | 241 | ||
624 | 241 | def test_is_device_path_writable_block_non_block(self): | 242 | def test_maybe_get_writable_device_path_non_block(self): |
625 | 242 | """When device is not a block device, emit warning return False.""" | 243 | """When device is not a block device, emit warning return False.""" |
626 | 243 | fake_devpath = self.tmp_path('dev/readwrite') | 244 | fake_devpath = self.tmp_path('dev/readwrite') |
627 | 244 | util.write_file(fake_devpath, '', mode=0o600) # read-write | 245 | util.write_file(fake_devpath, '', mode=0o600) # read-write |
628 | 245 | info = 'dev=/dev/root mnt_point=/ path={0}'.format(fake_devpath) | 246 | info = 'dev=/dev/root mnt_point=/ path={0}'.format(fake_devpath) |
629 | 246 | 247 | ||
631 | 247 | is_writable = wrap_and_call( | 248 | devpath = wrap_and_call( |
632 | 248 | 'cloudinit.config.cc_resizefs.util', | 249 | 'cloudinit.config.cc_resizefs.util', |
633 | 249 | {'is_container': {'return_value': False}}, | 250 | {'is_container': {'return_value': False}}, |
636 | 250 | is_device_path_writable_block, fake_devpath, info, LOG) | 251 | maybe_get_writable_device_path, fake_devpath, info, LOG) |
637 | 251 | self.assertFalse(is_writable) | 252 | self.assertIsNone(devpath) |
638 | 252 | self.assertIn( | 253 | self.assertIn( |
639 | 253 | "WARNING: device '{0}' not a block device. cannot resize".format( | 254 | "WARNING: device '{0}' not a block device. cannot resize".format( |
640 | 254 | fake_devpath), | 255 | fake_devpath), |
641 | 255 | self.logs.getvalue()) | 256 | self.logs.getvalue()) |
642 | 256 | 257 | ||
644 | 257 | def test_is_device_path_writable_block_non_block_on_container(self): | 258 | def test_maybe_get_writable_device_path_non_block_on_container(self): |
645 | 258 | """When device is non-block device in container, emit debug log.""" | 259 | """When device is non-block device in container, emit debug log.""" |
646 | 259 | fake_devpath = self.tmp_path('dev/readwrite') | 260 | fake_devpath = self.tmp_path('dev/readwrite') |
647 | 260 | util.write_file(fake_devpath, '', mode=0o600) # read-write | 261 | util.write_file(fake_devpath, '', mode=0o600) # read-write |
648 | 261 | info = 'dev=/dev/root mnt_point=/ path={0}'.format(fake_devpath) | 262 | info = 'dev=/dev/root mnt_point=/ path={0}'.format(fake_devpath) |
649 | 262 | 263 | ||
651 | 263 | is_writable = wrap_and_call( | 264 | devpath = wrap_and_call( |
652 | 264 | 'cloudinit.config.cc_resizefs.util', | 265 | 'cloudinit.config.cc_resizefs.util', |
653 | 265 | {'is_container': {'return_value': True}}, | 266 | {'is_container': {'return_value': True}}, |
656 | 266 | is_device_path_writable_block, fake_devpath, info, LOG) | 267 | maybe_get_writable_device_path, fake_devpath, info, LOG) |
657 | 267 | self.assertFalse(is_writable) | 268 | self.assertIsNone(devpath) |
658 | 268 | self.assertIn( | 269 | self.assertIn( |
659 | 269 | "DEBUG: device '{0}' not a block device in container." | 270 | "DEBUG: device '{0}' not a block device in container." |
660 | 270 | ' cannot resize'.format(fake_devpath), | 271 | ' cannot resize'.format(fake_devpath), |
661 | 271 | self.logs.getvalue()) | 272 | self.logs.getvalue()) |
662 | 272 | 273 | ||
663 | 274 | def test_maybe_get_writable_device_path_returns_cmdline_root(self): | ||
664 | 275 | """When root device is UUID in kernel commandline, update devpath.""" | ||
665 | 276 | # XXX Long-term we want to use FilesystemMocking test to avoid | ||
666 | 277 | # touching os.stat. | ||
667 | 278 | FakeStat = namedtuple( | ||
668 | 279 | 'FakeStat', ['st_mode', 'st_size', 'st_mtime']) # minimal def. | ||
669 | 280 | info = 'dev=/dev/root mnt_point=/ path=/does/not/matter' | ||
670 | 281 | devpath = wrap_and_call( | ||
671 | 282 | 'cloudinit.config.cc_resizefs', | ||
672 | 283 | {'util.get_cmdline': {'return_value': 'asdf root=UUID=my-uuid'}, | ||
673 | 284 | 'util.is_container': False, | ||
674 | 285 | 'os.path.exists': False, # /dev/root doesn't exist | ||
675 | 286 | 'os.stat': { | ||
676 | 287 | 'return_value': FakeStat(25008, 0, 1)} # char block device | ||
677 | 288 | }, | ||
678 | 289 | maybe_get_writable_device_path, '/dev/root', info, LOG) | ||
679 | 290 | self.assertEqual('/dev/disk/by-uuid/my-uuid', devpath) | ||
680 | 291 | self.assertIn( | ||
681 | 292 | "DEBUG: Converted /dev/root to '/dev/disk/by-uuid/my-uuid'" | ||
682 | 293 | " per kernel cmdline", | ||
683 | 294 | self.logs.getvalue()) | ||
684 | 295 | |||
685 | 273 | 296 | ||
686 | 274 | # vi: ts=4 expandtab | 297 | # vi: ts=4 expandtab |
687 | diff --git a/tests/unittests/test_handler/test_schema.py b/tests/unittests/test_handler/test_schema.py | |||
688 | index b8fc893..648573f 100644 | |||
689 | --- a/tests/unittests/test_handler/test_schema.py | |||
690 | +++ b/tests/unittests/test_handler/test_schema.py | |||
691 | @@ -4,11 +4,12 @@ from cloudinit.config.schema import ( | |||
692 | 4 | CLOUD_CONFIG_HEADER, SchemaValidationError, annotated_cloudconfig_file, | 4 | CLOUD_CONFIG_HEADER, SchemaValidationError, annotated_cloudconfig_file, |
693 | 5 | get_schema_doc, get_schema, validate_cloudconfig_file, | 5 | get_schema_doc, get_schema, validate_cloudconfig_file, |
694 | 6 | validate_cloudconfig_schema, main) | 6 | validate_cloudconfig_schema, main) |
696 | 7 | from cloudinit.util import write_file | 7 | from cloudinit.util import subp, write_file |
697 | 8 | 8 | ||
698 | 9 | from cloudinit.tests.helpers import CiTestCase, mock, skipIf | 9 | from cloudinit.tests.helpers import CiTestCase, mock, skipIf |
699 | 10 | 10 | ||
700 | 11 | from copy import copy | 11 | from copy import copy |
701 | 12 | import os | ||
702 | 12 | from six import StringIO | 13 | from six import StringIO |
703 | 13 | from textwrap import dedent | 14 | from textwrap import dedent |
704 | 14 | from yaml import safe_load | 15 | from yaml import safe_load |
705 | @@ -364,4 +365,38 @@ class MainTest(CiTestCase): | |||
706 | 364 | self.assertIn( | 365 | self.assertIn( |
707 | 365 | 'Valid cloud-config file {0}'.format(myyaml), m_stdout.getvalue()) | 366 | 'Valid cloud-config file {0}'.format(myyaml), m_stdout.getvalue()) |
708 | 366 | 367 | ||
709 | 368 | |||
710 | 369 | class CloudTestsIntegrationTest(CiTestCase): | ||
711 | 370 | """Validate all cloud-config yaml schema provided in integration tests. | ||
712 | 371 | |||
713 | 372 | It is less expensive to have unittests validate schema of all cloud-config | ||
714 | 373 | yaml provided to integration tests, than to run an integration test which | ||
715 | 374 | raises Warnings or errors on invalid cloud-config schema. | ||
716 | 375 | """ | ||
717 | 376 | |||
718 | 377 | @skipIf(_missing_jsonschema_dep, "No python-jsonschema dependency") | ||
719 | 378 | def test_all_integration_test_cloud_config_schema(self): | ||
720 | 379 | """Validate schema of cloud_tests yaml files looking for warnings.""" | ||
721 | 380 | schema = get_schema() | ||
722 | 381 | testsdir = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) | ||
723 | 382 | integration_testdir = os.path.sep.join( | ||
724 | 383 | [testsdir, 'cloud_tests', 'testcases']) | ||
725 | 384 | errors = [] | ||
726 | 385 | out, _ = subp(['find', integration_testdir, '-name', '*yaml']) | ||
727 | 386 | for filename in out.splitlines(): | ||
728 | 387 | test_cfg = safe_load(open(filename)) | ||
729 | 388 | cloud_config = test_cfg.get('cloud_config') | ||
730 | 389 | if cloud_config: | ||
731 | 390 | cloud_config = safe_load( | ||
732 | 391 | cloud_config.replace("#cloud-config\n", "")) | ||
733 | 392 | try: | ||
734 | 393 | validate_cloudconfig_schema( | ||
735 | 394 | cloud_config, schema, strict=True) | ||
736 | 395 | except SchemaValidationError as e: | ||
737 | 396 | errors.append( | ||
738 | 397 | '{0}: {1}'.format( | ||
739 | 398 | filename, e)) | ||
740 | 399 | if errors: | ||
741 | 400 | raise AssertionError(', '.join(errors)) | ||
742 | 401 | |||
743 | 367 | # vi: ts=4 expandtab syntax=python | 402 | # vi: ts=4 expandtab syntax=python |
744 | diff --git a/tools/read-dependencies b/tools/read-dependencies | |||
745 | index 2a64868..421f470 100755 | |||
746 | --- a/tools/read-dependencies | |||
747 | +++ b/tools/read-dependencies | |||
748 | @@ -30,9 +30,35 @@ DISTRO_PKG_TYPE_MAP = { | |||
749 | 30 | 'suse': 'suse' | 30 | 'suse': 'suse' |
750 | 31 | } | 31 | } |
751 | 32 | 32 | ||
753 | 33 | DISTRO_INSTALL_PKG_CMD = { | 33 | MAYBE_RELIABLE_YUM_INSTALL = [ |
754 | 34 | 'sh', '-c', | ||
755 | 35 | """ | ||
756 | 36 | error() { echo "$@" 1>&2; } | ||
757 | 37 | n=0; max=10; | ||
758 | 38 | bcmd="yum install --downloadonly --assumeyes --setopt=keepcache=1" | ||
759 | 39 | while n=$(($n+1)); do | ||
760 | 40 | error ":: running $bcmd $* [$n/$max]" | ||
761 | 41 | $bcmd "$@" | ||
762 | 42 | r=$? | ||
763 | 43 | [ $r -eq 0 ] && break | ||
764 | 44 | [ $n -ge $max ] && { error "gave up on $bcmd"; exit $r; } | ||
765 | 45 | nap=$(($n*5)) | ||
766 | 46 | error ":: failed [$r] ($n/$max). sleeping $nap." | ||
767 | 47 | sleep $nap | ||
768 | 48 | done | ||
769 | 49 | error ":: running yum install --cacheonly --assumeyes $*" | ||
770 | 50 | yum install --cacheonly --assumeyes "$@" | ||
771 | 51 | """, | ||
772 | 52 | 'reliable-yum-install'] | ||
773 | 53 | |||
774 | 54 | DRY_DISTRO_INSTALL_PKG_CMD = { | ||
775 | 34 | 'centos': ['yum', 'install', '--assumeyes'], | 55 | 'centos': ['yum', 'install', '--assumeyes'], |
776 | 35 | 'redhat': ['yum', 'install', '--assumeyes'], | 56 | 'redhat': ['yum', 'install', '--assumeyes'], |
777 | 57 | } | ||
778 | 58 | |||
779 | 59 | DISTRO_INSTALL_PKG_CMD = { | ||
780 | 60 | 'centos': MAYBE_RELIABLE_YUM_INSTALL, | ||
781 | 61 | 'redhat': MAYBE_RELIABLE_YUM_INSTALL, | ||
782 | 36 | 'debian': ['apt', 'install', '-y'], | 62 | 'debian': ['apt', 'install', '-y'], |
783 | 37 | 'ubuntu': ['apt', 'install', '-y'], | 63 | 'ubuntu': ['apt', 'install', '-y'], |
784 | 38 | 'opensuse': ['zypper', 'install'], | 64 | 'opensuse': ['zypper', 'install'], |
785 | @@ -80,8 +106,8 @@ def get_parser(): | |||
786 | 80 | help='Additionally install continuous integration system packages ' | 106 | help='Additionally install continuous integration system packages ' |
787 | 81 | 'required for build and test automation.') | 107 | 'required for build and test automation.') |
788 | 82 | parser.add_argument( | 108 | parser.add_argument( |
791 | 83 | '-v', '--python-version', type=str, dest='python_version', default=None, | 109 | '-v', '--python-version', type=str, dest='python_version', |
792 | 84 | choices=["2", "3"], | 110 | default=None, choices=["2", "3"], |
793 | 85 | help='Override the version of python we want to generate system ' | 111 | help='Override the version of python we want to generate system ' |
794 | 86 | 'package dependencies for. Defaults to the version of python ' | 112 | 'package dependencies for. Defaults to the version of python ' |
795 | 87 | 'this script is called with') | 113 | 'this script is called with') |
796 | @@ -219,10 +245,15 @@ def pkg_install(pkg_list, distro, test_distro=False, dry_run=False): | |||
797 | 219 | '(dryrun)' if dry_run else '', ' '.join(pkg_list))) | 245 | '(dryrun)' if dry_run else '', ' '.join(pkg_list))) |
798 | 220 | install_cmd = [] | 246 | install_cmd = [] |
799 | 221 | if dry_run: | 247 | if dry_run: |
801 | 222 | install_cmd.append('echo') | 248 | install_cmd.append('echo') |
802 | 223 | if os.geteuid() != 0: | 249 | if os.geteuid() != 0: |
803 | 224 | install_cmd.append('sudo') | 250 | install_cmd.append('sudo') |
805 | 225 | install_cmd.extend(DISTRO_INSTALL_PKG_CMD[distro]) | 251 | |
806 | 252 | cmd = DISTRO_INSTALL_PKG_CMD[distro] | ||
807 | 253 | if dry_run and distro in DRY_DISTRO_INSTALL_PKG_CMD: | ||
808 | 254 | cmd = DRY_DISTRO_INSTALL_PKG_CMD[distro] | ||
809 | 255 | install_cmd.extend(cmd) | ||
810 | 256 | |||
811 | 226 | if distro in ['centos', 'redhat']: | 257 | if distro in ['centos', 'redhat']: |
812 | 227 | # CentOS and Redhat need epel-release to access oauthlib and jsonschema | 258 | # CentOS and Redhat need epel-release to access oauthlib and jsonschema |
813 | 228 | subprocess.check_call(install_cmd + ['epel-release']) | 259 | subprocess.check_call(install_cmd + ['epel-release']) |
814 | diff --git a/tools/run-centos b/tools/run-centos | |||
815 | index d44d514..d58ef3e 100755 | |||
816 | --- a/tools/run-centos | |||
817 | +++ b/tools/run-centos | |||
818 | @@ -123,7 +123,22 @@ prep() { | |||
819 | 123 | return 0 | 123 | return 0 |
820 | 124 | fi | 124 | fi |
821 | 125 | error "Installing prep packages: ${needed}" | 125 | error "Installing prep packages: ${needed}" |
823 | 126 | yum install --assumeyes ${needed} | 126 | set -- $needed |
824 | 127 | local n max r | ||
825 | 128 | n=0; max=10; | ||
826 | 129 | bcmd="yum install --downloadonly --assumeyes --setopt=keepcache=1" | ||
827 | 130 | while n=$(($n+1)); do | ||
828 | 131 | error ":: running $bcmd $* [$n/$max]" | ||
829 | 132 | $bcmd "$@" | ||
830 | 133 | r=$? | ||
831 | 134 | [ $r -eq 0 ] && break | ||
832 | 135 | [ $n -ge $max ] && { error "gave up on $bcmd"; exit $r; } | ||
833 | 136 | nap=$(($n*5)) | ||
834 | 137 | error ":: failed [$r] ($n/$max). sleeping $nap." | ||
835 | 138 | sleep $nap | ||
836 | 139 | done | ||
837 | 140 | error ":: running yum install --cacheonly --assumeyes $*" | ||
838 | 141 | yum install --cacheonly --assumeyes "$@" | ||
839 | 127 | } | 142 | } |
840 | 128 | 143 | ||
841 | 129 | start_container() { | 144 | start_container() { |
842 | @@ -153,6 +168,7 @@ start_container() { | |||
843 | 153 | if [ ! -z "${http_proxy-}" ]; then | 168 | if [ ! -z "${http_proxy-}" ]; then |
844 | 154 | debug 1 "configuring proxy ${http_proxy}" | 169 | debug 1 "configuring proxy ${http_proxy}" |
845 | 155 | inside "$name" sh -c "echo proxy=$http_proxy >> /etc/yum.conf" | 170 | inside "$name" sh -c "echo proxy=$http_proxy >> /etc/yum.conf" |
846 | 171 | inside "$name" sed -i s/enabled=1/enabled=0/ /etc/yum/pluginconf.d/fastestmirror.conf | ||
847 | 156 | fi | 172 | fi |
848 | 157 | } | 173 | } |
849 | 158 | 174 |
PASSED: Continuous integration, rev:a9968540aa273bee10b0ca040133c01aa3459793
https://jenkins.ubuntu.com/server/job/cloud-init-ci/434/
Executed test runs:
SUCCESS: Checkout
SUCCESS: Unit & Style Tests
SUCCESS: Ubuntu LTS: Build
SUCCESS: Ubuntu LTS: Integration
SUCCESS: MAAS Compatability Testing
IN_PROGRESS: Declarative: Post Actions
Click here to trigger a rebuild:
https://jenkins.ubuntu.com/server/job/cloud-init-ci/434/rebuild