Merge ~oddbloke/cloud-init/+git/cloud-init:ubuntu/devel into cloud-init:ubuntu/devel
Proposed by: Dan Watkins
Status: Merged
Merged at revision: ba7b33b48d5dfc9f786f55778877922625cac00a
Proposed branch: ~oddbloke/cloud-init/+git/cloud-init:ubuntu/devel
Merge into: cloud-init:ubuntu/devel
Diff against target: 1177 lines (+863/-34), 18 files modified
- cloudinit/apport.py (+1/-0)
- cloudinit/config/cc_set_passwords.py (+34/-19)
- cloudinit/config/cc_ssh.py (+55/-0)
- cloudinit/config/tests/test_ssh.py (+166/-0)
- cloudinit/settings.py (+1/-0)
- cloudinit/sources/DataSourceExoscale.py (+258/-0)
- cloudinit/sources/DataSourceGCE.py (+20/-2)
- cloudinit/sources/__init__.py (+10/-0)
- cloudinit/url_helper.py (+5/-4)
- debian/changelog (+12/-0)
- debian/cloud-init.templates (+3/-3)
- doc/rtd/conf.py (+0/-5)
- doc/rtd/topics/datasources.rst (+1/-0)
- doc/rtd/topics/datasources/exoscale.rst (+68/-0)
- tests/unittests/test_datasource/test_common.py (+2/-0)
- tests/unittests/test_datasource/test_exoscale.py (+203/-0)
- tests/unittests/test_datasource/test_gce.py (+18/-0)
- tools/ds-identify (+6/-1)
Related bugs: (none)
Reviewer: Ryan Harper (Approve)
Review via email: mp+371135@code.launchpad.net
Commit message
Description of the change
Revision history for this message
Ryan Harper (raharper):
review: Approve
Preview Diff
1 | diff --git a/cloudinit/apport.py b/cloudinit/apport.py | |||
2 | index 22cb7fd..003ff1f 100644 | |||
3 | --- a/cloudinit/apport.py | |||
4 | +++ b/cloudinit/apport.py | |||
5 | @@ -23,6 +23,7 @@ KNOWN_CLOUD_NAMES = [ | |||
6 | 23 | 'CloudStack', | 23 | 'CloudStack', |
7 | 24 | 'DigitalOcean', | 24 | 'DigitalOcean', |
8 | 25 | 'GCE - Google Compute Engine', | 25 | 'GCE - Google Compute Engine', |
9 | 26 | 'Exoscale', | ||
10 | 26 | 'Hetzner Cloud', | 27 | 'Hetzner Cloud', |
11 | 27 | 'IBM - (aka SoftLayer or BlueMix)', | 28 | 'IBM - (aka SoftLayer or BlueMix)', |
12 | 28 | 'LXD', | 29 | 'LXD', |
13 | diff --git a/cloudinit/config/cc_set_passwords.py b/cloudinit/config/cc_set_passwords.py | |||
14 | index 4585e4d..cf9b5ab 100755 | |||
15 | --- a/cloudinit/config/cc_set_passwords.py | |||
16 | +++ b/cloudinit/config/cc_set_passwords.py | |||
17 | @@ -9,27 +9,40 @@ | |||
18 | 9 | """ | 9 | """ |
19 | 10 | Set Passwords | 10 | Set Passwords |
20 | 11 | ------------- | 11 | ------------- |
38 | 12 | **Summary:** Set user passwords | 12 | **Summary:** Set user passwords and enable/disable SSH password authentication |
39 | 13 | 13 | ||
40 | 14 | Set system passwords and enable or disable ssh password authentication. | 14 | This module consumes three top-level config keys: ``ssh_pwauth``, ``chpasswd`` |
41 | 15 | The ``chpasswd`` config key accepts a dictionary containing a single one of two | 15 | and ``password``. |
42 | 16 | keys, either ``expire`` or ``list``. If ``expire`` is specified and is set to | 16 | |
43 | 17 | ``false``, then the ``password`` global config key is used as the password for | 17 | The ``ssh_pwauth`` config key determines whether or not sshd will be configured |
44 | 18 | all user accounts. If the ``expire`` key is specified and is set to ``true`` | 18 | to accept password authentication. True values will enable password auth, |
45 | 19 | then user passwords will be expired, preventing the default system passwords | 19 | false values will disable password auth, and the literal string ``unchanged`` |
46 | 20 | from being used. | 20 | will leave it unchanged. Setting no value will also leave the current setting |
47 | 21 | 21 | on-disk unchanged. | |
48 | 22 | If the ``list`` key is provided, a list of | 22 | |
49 | 23 | ``username:password`` pairs can be specified. The usernames specified | 23 | The ``chpasswd`` config key accepts a dictionary containing either or both of |
50 | 24 | must already exist on the system, or have been created using the | 24 | ``expire`` and ``list``. |
51 | 25 | ``cc_users_groups`` module. A password can be randomly generated using | 25 | |
52 | 26 | ``username:RANDOM`` or ``username:R``. A hashed password can be specified | 26 | If the ``list`` key is provided, it should contain a list of |
53 | 27 | using ``username:$6$salt$hash``. Password ssh authentication can be | 27 | ``username:password`` pairs. This can be either a YAML list (of strings), or a |
54 | 28 | enabled, disabled, or left to system defaults using ``ssh_pwauth``. | 28 | multi-line string with one pair per line. Each user will have the |
55 | 29 | corresponding password set. A password can be randomly generated by specifying | ||
56 | 30 | ``RANDOM`` or ``R`` as a user's password. A hashed password, created by a tool | ||
57 | 31 | like ``mkpasswd``, can be specified; a regex | ||
58 | 32 | (``r'\\$(1|2a|2y|5|6)(\\$.+){2}'``) is used to determine if a password value | ||
59 | 33 | should be treated as a hash. | ||
60 | 29 | 34 | ||
61 | 30 | .. note:: | 35 | .. note:: |
64 | 31 | if using ``expire: true`` then a ssh authkey should be specified or it may | 36 | The users specified must already exist on the system. Users will have been |
65 | 32 | not be possible to login to the system | 37 | created by the ``cc_users_groups`` module at this point. |
66 | 38 | |||
67 | 39 | By default, all users on the system will have their passwords expired (meaning | ||
68 | 40 | that they will have to be reset the next time the user logs in). To disable | ||
69 | 41 | this behaviour, set ``expire`` under ``chpasswd`` to a false value. | ||
70 | 42 | |||
71 | 43 | If a ``list`` of user/password pairs is not specified under ``chpasswd``, then | ||
72 | 44 | the value of the ``password`` config key will be used to set the default user's | ||
73 | 45 | password. | ||
74 | 33 | 46 | ||
75 | 34 | **Internal name:** ``cc_set_passwords`` | 47 | **Internal name:** ``cc_set_passwords`` |
76 | 35 | 48 | ||
77 | @@ -160,6 +173,8 @@ def handle(_name, cfg, cloud, log, args): | |||
78 | 160 | hashed_users = [] | 173 | hashed_users = [] |
79 | 161 | randlist = [] | 174 | randlist = [] |
80 | 162 | users = [] | 175 | users = [] |
81 | 176 | # N.B. This regex is included in the documentation (i.e. the module | ||
82 | 177 | # docstring), so any changes to it should be reflected there. | ||
83 | 163 | prog = re.compile(r'\$(1|2a|2y|5|6)(\$.+){2}') | 178 | prog = re.compile(r'\$(1|2a|2y|5|6)(\$.+){2}') |
84 | 164 | for line in plist: | 179 | for line in plist: |
85 | 165 | u, p = line.split(':', 1) | 180 | u, p = line.split(':', 1) |
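As a quick illustration of the hash-detection rule described in the rewritten docstring above, here is a minimal standalone sketch (not part of the proposed change; the usernames and password values are invented) showing how the quoted regex classifies chpasswd list entries:

    import re

    # Regex quoted in the new module documentation above; values matching it
    # are treated as pre-hashed passwords and passed through unchanged.
    HASH_RE = re.compile(r'\$(1|2a|2y|5|6)(\$.+){2}')

    # Illustrative chpasswd "list" entries (username:password pairs).
    entries = [
        'alice:$6$examplesalt$examplehash',  # crypt-style hash: used as-is
        'bob:RANDOM',                        # a random password is generated
        'carol:hunter2',                     # plain text: set directly
    ]

    for entry in entries:
        user, password = entry.split(':', 1)
        kind = 'hashed' if HASH_RE.match(password) else 'plain or RANDOM'
        print('%s -> %s' % (user, kind))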
86 | diff --git a/cloudinit/config/cc_ssh.py b/cloudinit/config/cc_ssh.py | |||
87 | index f8f7cb3..53f6939 100755 | |||
88 | --- a/cloudinit/config/cc_ssh.py | |||
89 | +++ b/cloudinit/config/cc_ssh.py | |||
90 | @@ -91,6 +91,9 @@ public keys. | |||
91 | 91 | ssh_authorized_keys: | 91 | ssh_authorized_keys: |
92 | 92 | - ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAGEA3FSyQwBI6Z+nCSjUU ... | 92 | - ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAGEA3FSyQwBI6Z+nCSjUU ... |
93 | 93 | - ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA3I7VUf2l5gSn5uavROsc5HRDpZ ... | 93 | - ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA3I7VUf2l5gSn5uavROsc5HRDpZ ... |
94 | 94 | ssh_publish_hostkeys: | ||
95 | 95 | enabled: <true/false> (Defaults to true) | ||
96 | 96 | blacklist: <list of key types> (Defaults to [dsa]) | ||
97 | 94 | """ | 97 | """ |
98 | 95 | 98 | ||
99 | 96 | import glob | 99 | import glob |
100 | @@ -104,6 +107,10 @@ from cloudinit import util | |||
101 | 104 | 107 | ||
102 | 105 | GENERATE_KEY_NAMES = ['rsa', 'dsa', 'ecdsa', 'ed25519'] | 108 | GENERATE_KEY_NAMES = ['rsa', 'dsa', 'ecdsa', 'ed25519'] |
103 | 106 | KEY_FILE_TPL = '/etc/ssh/ssh_host_%s_key' | 109 | KEY_FILE_TPL = '/etc/ssh/ssh_host_%s_key' |
104 | 110 | PUBLISH_HOST_KEYS = True | ||
105 | 111 | # Don't publish the dsa hostkey by default since OpenSSH recommends not using | ||
106 | 112 | # it. | ||
107 | 113 | HOST_KEY_PUBLISH_BLACKLIST = ['dsa'] | ||
108 | 107 | 114 | ||
109 | 108 | CONFIG_KEY_TO_FILE = {} | 115 | CONFIG_KEY_TO_FILE = {} |
110 | 109 | PRIV_TO_PUB = {} | 116 | PRIV_TO_PUB = {} |
111 | @@ -176,6 +183,23 @@ def handle(_name, cfg, cloud, log, _args): | |||
112 | 176 | util.logexc(log, "Failed generating key type %s to " | 183 | util.logexc(log, "Failed generating key type %s to " |
113 | 177 | "file %s", keytype, keyfile) | 184 | "file %s", keytype, keyfile) |
114 | 178 | 185 | ||
115 | 186 | if "ssh_publish_hostkeys" in cfg: | ||
116 | 187 | host_key_blacklist = util.get_cfg_option_list( | ||
117 | 188 | cfg["ssh_publish_hostkeys"], "blacklist", | ||
118 | 189 | HOST_KEY_PUBLISH_BLACKLIST) | ||
119 | 190 | publish_hostkeys = util.get_cfg_option_bool( | ||
120 | 191 | cfg["ssh_publish_hostkeys"], "enabled", PUBLISH_HOST_KEYS) | ||
121 | 192 | else: | ||
122 | 193 | host_key_blacklist = HOST_KEY_PUBLISH_BLACKLIST | ||
123 | 194 | publish_hostkeys = PUBLISH_HOST_KEYS | ||
124 | 195 | |||
125 | 196 | if publish_hostkeys: | ||
126 | 197 | hostkeys = get_public_host_keys(blacklist=host_key_blacklist) | ||
127 | 198 | try: | ||
128 | 199 | cloud.datasource.publish_host_keys(hostkeys) | ||
129 | 200 | except Exception as e: | ||
130 | 201 | util.logexc(log, "Publishing host keys failed!") | ||
131 | 202 | |||
132 | 179 | try: | 203 | try: |
133 | 180 | (users, _groups) = ug_util.normalize_users_groups(cfg, cloud.distro) | 204 | (users, _groups) = ug_util.normalize_users_groups(cfg, cloud.distro) |
134 | 181 | (user, _user_config) = ug_util.extract_default(users) | 205 | (user, _user_config) = ug_util.extract_default(users) |
135 | @@ -209,4 +233,35 @@ def apply_credentials(keys, user, disable_root, disable_root_opts): | |||
136 | 209 | 233 | ||
137 | 210 | ssh_util.setup_user_keys(keys, 'root', options=key_prefix) | 234 | ssh_util.setup_user_keys(keys, 'root', options=key_prefix) |
138 | 211 | 235 | ||
139 | 236 | |||
140 | 237 | def get_public_host_keys(blacklist=None): | ||
141 | 238 | """Read host keys from /etc/ssh/*.pub files and return them as a list. | ||
142 | 239 | |||
143 | 240 | @param blacklist: List of key types to ignore. e.g. ['dsa', 'rsa'] | ||
144 | 241 | @returns: List of keys, each formatted as a two-element tuple. | ||
145 | 242 | e.g. [('ssh-rsa', 'AAAAB3Nz...'), ('ssh-ed25519', 'AAAAC3Nx...')] | ||
146 | 243 | """ | ||
147 | 244 | public_key_file_tmpl = '%s.pub' % (KEY_FILE_TPL,) | ||
148 | 245 | key_list = [] | ||
149 | 246 | blacklist_files = [] | ||
150 | 247 | if blacklist: | ||
151 | 248 | # Convert blacklist to filenames: | ||
152 | 249 | # 'dsa' -> '/etc/ssh/ssh_host_dsa_key.pub' | ||
153 | 250 | blacklist_files = [public_key_file_tmpl % (key_type,) | ||
154 | 251 | for key_type in blacklist] | ||
155 | 252 | # Get list of public key files and filter out blacklisted files. | ||
156 | 253 | file_list = [hostfile for hostfile | ||
157 | 254 | in glob.glob(public_key_file_tmpl % ('*',)) | ||
158 | 255 | if hostfile not in blacklist_files] | ||
159 | 256 | |||
160 | 257 | # Read host key files, retrieve first two fields as a tuple and | ||
161 | 258 | # append that tuple to key_list. | ||
162 | 259 | for file_name in file_list: | ||
163 | 260 | file_contents = util.load_file(file_name) | ||
164 | 261 | key_data = file_contents.split() | ||
165 | 262 | if key_data and len(key_data) > 1: | ||
166 | 263 | key_list.append(tuple(key_data[:2])) | ||
167 | 264 | return key_list | ||
168 | 265 | |||
169 | 266 | |||
170 | 212 | # vi: ts=4 expandtab | 267 | # vi: ts=4 expandtab |
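The ssh_publish_hostkeys handling added above resolves its effective settings from the user config before calling the datasource hook. A small dependency-free sketch of that resolution (defaults copied from the diff; the example configs are illustrative):

    # Defaults mirrored from cc_ssh.py above.
    PUBLISH_HOST_KEYS = True
    HOST_KEY_PUBLISH_BLACKLIST = ['dsa']

    def resolve_publish_settings(cfg):
        """Sketch of the settings resolution performed in handle() above."""
        section = cfg.get('ssh_publish_hostkeys')
        if section is not None:
            enabled = bool(section.get('enabled', PUBLISH_HOST_KEYS))
            blacklist = section.get('blacklist', HOST_KEY_PUBLISH_BLACKLIST)
        else:
            enabled, blacklist = PUBLISH_HOST_KEYS, HOST_KEY_PUBLISH_BLACKLIST
        return enabled, blacklist

    print(resolve_publish_settings({}))
    # -> (True, ['dsa']): publish everything except the dsa host key
    print(resolve_publish_settings({'ssh_publish_hostkeys': {'enabled': False}}))
    # -> (False, ['dsa']): publishing disabled entirely
    print(resolve_publish_settings(
        {'ssh_publish_hostkeys': {'enabled': True, 'blacklist': ['dsa', 'rsa']}}))
    # -> (True, ['dsa', 'rsa']): also withhold the rsa key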
171 | diff --git a/cloudinit/config/tests/test_ssh.py b/cloudinit/config/tests/test_ssh.py | |||
172 | index c8a4271..e778984 100644 | |||
173 | --- a/cloudinit/config/tests/test_ssh.py | |||
174 | +++ b/cloudinit/config/tests/test_ssh.py | |||
175 | @@ -1,5 +1,6 @@ | |||
176 | 1 | # This file is part of cloud-init. See LICENSE file for license information. | 1 | # This file is part of cloud-init. See LICENSE file for license information. |
177 | 2 | 2 | ||
178 | 3 | import os.path | ||
179 | 3 | 4 | ||
180 | 4 | from cloudinit.config import cc_ssh | 5 | from cloudinit.config import cc_ssh |
181 | 5 | from cloudinit import ssh_util | 6 | from cloudinit import ssh_util |
182 | @@ -12,6 +13,25 @@ MODPATH = "cloudinit.config.cc_ssh." | |||
183 | 12 | class TestHandleSsh(CiTestCase): | 13 | class TestHandleSsh(CiTestCase): |
184 | 13 | """Test cc_ssh handling of ssh config.""" | 14 | """Test cc_ssh handling of ssh config.""" |
185 | 14 | 15 | ||
186 | 16 | def _publish_hostkey_test_setup(self): | ||
187 | 17 | self.test_hostkeys = { | ||
188 | 18 | 'dsa': ('ssh-dss', 'AAAAB3NzaC1kc3MAAACB'), | ||
189 | 19 | 'ecdsa': ('ecdsa-sha2-nistp256', 'AAAAE2VjZ'), | ||
190 | 20 | 'ed25519': ('ssh-ed25519', 'AAAAC3NzaC1lZDI'), | ||
191 | 21 | 'rsa': ('ssh-rsa', 'AAAAB3NzaC1yc2EAAA'), | ||
192 | 22 | } | ||
193 | 23 | self.test_hostkey_files = [] | ||
194 | 24 | hostkey_tmpdir = self.tmp_dir() | ||
195 | 25 | for key_type in ['dsa', 'ecdsa', 'ed25519', 'rsa']: | ||
196 | 26 | key_data = self.test_hostkeys[key_type] | ||
197 | 27 | filename = 'ssh_host_%s_key.pub' % key_type | ||
198 | 28 | filepath = os.path.join(hostkey_tmpdir, filename) | ||
199 | 29 | self.test_hostkey_files.append(filepath) | ||
200 | 30 | with open(filepath, 'w') as f: | ||
201 | 31 | f.write(' '.join(key_data)) | ||
202 | 32 | |||
203 | 33 | cc_ssh.KEY_FILE_TPL = os.path.join(hostkey_tmpdir, 'ssh_host_%s_key') | ||
204 | 34 | |||
205 | 15 | def test_apply_credentials_with_user(self, m_setup_keys): | 35 | def test_apply_credentials_with_user(self, m_setup_keys): |
206 | 16 | """Apply keys for the given user and root.""" | 36 | """Apply keys for the given user and root.""" |
207 | 17 | keys = ["key1"] | 37 | keys = ["key1"] |
208 | @@ -64,6 +84,7 @@ class TestHandleSsh(CiTestCase): | |||
209 | 64 | # Mock os.path.exits to True to short-circuit the key writing logic | 84 | # Mock os.path.exits to True to short-circuit the key writing logic |
210 | 65 | m_path_exists.return_value = True | 85 | m_path_exists.return_value = True |
211 | 66 | m_nug.return_value = ([], {}) | 86 | m_nug.return_value = ([], {}) |
212 | 87 | cc_ssh.PUBLISH_HOST_KEYS = False | ||
213 | 67 | cloud = self.tmp_cloud( | 88 | cloud = self.tmp_cloud( |
214 | 68 | distro='ubuntu', metadata={'public-keys': keys}) | 89 | distro='ubuntu', metadata={'public-keys': keys}) |
215 | 69 | cc_ssh.handle("name", cfg, cloud, None, None) | 90 | cc_ssh.handle("name", cfg, cloud, None, None) |
216 | @@ -149,3 +170,148 @@ class TestHandleSsh(CiTestCase): | |||
217 | 149 | self.assertEqual([mock.call(set(keys), user), | 170 | self.assertEqual([mock.call(set(keys), user), |
218 | 150 | mock.call(set(keys), "root", options="")], | 171 | mock.call(set(keys), "root", options="")], |
219 | 151 | m_setup_keys.call_args_list) | 172 | m_setup_keys.call_args_list) |
220 | 173 | |||
221 | 174 | @mock.patch(MODPATH + "glob.glob") | ||
222 | 175 | @mock.patch(MODPATH + "ug_util.normalize_users_groups") | ||
223 | 176 | @mock.patch(MODPATH + "os.path.exists") | ||
224 | 177 | def test_handle_publish_hostkeys_default( | ||
225 | 178 | self, m_path_exists, m_nug, m_glob, m_setup_keys): | ||
226 | 179 | """Test handle with various configs for ssh_publish_hostkeys.""" | ||
227 | 180 | self._publish_hostkey_test_setup() | ||
228 | 181 | cc_ssh.PUBLISH_HOST_KEYS = True | ||
229 | 182 | keys = ["key1"] | ||
230 | 183 | user = "clouduser" | ||
231 | 184 | # Return no matching keys for first glob, test keys for second. | ||
232 | 185 | m_glob.side_effect = iter([ | ||
233 | 186 | [], | ||
234 | 187 | self.test_hostkey_files, | ||
235 | 188 | ]) | ||
236 | 189 | # Mock os.path.exits to True to short-circuit the key writing logic | ||
237 | 190 | m_path_exists.return_value = True | ||
238 | 191 | m_nug.return_value = ({user: {"default": user}}, {}) | ||
239 | 192 | cloud = self.tmp_cloud( | ||
240 | 193 | distro='ubuntu', metadata={'public-keys': keys}) | ||
241 | 194 | cloud.datasource.publish_host_keys = mock.Mock() | ||
242 | 195 | |||
243 | 196 | cfg = {} | ||
244 | 197 | expected_call = [self.test_hostkeys[key_type] for key_type | ||
245 | 198 | in ['ecdsa', 'ed25519', 'rsa']] | ||
246 | 199 | cc_ssh.handle("name", cfg, cloud, None, None) | ||
247 | 200 | self.assertEqual([mock.call(expected_call)], | ||
248 | 201 | cloud.datasource.publish_host_keys.call_args_list) | ||
249 | 202 | |||
250 | 203 | @mock.patch(MODPATH + "glob.glob") | ||
251 | 204 | @mock.patch(MODPATH + "ug_util.normalize_users_groups") | ||
252 | 205 | @mock.patch(MODPATH + "os.path.exists") | ||
253 | 206 | def test_handle_publish_hostkeys_config_enable( | ||
254 | 207 | self, m_path_exists, m_nug, m_glob, m_setup_keys): | ||
255 | 208 | """Test handle with various configs for ssh_publish_hostkeys.""" | ||
256 | 209 | self._publish_hostkey_test_setup() | ||
257 | 210 | cc_ssh.PUBLISH_HOST_KEYS = False | ||
258 | 211 | keys = ["key1"] | ||
259 | 212 | user = "clouduser" | ||
260 | 213 | # Return no matching keys for first glob, test keys for second. | ||
261 | 214 | m_glob.side_effect = iter([ | ||
262 | 215 | [], | ||
263 | 216 | self.test_hostkey_files, | ||
264 | 217 | ]) | ||
265 | 218 | # Mock os.path.exits to True to short-circuit the key writing logic | ||
266 | 219 | m_path_exists.return_value = True | ||
267 | 220 | m_nug.return_value = ({user: {"default": user}}, {}) | ||
268 | 221 | cloud = self.tmp_cloud( | ||
269 | 222 | distro='ubuntu', metadata={'public-keys': keys}) | ||
270 | 223 | cloud.datasource.publish_host_keys = mock.Mock() | ||
271 | 224 | |||
272 | 225 | cfg = {'ssh_publish_hostkeys': {'enabled': True}} | ||
273 | 226 | expected_call = [self.test_hostkeys[key_type] for key_type | ||
274 | 227 | in ['ecdsa', 'ed25519', 'rsa']] | ||
275 | 228 | cc_ssh.handle("name", cfg, cloud, None, None) | ||
276 | 229 | self.assertEqual([mock.call(expected_call)], | ||
277 | 230 | cloud.datasource.publish_host_keys.call_args_list) | ||
278 | 231 | |||
279 | 232 | @mock.patch(MODPATH + "glob.glob") | ||
280 | 233 | @mock.patch(MODPATH + "ug_util.normalize_users_groups") | ||
281 | 234 | @mock.patch(MODPATH + "os.path.exists") | ||
282 | 235 | def test_handle_publish_hostkeys_config_disable( | ||
283 | 236 | self, m_path_exists, m_nug, m_glob, m_setup_keys): | ||
284 | 237 | """Test handle with various configs for ssh_publish_hostkeys.""" | ||
285 | 238 | self._publish_hostkey_test_setup() | ||
286 | 239 | cc_ssh.PUBLISH_HOST_KEYS = True | ||
287 | 240 | keys = ["key1"] | ||
288 | 241 | user = "clouduser" | ||
289 | 242 | # Return no matching keys for first glob, test keys for second. | ||
290 | 243 | m_glob.side_effect = iter([ | ||
291 | 244 | [], | ||
292 | 245 | self.test_hostkey_files, | ||
293 | 246 | ]) | ||
294 | 247 | # Mock os.path.exits to True to short-circuit the key writing logic | ||
295 | 248 | m_path_exists.return_value = True | ||
296 | 249 | m_nug.return_value = ({user: {"default": user}}, {}) | ||
297 | 250 | cloud = self.tmp_cloud( | ||
298 | 251 | distro='ubuntu', metadata={'public-keys': keys}) | ||
299 | 252 | cloud.datasource.publish_host_keys = mock.Mock() | ||
300 | 253 | |||
301 | 254 | cfg = {'ssh_publish_hostkeys': {'enabled': False}} | ||
302 | 255 | cc_ssh.handle("name", cfg, cloud, None, None) | ||
303 | 256 | self.assertFalse(cloud.datasource.publish_host_keys.call_args_list) | ||
304 | 257 | cloud.datasource.publish_host_keys.assert_not_called() | ||
305 | 258 | |||
306 | 259 | @mock.patch(MODPATH + "glob.glob") | ||
307 | 260 | @mock.patch(MODPATH + "ug_util.normalize_users_groups") | ||
308 | 261 | @mock.patch(MODPATH + "os.path.exists") | ||
309 | 262 | def test_handle_publish_hostkeys_config_blacklist( | ||
310 | 263 | self, m_path_exists, m_nug, m_glob, m_setup_keys): | ||
311 | 264 | """Test handle with various configs for ssh_publish_hostkeys.""" | ||
312 | 265 | self._publish_hostkey_test_setup() | ||
313 | 266 | cc_ssh.PUBLISH_HOST_KEYS = True | ||
314 | 267 | keys = ["key1"] | ||
315 | 268 | user = "clouduser" | ||
316 | 269 | # Return no matching keys for first glob, test keys for second. | ||
317 | 270 | m_glob.side_effect = iter([ | ||
318 | 271 | [], | ||
319 | 272 | self.test_hostkey_files, | ||
320 | 273 | ]) | ||
321 | 274 | # Mock os.path.exits to True to short-circuit the key writing logic | ||
322 | 275 | m_path_exists.return_value = True | ||
323 | 276 | m_nug.return_value = ({user: {"default": user}}, {}) | ||
324 | 277 | cloud = self.tmp_cloud( | ||
325 | 278 | distro='ubuntu', metadata={'public-keys': keys}) | ||
326 | 279 | cloud.datasource.publish_host_keys = mock.Mock() | ||
327 | 280 | |||
328 | 281 | cfg = {'ssh_publish_hostkeys': {'enabled': True, | ||
329 | 282 | 'blacklist': ['dsa', 'rsa']}} | ||
330 | 283 | expected_call = [self.test_hostkeys[key_type] for key_type | ||
331 | 284 | in ['ecdsa', 'ed25519']] | ||
332 | 285 | cc_ssh.handle("name", cfg, cloud, None, None) | ||
333 | 286 | self.assertEqual([mock.call(expected_call)], | ||
334 | 287 | cloud.datasource.publish_host_keys.call_args_list) | ||
335 | 288 | |||
336 | 289 | @mock.patch(MODPATH + "glob.glob") | ||
337 | 290 | @mock.patch(MODPATH + "ug_util.normalize_users_groups") | ||
338 | 291 | @mock.patch(MODPATH + "os.path.exists") | ||
339 | 292 | def test_handle_publish_hostkeys_empty_blacklist( | ||
340 | 293 | self, m_path_exists, m_nug, m_glob, m_setup_keys): | ||
341 | 294 | """Test handle with various configs for ssh_publish_hostkeys.""" | ||
342 | 295 | self._publish_hostkey_test_setup() | ||
343 | 296 | cc_ssh.PUBLISH_HOST_KEYS = True | ||
344 | 297 | keys = ["key1"] | ||
345 | 298 | user = "clouduser" | ||
346 | 299 | # Return no matching keys for first glob, test keys for second. | ||
347 | 300 | m_glob.side_effect = iter([ | ||
348 | 301 | [], | ||
349 | 302 | self.test_hostkey_files, | ||
350 | 303 | ]) | ||
351 | 304 | # Mock os.path.exits to True to short-circuit the key writing logic | ||
352 | 305 | m_path_exists.return_value = True | ||
353 | 306 | m_nug.return_value = ({user: {"default": user}}, {}) | ||
354 | 307 | cloud = self.tmp_cloud( | ||
355 | 308 | distro='ubuntu', metadata={'public-keys': keys}) | ||
356 | 309 | cloud.datasource.publish_host_keys = mock.Mock() | ||
357 | 310 | |||
358 | 311 | cfg = {'ssh_publish_hostkeys': {'enabled': True, | ||
359 | 312 | 'blacklist': []}} | ||
360 | 313 | expected_call = [self.test_hostkeys[key_type] for key_type | ||
361 | 314 | in ['dsa', 'ecdsa', 'ed25519', 'rsa']] | ||
362 | 315 | cc_ssh.handle("name", cfg, cloud, None, None) | ||
363 | 316 | self.assertEqual([mock.call(expected_call)], | ||
364 | 317 | cloud.datasource.publish_host_keys.call_args_list) | ||
365 | diff --git a/cloudinit/settings.py b/cloudinit/settings.py | |||
366 | index b1ebaad..2060d81 100644 | |||
367 | --- a/cloudinit/settings.py | |||
368 | +++ b/cloudinit/settings.py | |||
369 | @@ -39,6 +39,7 @@ CFG_BUILTIN = { | |||
370 | 39 | 'Hetzner', | 39 | 'Hetzner', |
371 | 40 | 'IBMCloud', | 40 | 'IBMCloud', |
372 | 41 | 'Oracle', | 41 | 'Oracle', |
373 | 42 | 'Exoscale', | ||
374 | 42 | # At the end to act as a 'catch' when none of the above work... | 43 | # At the end to act as a 'catch' when none of the above work... |
375 | 43 | 'None', | 44 | 'None', |
376 | 44 | ], | 45 | ], |
377 | diff --git a/cloudinit/sources/DataSourceExoscale.py b/cloudinit/sources/DataSourceExoscale.py | |||
378 | 45 | new file mode 100644 | 46 | new file mode 100644 |
379 | index 0000000..52e7f6f | |||
380 | --- /dev/null | |||
381 | +++ b/cloudinit/sources/DataSourceExoscale.py | |||
382 | @@ -0,0 +1,258 @@ | |||
383 | 1 | # Author: Mathieu Corbin <mathieu.corbin@exoscale.com> | ||
384 | 2 | # Author: Christopher Glass <christopher.glass@exoscale.com> | ||
385 | 3 | # | ||
386 | 4 | # This file is part of cloud-init. See LICENSE file for license information. | ||
387 | 5 | |||
388 | 6 | from cloudinit import ec2_utils as ec2 | ||
389 | 7 | from cloudinit import log as logging | ||
390 | 8 | from cloudinit import sources | ||
391 | 9 | from cloudinit import url_helper | ||
392 | 10 | from cloudinit import util | ||
393 | 11 | |||
394 | 12 | LOG = logging.getLogger(__name__) | ||
395 | 13 | |||
396 | 14 | METADATA_URL = "http://169.254.169.254" | ||
397 | 15 | API_VERSION = "1.0" | ||
398 | 16 | PASSWORD_SERVER_PORT = 8080 | ||
399 | 17 | |||
400 | 18 | URL_TIMEOUT = 10 | ||
401 | 19 | URL_RETRIES = 6 | ||
402 | 20 | |||
403 | 21 | EXOSCALE_DMI_NAME = "Exoscale" | ||
404 | 22 | |||
405 | 23 | BUILTIN_DS_CONFIG = { | ||
406 | 24 | # We run the set password config module on every boot in order to enable | ||
407 | 25 | # resetting the instance's password via the exoscale console (and a | ||
408 | 26 | # subsequent instance reboot). | ||
409 | 27 | 'cloud_config_modules': [["set-passwords", "always"]] | ||
410 | 28 | } | ||
411 | 29 | |||
412 | 30 | |||
413 | 31 | class DataSourceExoscale(sources.DataSource): | ||
414 | 32 | |||
415 | 33 | dsname = 'Exoscale' | ||
416 | 34 | |||
417 | 35 | def __init__(self, sys_cfg, distro, paths): | ||
418 | 36 | super(DataSourceExoscale, self).__init__(sys_cfg, distro, paths) | ||
419 | 37 | LOG.debug("Initializing the Exoscale datasource") | ||
420 | 38 | |||
421 | 39 | self.metadata_url = self.ds_cfg.get('metadata_url', METADATA_URL) | ||
422 | 40 | self.api_version = self.ds_cfg.get('api_version', API_VERSION) | ||
423 | 41 | self.password_server_port = int( | ||
424 | 42 | self.ds_cfg.get('password_server_port', PASSWORD_SERVER_PORT)) | ||
425 | 43 | self.url_timeout = self.ds_cfg.get('timeout', URL_TIMEOUT) | ||
426 | 44 | self.url_retries = self.ds_cfg.get('retries', URL_RETRIES) | ||
427 | 45 | |||
428 | 46 | self.extra_config = BUILTIN_DS_CONFIG | ||
429 | 47 | |||
430 | 48 | def wait_for_metadata_service(self): | ||
431 | 49 | """Wait for the metadata service to be reachable.""" | ||
432 | 50 | |||
433 | 51 | metadata_url = "{}/{}/meta-data/instance-id".format( | ||
434 | 52 | self.metadata_url, self.api_version) | ||
435 | 53 | |||
436 | 54 | url = url_helper.wait_for_url( | ||
437 | 55 | urls=[metadata_url], | ||
438 | 56 | max_wait=self.url_max_wait, | ||
439 | 57 | timeout=self.url_timeout, | ||
440 | 58 | status_cb=LOG.critical) | ||
441 | 59 | |||
442 | 60 | return bool(url) | ||
443 | 61 | |||
444 | 62 | def crawl_metadata(self): | ||
445 | 63 | """ | ||
446 | 64 | Crawl the metadata service when available. | ||
447 | 65 | |||
448 | 66 | @returns: Dictionary of crawled metadata content. | ||
449 | 67 | """ | ||
450 | 68 | metadata_ready = util.log_time( | ||
451 | 69 | logfunc=LOG.info, | ||
452 | 70 | msg='waiting for the metadata service', | ||
453 | 71 | func=self.wait_for_metadata_service) | ||
454 | 72 | |||
455 | 73 | if not metadata_ready: | ||
456 | 74 | return {} | ||
457 | 75 | |||
458 | 76 | return read_metadata(self.metadata_url, self.api_version, | ||
459 | 77 | self.password_server_port, self.url_timeout, | ||
460 | 78 | self.url_retries) | ||
461 | 79 | |||
462 | 80 | def _get_data(self): | ||
463 | 81 | """Fetch the user data, the metadata and the VM password | ||
464 | 82 | from the metadata service. | ||
465 | 83 | |||
466 | 84 | Please refer to the datasource documentation for details on how the | ||
467 | 85 | metadata server and password server are crawled. | ||
468 | 86 | """ | ||
469 | 87 | if not self._is_platform_viable(): | ||
470 | 88 | return False | ||
471 | 89 | |||
472 | 90 | data = util.log_time( | ||
473 | 91 | logfunc=LOG.debug, | ||
474 | 92 | msg='Crawl of metadata service', | ||
475 | 93 | func=self.crawl_metadata) | ||
476 | 94 | |||
477 | 95 | if not data: | ||
478 | 96 | return False | ||
479 | 97 | |||
480 | 98 | self.userdata_raw = data['user-data'] | ||
481 | 99 | self.metadata = data['meta-data'] | ||
482 | 100 | password = data.get('password') | ||
483 | 101 | |||
484 | 102 | password_config = {} | ||
485 | 103 | if password: | ||
486 | 104 | # Since we have a password, let's make sure we are allowed to use | ||
487 | 105 | # it by allowing ssh_pwauth. | ||
488 | 106 | # The password module's default behavior is to leave the | ||
489 | 107 | # configuration as-is in this regard, so that means it will either | ||
490 | 108 | # leave the password always disabled if no password is ever set, or | ||
491 | 109 | # leave the password login enabled if we set it once. | ||
492 | 110 | password_config = { | ||
493 | 111 | 'ssh_pwauth': True, | ||
494 | 112 | 'password': password, | ||
495 | 113 | 'chpasswd': { | ||
496 | 114 | 'expire': False, | ||
497 | 115 | }, | ||
498 | 116 | } | ||
499 | 117 | |||
500 | 118 | # builtin extra_config overrides password_config | ||
501 | 119 | self.extra_config = util.mergemanydict( | ||
502 | 120 | [self.extra_config, password_config]) | ||
503 | 121 | |||
504 | 122 | return True | ||
505 | 123 | |||
506 | 124 | def get_config_obj(self): | ||
507 | 125 | return self.extra_config | ||
508 | 126 | |||
509 | 127 | def _is_platform_viable(self): | ||
510 | 128 | return util.read_dmi_data('system-product-name').startswith( | ||
511 | 129 | EXOSCALE_DMI_NAME) | ||
512 | 130 | |||
513 | 131 | |||
514 | 132 | # Used to match classes to dependencies | ||
515 | 133 | datasources = [ | ||
516 | 134 | (DataSourceExoscale, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)), | ||
517 | 135 | ] | ||
518 | 136 | |||
519 | 137 | |||
520 | 138 | # Return a list of data sources that match this set of dependencies | ||
521 | 139 | def get_datasource_list(depends): | ||
522 | 140 | return sources.list_from_depends(depends, datasources) | ||
523 | 141 | |||
524 | 142 | |||
525 | 143 | def get_password(metadata_url=METADATA_URL, | ||
526 | 144 | api_version=API_VERSION, | ||
527 | 145 | password_server_port=PASSWORD_SERVER_PORT, | ||
528 | 146 | url_timeout=URL_TIMEOUT, | ||
529 | 147 | url_retries=URL_RETRIES): | ||
530 | 148 | """Obtain the VM's password if set. | ||
531 | 149 | |||
532 | 150 | Once fetched the password is marked saved. Future calls to this method may | ||
533 | 151 | return empty string or 'saved_password'.""" | ||
534 | 152 | password_url = "{}:{}/{}/".format(metadata_url, password_server_port, | ||
535 | 153 | api_version) | ||
536 | 154 | response = url_helper.read_file_or_url( | ||
537 | 155 | password_url, | ||
538 | 156 | ssl_details=None, | ||
539 | 157 | headers={"DomU_Request": "send_my_password"}, | ||
540 | 158 | timeout=url_timeout, | ||
541 | 159 | retries=url_retries) | ||
542 | 160 | password = response.contents.decode('utf-8') | ||
543 | 161 | # the password is empty or already saved | ||
544 | 162 | # Note: the original metadata server would answer an additional | ||
545 | 163 | # 'bad_request' status, but the Exoscale implementation does not. | ||
546 | 164 | if password in ['', 'saved_password']: | ||
547 | 165 | return None | ||
548 | 166 | # save the password | ||
549 | 167 | url_helper.read_file_or_url( | ||
550 | 168 | password_url, | ||
551 | 169 | ssl_details=None, | ||
552 | 170 | headers={"DomU_Request": "saved_password"}, | ||
553 | 171 | timeout=url_timeout, | ||
554 | 172 | retries=url_retries) | ||
555 | 173 | return password | ||
556 | 174 | |||
557 | 175 | |||
558 | 176 | def read_metadata(metadata_url=METADATA_URL, | ||
559 | 177 | api_version=API_VERSION, | ||
560 | 178 | password_server_port=PASSWORD_SERVER_PORT, | ||
561 | 179 | url_timeout=URL_TIMEOUT, | ||
562 | 180 | url_retries=URL_RETRIES): | ||
563 | 181 | """Query the metadata server and return the retrieved data.""" | ||
564 | 182 | crawled_metadata = {} | ||
565 | 183 | crawled_metadata['_metadata_api_version'] = api_version | ||
566 | 184 | try: | ||
567 | 185 | crawled_metadata['user-data'] = ec2.get_instance_userdata( | ||
568 | 186 | api_version, | ||
569 | 187 | metadata_url, | ||
570 | 188 | timeout=url_timeout, | ||
571 | 189 | retries=url_retries) | ||
572 | 190 | crawled_metadata['meta-data'] = ec2.get_instance_metadata( | ||
573 | 191 | api_version, | ||
574 | 192 | metadata_url, | ||
575 | 193 | timeout=url_timeout, | ||
576 | 194 | retries=url_retries) | ||
577 | 195 | except Exception as e: | ||
578 | 196 | util.logexc(LOG, "failed reading from metadata url %s (%s)", | ||
579 | 197 | metadata_url, e) | ||
580 | 198 | return {} | ||
581 | 199 | |||
582 | 200 | try: | ||
583 | 201 | crawled_metadata['password'] = get_password( | ||
584 | 202 | api_version=api_version, | ||
585 | 203 | metadata_url=metadata_url, | ||
586 | 204 | password_server_port=password_server_port, | ||
587 | 205 | url_retries=url_retries, | ||
588 | 206 | url_timeout=url_timeout) | ||
589 | 207 | except Exception as e: | ||
590 | 208 | util.logexc(LOG, "failed to read from password server url %s:%s (%s)", | ||
591 | 209 | metadata_url, password_server_port, e) | ||
592 | 210 | |||
593 | 211 | return crawled_metadata | ||
594 | 212 | |||
595 | 213 | |||
596 | 214 | if __name__ == "__main__": | ||
597 | 215 | import argparse | ||
598 | 216 | |||
599 | 217 | parser = argparse.ArgumentParser(description='Query Exoscale Metadata') | ||
600 | 218 | parser.add_argument( | ||
601 | 219 | "--endpoint", | ||
602 | 220 | metavar="URL", | ||
603 | 221 | help="The url of the metadata service.", | ||
604 | 222 | default=METADATA_URL) | ||
605 | 223 | parser.add_argument( | ||
606 | 224 | "--version", | ||
607 | 225 | metavar="VERSION", | ||
608 | 226 | help="The version of the metadata endpoint to query.", | ||
609 | 227 | default=API_VERSION) | ||
610 | 228 | parser.add_argument( | ||
611 | 229 | "--retries", | ||
612 | 230 | metavar="NUM", | ||
613 | 231 | type=int, | ||
614 | 232 | help="The number of retries querying the endpoint.", | ||
615 | 233 | default=URL_RETRIES) | ||
616 | 234 | parser.add_argument( | ||
617 | 235 | "--timeout", | ||
618 | 236 | metavar="NUM", | ||
619 | 237 | type=int, | ||
620 | 238 | help="The time in seconds to wait before timing out.", | ||
621 | 239 | default=URL_TIMEOUT) | ||
622 | 240 | parser.add_argument( | ||
623 | 241 | "--password-port", | ||
624 | 242 | metavar="PORT", | ||
625 | 243 | type=int, | ||
626 | 244 | help="The port on which the password endpoint listens", | ||
627 | 245 | default=PASSWORD_SERVER_PORT) | ||
628 | 246 | |||
629 | 247 | args = parser.parse_args() | ||
630 | 248 | |||
631 | 249 | data = read_metadata( | ||
632 | 250 | metadata_url=args.endpoint, | ||
633 | 251 | api_version=args.version, | ||
634 | 252 | password_server_port=args.password_port, | ||
635 | 253 | url_timeout=args.timeout, | ||
636 | 254 | url_retries=args.retries) | ||
637 | 255 | |||
638 | 256 | print(util.json_dumps(data)) | ||
639 | 257 | |||
640 | 258 | # vi: ts=4 expandtab | ||
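The password handling in get_password() above amounts to two GETs against the password server: one to fetch the password and one to mark it saved. A rough equivalent using plain requests (endpoint, port and DomU_Request header values are taken from the diff; real error handling and retries are omitted):

    import requests

    # http://<metadata host>:<password server port>/<api version>/
    PASSWORD_URL = 'http://169.254.169.254:8080/1.0/'

    resp = requests.get(PASSWORD_URL,
                        headers={'DomU_Request': 'send_my_password'},
                        timeout=10)
    password = resp.text

    if password not in ('', 'saved_password'):
        # Acknowledge the password so subsequent boots get 'saved_password'
        # back instead of resetting the account again.
        requests.get(PASSWORD_URL,
                     headers={'DomU_Request': 'saved_password'},
                     timeout=10)
        print('new password retrieved from the password server')
    else:
        print('no new password to apply')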
641 | diff --git a/cloudinit/sources/DataSourceGCE.py b/cloudinit/sources/DataSourceGCE.py | |||
642 | index d816262..6cbfbba 100644 | |||
643 | --- a/cloudinit/sources/DataSourceGCE.py | |||
644 | +++ b/cloudinit/sources/DataSourceGCE.py | |||
645 | @@ -18,10 +18,13 @@ LOG = logging.getLogger(__name__) | |||
646 | 18 | MD_V1_URL = 'http://metadata.google.internal/computeMetadata/v1/' | 18 | MD_V1_URL = 'http://metadata.google.internal/computeMetadata/v1/' |
647 | 19 | BUILTIN_DS_CONFIG = {'metadata_url': MD_V1_URL} | 19 | BUILTIN_DS_CONFIG = {'metadata_url': MD_V1_URL} |
648 | 20 | REQUIRED_FIELDS = ('instance-id', 'availability-zone', 'local-hostname') | 20 | REQUIRED_FIELDS = ('instance-id', 'availability-zone', 'local-hostname') |
649 | 21 | GUEST_ATTRIBUTES_URL = ('http://metadata.google.internal/computeMetadata/' | ||
650 | 22 | 'v1/instance/guest-attributes') | ||
651 | 23 | HOSTKEY_NAMESPACE = 'hostkeys' | ||
652 | 24 | HEADERS = {'Metadata-Flavor': 'Google'} | ||
653 | 21 | 25 | ||
654 | 22 | 26 | ||
655 | 23 | class GoogleMetadataFetcher(object): | 27 | class GoogleMetadataFetcher(object): |
656 | 24 | headers = {'Metadata-Flavor': 'Google'} | ||
657 | 25 | 28 | ||
658 | 26 | def __init__(self, metadata_address): | 29 | def __init__(self, metadata_address): |
659 | 27 | self.metadata_address = metadata_address | 30 | self.metadata_address = metadata_address |
660 | @@ -32,7 +35,7 @@ class GoogleMetadataFetcher(object): | |||
661 | 32 | url = self.metadata_address + path | 35 | url = self.metadata_address + path |
662 | 33 | if is_recursive: | 36 | if is_recursive: |
663 | 34 | url += '/?recursive=True' | 37 | url += '/?recursive=True' |
665 | 35 | resp = url_helper.readurl(url=url, headers=self.headers) | 38 | resp = url_helper.readurl(url=url, headers=HEADERS) |
666 | 36 | except url_helper.UrlError as exc: | 39 | except url_helper.UrlError as exc: |
667 | 37 | msg = "url %s raised exception %s" | 40 | msg = "url %s raised exception %s" |
668 | 38 | LOG.debug(msg, path, exc) | 41 | LOG.debug(msg, path, exc) |
669 | @@ -90,6 +93,10 @@ class DataSourceGCE(sources.DataSource): | |||
670 | 90 | public_keys_data = self.metadata['public-keys-data'] | 93 | public_keys_data = self.metadata['public-keys-data'] |
671 | 91 | return _parse_public_keys(public_keys_data, self.default_user) | 94 | return _parse_public_keys(public_keys_data, self.default_user) |
672 | 92 | 95 | ||
673 | 96 | def publish_host_keys(self, hostkeys): | ||
674 | 97 | for key in hostkeys: | ||
675 | 98 | _write_host_key_to_guest_attributes(*key) | ||
676 | 99 | |||
677 | 93 | def get_hostname(self, fqdn=False, resolve_ip=False, metadata_only=False): | 100 | def get_hostname(self, fqdn=False, resolve_ip=False, metadata_only=False): |
678 | 94 | # GCE has long FDQN's and has asked for short hostnames. | 101 | # GCE has long FDQN's and has asked for short hostnames. |
679 | 95 | return self.metadata['local-hostname'].split('.')[0] | 102 | return self.metadata['local-hostname'].split('.')[0] |
680 | @@ -103,6 +110,17 @@ class DataSourceGCE(sources.DataSource): | |||
681 | 103 | return self.availability_zone.rsplit('-', 1)[0] | 110 | return self.availability_zone.rsplit('-', 1)[0] |
682 | 104 | 111 | ||
683 | 105 | 112 | ||
684 | 113 | def _write_host_key_to_guest_attributes(key_type, key_value): | ||
685 | 114 | url = '%s/%s/%s' % (GUEST_ATTRIBUTES_URL, HOSTKEY_NAMESPACE, key_type) | ||
686 | 115 | key_value = key_value.encode('utf-8') | ||
687 | 116 | resp = url_helper.readurl(url=url, data=key_value, headers=HEADERS, | ||
688 | 117 | request_method='PUT', check_status=False) | ||
689 | 118 | if resp.ok(): | ||
690 | 119 | LOG.debug('Wrote %s host key to guest attributes.', key_type) | ||
691 | 120 | else: | ||
692 | 121 | LOG.debug('Unable to write %s host key to guest attributes.', key_type) | ||
693 | 122 | |||
694 | 123 | |||
695 | 106 | def _has_expired(public_key): | 124 | def _has_expired(public_key): |
696 | 107 | # Check whether an SSH key is expired. Public key input is a single SSH | 125 | # Check whether an SSH key is expired. Public key input is a single SSH |
697 | 108 | # public key in the GCE specific key format documented here: | 126 | # public key in the GCE specific key format documented here: |
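The new GCE host-key publishing above is just one HTTP PUT per key against the guest-attributes endpoint. A minimal sketch with requests (URL, namespace and header come from the diff; the key material is a placeholder):

    import requests

    GUEST_ATTRIBUTES_URL = ('http://metadata.google.internal/computeMetadata/'
                            'v1/instance/guest-attributes')
    HOSTKEY_NAMESPACE = 'hostkeys'

    key_type, key_value = 'ssh-ed25519', 'AAAAC3NzaC1lZDI...'  # placeholder key

    resp = requests.put(
        '%s/%s/%s' % (GUEST_ATTRIBUTES_URL, HOSTKEY_NAMESPACE, key_type),
        data=key_value.encode('utf-8'),
        headers={'Metadata-Flavor': 'Google'},
        timeout=10)
    print('published' if resp.ok else 'guest attributes likely not enabled')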
698 | diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py | |||
699 | index c2baccd..a319322 100644 | |||
700 | --- a/cloudinit/sources/__init__.py | |||
701 | +++ b/cloudinit/sources/__init__.py | |||
702 | @@ -491,6 +491,16 @@ class DataSource(object): | |||
703 | 491 | def get_public_ssh_keys(self): | 491 | def get_public_ssh_keys(self): |
704 | 492 | return normalize_pubkey_data(self.metadata.get('public-keys')) | 492 | return normalize_pubkey_data(self.metadata.get('public-keys')) |
705 | 493 | 493 | ||
706 | 494 | def publish_host_keys(self, hostkeys): | ||
707 | 495 | """Publish the public SSH host keys (found in /etc/ssh/*.pub). | ||
708 | 496 | |||
709 | 497 | @param hostkeys: List of host key tuples (key_type, key_value), | ||
710 | 498 | where key_type is the first field in the public key file | ||
711 | 499 | (e.g. 'ssh-rsa') and key_value is the key itself | ||
712 | 500 | (e.g. 'AAAAB3NzaC1y...'). | ||
713 | 501 | """ | ||
714 | 502 | pass | ||
715 | 503 | |||
716 | 494 | def _remap_device(self, short_name): | 504 | def _remap_device(self, short_name): |
717 | 495 | # LP: #611137 | 505 | # LP: #611137 |
718 | 496 | # the metadata service may believe that devices are named 'sda' | 506 | # the metadata service may believe that devices are named 'sda' |
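The base-class hook added above is a no-op, so datasources opt in by overriding it. A hypothetical override showing the interface (the class and its behaviour are invented for illustration; the real GCE implementation is earlier in this diff):

    from cloudinit import sources

    class DataSourceExampleCloud(sources.DataSource):
        """Hypothetical datasource demonstrating the publish_host_keys hook."""

        dsname = 'ExampleCloud'

        def _get_data(self):
            return False  # platform detection elided in this sketch

        def publish_host_keys(self, hostkeys):
            # cc_ssh passes a list of (key_type, key_value) tuples, e.g.
            # [('ssh-ed25519', 'AAAAC3...')]. A real datasource would push
            # these to its platform's registration endpoint.
            for key_type, key_value in hostkeys:
                print('would publish %s key (%d chars)' % (key_type, len(key_value)))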
719 | diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py | |||
720 | index 0af0d9e..44ee61d 100644 | |||
721 | --- a/cloudinit/url_helper.py | |||
722 | +++ b/cloudinit/url_helper.py | |||
723 | @@ -199,18 +199,19 @@ def _get_ssl_args(url, ssl_details): | |||
724 | 199 | def readurl(url, data=None, timeout=None, retries=0, sec_between=1, | 199 | def readurl(url, data=None, timeout=None, retries=0, sec_between=1, |
725 | 200 | headers=None, headers_cb=None, ssl_details=None, | 200 | headers=None, headers_cb=None, ssl_details=None, |
726 | 201 | check_status=True, allow_redirects=True, exception_cb=None, | 201 | check_status=True, allow_redirects=True, exception_cb=None, |
728 | 202 | session=None, infinite=False, log_req_resp=True): | 202 | session=None, infinite=False, log_req_resp=True, |
729 | 203 | request_method=None): | ||
730 | 203 | url = _cleanurl(url) | 204 | url = _cleanurl(url) |
731 | 204 | req_args = { | 205 | req_args = { |
732 | 205 | 'url': url, | 206 | 'url': url, |
733 | 206 | } | 207 | } |
734 | 207 | req_args.update(_get_ssl_args(url, ssl_details)) | 208 | req_args.update(_get_ssl_args(url, ssl_details)) |
735 | 208 | req_args['allow_redirects'] = allow_redirects | 209 | req_args['allow_redirects'] = allow_redirects |
737 | 209 | req_args['method'] = 'GET' | 210 | if not request_method: |
738 | 211 | request_method = 'POST' if data else 'GET' | ||
739 | 212 | req_args['method'] = request_method | ||
740 | 210 | if timeout is not None: | 213 | if timeout is not None: |
741 | 211 | req_args['timeout'] = max(float(timeout), 0) | 214 | req_args['timeout'] = max(float(timeout), 0) |
742 | 212 | if data: | ||
743 | 213 | req_args['method'] = 'POST' | ||
744 | 214 | # It doesn't seem like config | 215 | # It doesn't seem like config |
745 | 215 | # was added in older library versions (or newer ones either), thus we | 216 | # was added in older library versions (or newer ones either), thus we |
746 | 216 | # need to manually do the retries if it wasn't... | 217 | # need to manually do the retries if it wasn't... |
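Previously readurl() used GET, or POST whenever a body was supplied; the new request_method argument lets callers pick the verb explicitly. A short usage sketch matching the PUT performed by the GCE change above (the URL and key bytes are placeholders):

    from cloudinit import url_helper

    resp = url_helper.readurl(
        url='http://metadata.google.internal/computeMetadata/v1/'
            'instance/guest-attributes/hostkeys/ssh-rsa',
        data=b'AAAAB3Nz...',                     # placeholder key material
        headers={'Metadata-Flavor': 'Google'},
        request_method='PUT',                    # would have been forced to POST before
        check_status=False)
    print(resp.ok())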
747 | diff --git a/debian/changelog b/debian/changelog | |||
748 | index 671dad7..2cda24c 100644 | |||
749 | --- a/debian/changelog | |||
750 | +++ b/debian/changelog | |||
751 | @@ -1,3 +1,15 @@ | |||
752 | 1 | cloud-init (19.2-9-g15584720-0ubuntu1) eoan; urgency=medium | ||
753 | 2 | |||
754 | 3 | * New upstream snapshot. | ||
755 | 4 | - Add support for publishing host keys to GCE guest attributes | ||
756 | 5 | [Rick Wright] | ||
757 | 6 | - New data source for the Exoscale.com cloud platform [Chris Glass] | ||
758 | 7 | - doc: remove intersphinx extension | ||
759 | 8 | - cc_set_passwords: rewrite documentation (LP: #1838794) | ||
760 | 9 | * d/cloud-init.templates: add Exoscale data source | ||
761 | 10 | |||
762 | 11 | -- Daniel Watkins <oddbloke@ubuntu.com> Fri, 09 Aug 2019 13:57:28 -0400 | ||
763 | 12 | |||
764 | 1 | cloud-init (19.2-5-g496aaa94-0ubuntu1) eoan; urgency=medium | 13 | cloud-init (19.2-5-g496aaa94-0ubuntu1) eoan; urgency=medium |
765 | 2 | 14 | ||
766 | 3 | * New upstream snapshot. | 15 | * New upstream snapshot. |
767 | diff --git a/debian/cloud-init.templates b/debian/cloud-init.templates | |||
768 | index ece53a0..e5efdad 100644 | |||
769 | --- a/debian/cloud-init.templates | |||
770 | +++ b/debian/cloud-init.templates | |||
771 | @@ -1,8 +1,8 @@ | |||
772 | 1 | Template: cloud-init/datasources | 1 | Template: cloud-init/datasources |
773 | 2 | Type: multiselect | 2 | Type: multiselect |
777 | 3 | Default: NoCloud, ConfigDrive, OpenNebula, DigitalOcean, Azure, AltCloud, OVF, MAAS, GCE, OpenStack, CloudSigma, SmartOS, Bigstep, Scaleway, AliYun, Ec2, CloudStack, Hetzner, IBMCloud, Oracle, None | 3 | Default: NoCloud, ConfigDrive, OpenNebula, DigitalOcean, Azure, AltCloud, OVF, MAAS, GCE, OpenStack, CloudSigma, SmartOS, Bigstep, Scaleway, AliYun, Ec2, CloudStack, Hetzner, IBMCloud, Oracle, Exoscale, None |
778 | 4 | Choices-C: NoCloud, ConfigDrive, OpenNebula, DigitalOcean, Azure, AltCloud, OVF, MAAS, GCE, OpenStack, CloudSigma, SmartOS, Bigstep, Scaleway, AliYun, Ec2, CloudStack, Hetzner, IBMCloud, Oracle, None | 4 | Choices-C: NoCloud, ConfigDrive, OpenNebula, DigitalOcean, Azure, AltCloud, OVF, MAAS, GCE, OpenStack, CloudSigma, SmartOS, Bigstep, Scaleway, AliYun, Ec2, CloudStack, Hetzner, IBMCloud, Oracle, Exoscale, None |
779 | 5 | Choices: NoCloud: Reads info from /var/lib/cloud/seed only, ConfigDrive: Reads data from Openstack Config Drive, OpenNebula: read from OpenNebula context disk, DigitalOcean: reads data from Droplet datasource, Azure: read from MS Azure cdrom. Requires walinux-agent, AltCloud: config disks for RHEVm and vSphere, OVF: Reads data from OVF Transports, MAAS: Reads data from Ubuntu MAAS, GCE: google compute metadata service, OpenStack: native openstack metadata service, CloudSigma: metadata over serial for cloudsigma.com, SmartOS: Read from SmartOS metadata service, Bigstep: Bigstep metadata service, Scaleway: Scaleway metadata service, AliYun: Alibaba metadata service, Ec2: reads data from EC2 Metadata service, CloudStack: Read from CloudStack metadata service, Hetzner: Hetzner Cloud, IBMCloud: IBM Cloud. Previously softlayer or bluemix., Oracle: Oracle Compute Infrastructure, None: Failsafe datasource | 5 | Choices: NoCloud: Reads info from /var/lib/cloud/seed only, ConfigDrive: Reads data from Openstack Config Drive, OpenNebula: read from OpenNebula context disk, DigitalOcean: reads data from Droplet datasource, Azure: read from MS Azure cdrom. Requires walinux-agent, AltCloud: config disks for RHEVm and vSphere, OVF: Reads data from OVF Transports, MAAS: Reads data from Ubuntu MAAS, GCE: google compute metadata service, OpenStack: native openstack metadata service, CloudSigma: metadata over serial for cloudsigma.com, SmartOS: Read from SmartOS metadata service, Bigstep: Bigstep metadata service, Scaleway: Scaleway metadata service, AliYun: Alibaba metadata service, Ec2: reads data from EC2 Metadata service, CloudStack: Read from CloudStack metadata service, Hetzner: Hetzner Cloud, IBMCloud: IBM Cloud. Previously softlayer or bluemix., Oracle: Oracle Compute Infrastructure, Exoscale: Exoscale, None: Failsafe datasource |
780 | 6 | Description: Which data sources should be searched? | 6 | Description: Which data sources should be searched? |
781 | 7 | Cloud-init supports searching different "Data Sources" for information | 7 | Cloud-init supports searching different "Data Sources" for information |
782 | 8 | that it uses to configure a cloud instance. | 8 | that it uses to configure a cloud instance. |
783 | diff --git a/doc/rtd/conf.py b/doc/rtd/conf.py | |||
784 | index 50eb05c..4174477 100644 | |||
785 | --- a/doc/rtd/conf.py | |||
786 | +++ b/doc/rtd/conf.py | |||
787 | @@ -27,16 +27,11 @@ project = 'Cloud-Init' | |||
788 | 27 | # Add any Sphinx extension module names here, as strings. They can be | 27 | # Add any Sphinx extension module names here, as strings. They can be |
789 | 28 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | 28 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. |
790 | 29 | extensions = [ | 29 | extensions = [ |
791 | 30 | 'sphinx.ext.intersphinx', | ||
792 | 31 | 'sphinx.ext.autodoc', | 30 | 'sphinx.ext.autodoc', |
793 | 32 | 'sphinx.ext.autosectionlabel', | 31 | 'sphinx.ext.autosectionlabel', |
794 | 33 | 'sphinx.ext.viewcode', | 32 | 'sphinx.ext.viewcode', |
795 | 34 | ] | 33 | ] |
796 | 35 | 34 | ||
797 | 36 | intersphinx_mapping = { | ||
798 | 37 | 'sphinx': ('http://sphinx.pocoo.org', None) | ||
799 | 38 | } | ||
800 | 39 | |||
801 | 40 | # The suffix of source filenames. | 35 | # The suffix of source filenames. |
802 | 41 | source_suffix = '.rst' | 36 | source_suffix = '.rst' |
803 | 42 | 37 | ||
804 | diff --git a/doc/rtd/topics/datasources.rst b/doc/rtd/topics/datasources.rst | |||
805 | index 648c606..2148cd5 100644 | |||
806 | --- a/doc/rtd/topics/datasources.rst | |||
807 | +++ b/doc/rtd/topics/datasources.rst | |||
808 | @@ -155,6 +155,7 @@ Follow for more information. | |||
809 | 155 | datasources/configdrive.rst | 155 | datasources/configdrive.rst |
810 | 156 | datasources/digitalocean.rst | 156 | datasources/digitalocean.rst |
811 | 157 | datasources/ec2.rst | 157 | datasources/ec2.rst |
812 | 158 | datasources/exoscale.rst | ||
813 | 158 | datasources/maas.rst | 159 | datasources/maas.rst |
814 | 159 | datasources/nocloud.rst | 160 | datasources/nocloud.rst |
815 | 160 | datasources/opennebula.rst | 161 | datasources/opennebula.rst |
816 | diff --git a/doc/rtd/topics/datasources/exoscale.rst b/doc/rtd/topics/datasources/exoscale.rst | |||
817 | 161 | new file mode 100644 | 162 | new file mode 100644 |
818 | index 0000000..27aec9c | |||
819 | --- /dev/null | |||
820 | +++ b/doc/rtd/topics/datasources/exoscale.rst | |||
821 | @@ -0,0 +1,68 @@ | |||
822 | 1 | .. _datasource_exoscale: | ||
823 | 2 | |||
824 | 3 | Exoscale | ||
825 | 4 | ======== | ||
826 | 5 | |||
827 | 6 | This datasource supports reading from the metadata server used on the | ||
828 | 7 | `Exoscale platform <https://exoscale.com>`_. | ||
829 | 8 | |||
830 | 9 | Use of the Exoscale datasource is recommended to benefit from new features of | ||
831 | 10 | the Exoscale platform. | ||
832 | 11 | |||
833 | 12 | The datasource relies on the availability of a compatible metadata server | ||
834 | 13 | (``http://169.254.169.254`` is used by default) and its companion password | ||
835 | 14 | server, reachable at the same address (by default on port 8080). | ||
836 | 15 | |||
837 | 16 | Crawling of metadata | ||
838 | 17 | -------------------- | ||
839 | 18 | |||
840 | 19 | The metadata service and password server are crawled slightly differently: | ||
841 | 20 | |||
842 | 21 | * The "metadata service" is crawled every boot. | ||
843 | 22 | * The password server is also crawled every boot (the Exoscale datasource | ||
844 | 23 | forces the password module to run with "frequency always"). | ||
845 | 24 | |||
846 | 25 | In the password server case, the following rules apply in order to enable the | ||
847 | 26 | "restore instance password" functionality: | ||
848 | 27 | |||
849 | 28 | * If a password is returned by the password server, it is then marked "saved" | ||
850 | 29 | by the cloud-init datasource. Subsequent boots will skip setting the password | ||
851 | 30 | (the password server will return "saved_password"). | ||
852 | 31 | * When the instance password is reset (via the Exoscale UI), the password | ||
853 | 32 | server will return the non-empty password at next boot, therefore causing | ||
854 | 33 | cloud-init to reset the instance's password. | ||
855 | 34 | |||
856 | 35 | Configuration | ||
857 | 36 | ------------- | ||
858 | 37 | |||
859 | 38 | Users of this datasource are discouraged from changing the default settings | ||
860 | 39 | unless instructed to by Exoscale support. | ||
861 | 40 | |||
862 | 41 | The following settings are available and can be set for the datasource in system | ||
863 | 42 | configuration (in `/etc/cloud/cloud.cfg.d/`). | ||
864 | 43 | |||
865 | 44 | The settings available are: | ||
866 | 45 | |||
867 | 46 | * **metadata_url**: The URL for the metadata service (defaults to | ||
868 | 47 | ``http://169.254.169.254``) | ||
869 | 48 | * **api_version**: The API version path on which to query the instance metadata | ||
870 | 49 | (defaults to ``1.0``) | ||
871 | 50 | * **password_server_port**: The port (on the metadata server) on which the | ||
872 | 51 | password server listens (defaults to ``8080``). | ||
873 | 52 | * **timeout**: the timeout value provided to urlopen for each individual http | ||
874 | 53 | request. (defaults to ``10``) | ||
875 | 54 | * **retries**: The number of retries that should be done for an http request | ||
876 | 55 | (defaults to ``6``) | ||
877 | 56 | |||
878 | 57 | |||
879 | 58 | An example configuration with the default values is provided below: | ||
880 | 59 | |||
881 | 60 | .. sourcecode:: yaml | ||
882 | 61 | |||
883 | 62 | datasource: | ||
884 | 63 | Exoscale: | ||
885 | 64 | metadata_url: "http://169.254.169.254" | ||
886 | 65 | api_version: "1.0" | ||
887 | 66 | password_server_port: 8080 | ||
888 | 67 | timeout: 10 | ||
889 | 68 | retries: 6 | ||
890 | diff --git a/tests/unittests/test_datasource/test_common.py b/tests/unittests/test_datasource/test_common.py | |||
891 | index 2a9cfb2..61a7a76 100644 | |||
892 | --- a/tests/unittests/test_datasource/test_common.py | |||
893 | +++ b/tests/unittests/test_datasource/test_common.py | |||
894 | @@ -13,6 +13,7 @@ from cloudinit.sources import ( | |||
895 | 13 | DataSourceConfigDrive as ConfigDrive, | 13 | DataSourceConfigDrive as ConfigDrive, |
896 | 14 | DataSourceDigitalOcean as DigitalOcean, | 14 | DataSourceDigitalOcean as DigitalOcean, |
897 | 15 | DataSourceEc2 as Ec2, | 15 | DataSourceEc2 as Ec2, |
898 | 16 | DataSourceExoscale as Exoscale, | ||
899 | 16 | DataSourceGCE as GCE, | 17 | DataSourceGCE as GCE, |
900 | 17 | DataSourceHetzner as Hetzner, | 18 | DataSourceHetzner as Hetzner, |
901 | 18 | DataSourceIBMCloud as IBMCloud, | 19 | DataSourceIBMCloud as IBMCloud, |
902 | @@ -53,6 +54,7 @@ DEFAULT_NETWORK = [ | |||
903 | 53 | CloudStack.DataSourceCloudStack, | 54 | CloudStack.DataSourceCloudStack, |
904 | 54 | DSNone.DataSourceNone, | 55 | DSNone.DataSourceNone, |
905 | 55 | Ec2.DataSourceEc2, | 56 | Ec2.DataSourceEc2, |
906 | 57 | Exoscale.DataSourceExoscale, | ||
907 | 56 | GCE.DataSourceGCE, | 58 | GCE.DataSourceGCE, |
908 | 57 | MAAS.DataSourceMAAS, | 59 | MAAS.DataSourceMAAS, |
909 | 58 | NoCloud.DataSourceNoCloudNet, | 60 | NoCloud.DataSourceNoCloudNet, |
910 | diff --git a/tests/unittests/test_datasource/test_exoscale.py b/tests/unittests/test_datasource/test_exoscale.py | |||
911 | 59 | new file mode 100644 | 61 | new file mode 100644 |
912 | index 0000000..350c330 | |||
913 | --- /dev/null | |||
914 | +++ b/tests/unittests/test_datasource/test_exoscale.py | |||
915 | @@ -0,0 +1,203 @@ | |||
916 | 1 | # Author: Mathieu Corbin <mathieu.corbin@exoscale.com> | ||
917 | 2 | # Author: Christopher Glass <christopher.glass@exoscale.com> | ||
918 | 3 | # | ||
919 | 4 | # This file is part of cloud-init. See LICENSE file for license information. | ||
920 | 5 | from cloudinit import helpers | ||
921 | 6 | from cloudinit.sources.DataSourceExoscale import ( | ||
922 | 7 | API_VERSION, | ||
923 | 8 | DataSourceExoscale, | ||
924 | 9 | METADATA_URL, | ||
925 | 10 | get_password, | ||
926 | 11 | PASSWORD_SERVER_PORT, | ||
927 | 12 | read_metadata) | ||
928 | 13 | from cloudinit.tests.helpers import HttprettyTestCase, mock | ||
929 | 14 | |||
930 | 15 | import httpretty | ||
931 | 16 | import requests | ||
932 | 17 | |||
933 | 18 | |||
934 | 19 | TEST_PASSWORD_URL = "{}:{}/{}/".format(METADATA_URL, | ||
935 | 20 | PASSWORD_SERVER_PORT, | ||
936 | 21 | API_VERSION) | ||
937 | 22 | |||
938 | 23 | TEST_METADATA_URL = "{}/{}/meta-data/".format(METADATA_URL, | ||
939 | 24 | API_VERSION) | ||
940 | 25 | |||
941 | 26 | TEST_USERDATA_URL = "{}/{}/user-data".format(METADATA_URL, | ||
942 | 27 | API_VERSION) | ||
943 | 28 | |||
944 | 29 | |||
945 | 30 | @httpretty.activate | ||
946 | 31 | class TestDatasourceExoscale(HttprettyTestCase): | ||
947 | 32 | |||
948 | 33 | def setUp(self): | ||
949 | 34 | super(TestDatasourceExoscale, self).setUp() | ||
950 | 35 | self.tmp = self.tmp_dir() | ||
951 | 36 | self.password_url = TEST_PASSWORD_URL | ||
952 | 37 | self.metadata_url = TEST_METADATA_URL | ||
953 | 38 | self.userdata_url = TEST_USERDATA_URL | ||
954 | 39 | |||
955 | 40 | def test_password_saved(self): | ||
956 | 41 | """The password is not set when it is not found | ||
957 | 42 | in the metadata service.""" | ||
958 | 43 | httpretty.register_uri(httpretty.GET, | ||
959 | 44 | self.password_url, | ||
960 | 45 | body="saved_password") | ||
961 | 46 | self.assertFalse(get_password()) | ||
962 | 47 | |||
963 | 48 | def test_password_empty(self): | ||
964 | 49 | """No password is set if the metadata service returns | ||
965 | 50 | an empty string.""" | ||
966 | 51 | httpretty.register_uri(httpretty.GET, | ||
967 | 52 | self.password_url, | ||
968 | 53 | body="") | ||
969 | 54 | self.assertFalse(get_password()) | ||
970 | 55 | |||
971 | 56 | def test_password(self): | ||
972 | 57 | """The password is set to what is found in the metadata | ||
973 | 58 | service.""" | ||
974 | 59 | expected_password = "p@ssw0rd" | ||
975 | 60 | httpretty.register_uri(httpretty.GET, | ||
976 | 61 | self.password_url, | ||
977 | 62 | body=expected_password) | ||
978 | 63 | password = get_password() | ||
979 | 64 | self.assertEqual(expected_password, password) | ||
980 | 65 | |||
981 | 66 | def test_get_data(self): | ||
982 | 67 | """The datasource conforms to expected behavior when supplied | ||
983 | 68 | full test data.""" | ||
984 | 69 | path = helpers.Paths({'run_dir': self.tmp}) | ||
985 | 70 | ds = DataSourceExoscale({}, None, path) | ||
986 | 71 | ds._is_platform_viable = lambda: True | ||
987 | 72 | expected_password = "p@ssw0rd" | ||
988 | 73 | expected_id = "12345" | ||
989 | 74 | expected_hostname = "myname" | ||
990 | 75 | expected_userdata = "#cloud-config" | ||
991 | 76 | httpretty.register_uri(httpretty.GET, | ||
992 | 77 | self.userdata_url, | ||
993 | 78 | body=expected_userdata) | ||
994 | 79 | httpretty.register_uri(httpretty.GET, | ||
995 | 80 | self.password_url, | ||
996 | 81 | body=expected_password) | ||
997 | 82 | httpretty.register_uri(httpretty.GET, | ||
998 | 83 | self.metadata_url, | ||
999 | 84 | body="instance-id\nlocal-hostname") | ||
1000 | 85 | httpretty.register_uri(httpretty.GET, | ||
1001 | 86 | "{}local-hostname".format(self.metadata_url), | ||
1002 | 87 | body=expected_hostname) | ||
1003 | 88 | httpretty.register_uri(httpretty.GET, | ||
1004 | 89 | "{}instance-id".format(self.metadata_url), | ||
1005 | 90 | body=expected_id) | ||
1006 | 91 | self.assertTrue(ds._get_data()) | ||
1007 | 92 | self.assertEqual(ds.userdata_raw.decode("utf-8"), "#cloud-config") | ||
1008 | 93 | self.assertEqual(ds.metadata, {"instance-id": expected_id, | ||
1009 | 94 | "local-hostname": expected_hostname}) | ||
1010 | 95 | self.assertEqual(ds.get_config_obj(), | ||
1011 | 96 | {'ssh_pwauth': True, | ||
1012 | 97 | 'password': expected_password, | ||
1013 | 98 | 'cloud_config_modules': [ | ||
1014 | 99 | ["set-passwords", "always"]], | ||
1015 | 100 | 'chpasswd': { | ||
1016 | 101 | 'expire': False, | ||
1017 | 102 | }}) | ||
1018 | 103 | |||
1019 | 104 | def test_get_data_saved_password(self): | ||
1020 | 105 | """The datasource conforms to expected behavior when saved_password is | ||
1021 | 106 | returned by the password server.""" | ||
1022 | 107 | path = helpers.Paths({'run_dir': self.tmp}) | ||
1023 | 108 | ds = DataSourceExoscale({}, None, path) | ||
1024 | 109 | ds._is_platform_viable = lambda: True | ||
1025 | 110 | expected_answer = "saved_password" | ||
1026 | 111 | expected_id = "12345" | ||
1027 | 112 | expected_hostname = "myname" | ||
1028 | 113 | expected_userdata = "#cloud-config" | ||
1029 | 114 | httpretty.register_uri(httpretty.GET, | ||
1030 | 115 | self.userdata_url, | ||
1031 | 116 | body=expected_userdata) | ||
1032 | 117 | httpretty.register_uri(httpretty.GET, | ||
1033 | 118 | self.password_url, | ||
1034 | 119 | body=expected_answer) | ||
1035 | 120 | httpretty.register_uri(httpretty.GET, | ||
1036 | 121 | self.metadata_url, | ||
1037 | 122 | body="instance-id\nlocal-hostname") | ||
1038 | 123 | httpretty.register_uri(httpretty.GET, | ||
1039 | 124 | "{}local-hostname".format(self.metadata_url), | ||
1040 | 125 | body=expected_hostname) | ||
1041 | 126 | httpretty.register_uri(httpretty.GET, | ||
1042 | 127 | "{}instance-id".format(self.metadata_url), | ||
1043 | 128 | body=expected_id) | ||
1044 | 129 | self.assertTrue(ds._get_data()) | ||
1045 | 130 | self.assertEqual(ds.userdata_raw.decode("utf-8"), "#cloud-config") | ||
1046 | 131 | self.assertEqual(ds.metadata, {"instance-id": expected_id, | ||
1047 | 132 | "local-hostname": expected_hostname}) | ||
1048 | 133 | self.assertEqual(ds.get_config_obj(), | ||
1049 | 134 | {'cloud_config_modules': [ | ||
1050 | 135 | ["set-passwords", "always"]]}) | ||
1051 | 136 | |||
1052 | 137 | def test_get_data_no_password(self): | ||
1053 | 138 | """The datasource conforms to expected behavior when no password is | ||
1054 | 139 | returned by the password server.""" | ||
1055 | 140 | path = helpers.Paths({'run_dir': self.tmp}) | ||
1056 | 141 | ds = DataSourceExoscale({}, None, path) | ||
1057 | 142 | ds._is_platform_viable = lambda: True | ||
1058 | 143 | expected_answer = "" | ||
1059 | 144 | expected_id = "12345" | ||
1060 | 145 | expected_hostname = "myname" | ||
1061 | 146 | expected_userdata = "#cloud-config" | ||
1062 | 147 | httpretty.register_uri(httpretty.GET, | ||
1063 | 148 | self.userdata_url, | ||
1064 | 149 | body=expected_userdata) | ||
1065 | 150 | httpretty.register_uri(httpretty.GET, | ||
1066 | 151 | self.password_url, | ||
1067 | 152 | body=expected_answer) | ||
1068 | 153 | httpretty.register_uri(httpretty.GET, | ||
1069 | 154 | self.metadata_url, | ||
1070 | 155 | body="instance-id\nlocal-hostname") | ||
1071 | 156 | httpretty.register_uri(httpretty.GET, | ||
1072 | 157 | "{}local-hostname".format(self.metadata_url), | ||
1073 | 158 | body=expected_hostname) | ||
1074 | 159 | httpretty.register_uri(httpretty.GET, | ||
1075 | 160 | "{}instance-id".format(self.metadata_url), | ||
1076 | 161 | body=expected_id) | ||
1077 | 162 | self.assertTrue(ds._get_data()) | ||
1078 | 163 | self.assertEqual(ds.userdata_raw.decode("utf-8"), "#cloud-config") | ||
1079 | 164 | self.assertEqual(ds.metadata, {"instance-id": expected_id, | ||
1080 | 165 | "local-hostname": expected_hostname}) | ||
1081 | 166 | self.assertEqual(ds.get_config_obj(), | ||
1082 | 167 | {'cloud_config_modules': [ | ||
1083 | 168 | ["set-passwords", "always"]]}) | ||
1084 | 169 | |||
1085 | 170 | @mock.patch('cloudinit.sources.DataSourceExoscale.get_password') | ||
1086 | 171 | def test_read_metadata_when_password_server_unreachable(self, m_password): | ||
1087 | 172 | """The read_metadata function returns partial results in case the | ||
1088 | 173 | password server (only) is unreachable.""" | ||
1089 | 174 | expected_id = "12345" | ||
1090 | 175 | expected_hostname = "myname" | ||
1091 | 176 | expected_userdata = "#cloud-config" | ||
1092 | 177 | |||
1093 | 178 | m_password.side_effect = requests.Timeout('Fake Connection Timeout') | ||
1094 | 179 | httpretty.register_uri(httpretty.GET, | ||
1095 | 180 | self.userdata_url, | ||
1096 | 181 | body=expected_userdata) | ||
1097 | 182 | httpretty.register_uri(httpretty.GET, | ||
1098 | 183 | self.metadata_url, | ||
1099 | 184 | body="instance-id\nlocal-hostname") | ||
1100 | 185 | httpretty.register_uri(httpretty.GET, | ||
1101 | 186 | "{}local-hostname".format(self.metadata_url), | ||
1102 | 187 | body=expected_hostname) | ||
1103 | 188 | httpretty.register_uri(httpretty.GET, | ||
1104 | 189 | "{}instance-id".format(self.metadata_url), | ||
1105 | 190 | body=expected_id) | ||
1106 | 191 | |||
1107 | 192 | result = read_metadata() | ||
1108 | 193 | |||
1109 | 194 | self.assertIsNone(result.get("password")) | ||
1110 | 195 | self.assertEqual(result.get("user-data").decode("utf-8"), | ||
1111 | 196 | expected_userdata) | ||
1112 | 197 | |||
1113 | 198 | def test_non_viable_platform(self): | ||
1114 | 199 | """The datasource fails fast when the platform is not viable.""" | ||
1115 | 200 | path = helpers.Paths({'run_dir': self.tmp}) | ||
1116 | 201 | ds = DataSourceExoscale({}, None, path) | ||
1117 | 202 | ds._is_platform_viable = lambda: False | ||
1118 | 203 | self.assertFalse(ds._get_data()) | ||
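The Exoscale tests above stub three httpretty endpoints: the meta-data index, the individual meta-data keys, and the CloudStack-style password server. The sketch below only restates the behaviour those tests pin down for get_password(); it is not the code from DataSourceExoscale.py, and the concrete METADATA_URL, PASSWORD_SERVER_PORT and API_VERSION values are assumptions here, while the "{url}:{port}/{version}/" layout and the treatment of "" and "saved_password" are taken from the tests.

import requests

# Assumed constants; the authoritative values live in DataSourceExoscale.py.
METADATA_URL = "http://169.254.169.254"
PASSWORD_SERVER_PORT = 8080
API_VERSION = "1.0"


def get_password():
    """Fetch the instance password; treat "" and "saved_password" as unset."""
    password_url = "{}:{}/{}/".format(
        METADATA_URL, PASSWORD_SERVER_PORT, API_VERSION)
    answer = requests.get(password_url, timeout=10).text.strip()
    if answer in ("", "saved_password"):
        return None
    return answer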
1119 | diff --git a/tests/unittests/test_datasource/test_gce.py b/tests/unittests/test_datasource/test_gce.py | |||
1120 | index 41176c6..67744d3 100644 | |||
1121 | --- a/tests/unittests/test_datasource/test_gce.py | |||
1122 | +++ b/tests/unittests/test_datasource/test_gce.py | |||
1123 | @@ -55,6 +55,8 @@ GCE_USER_DATA_TEXT = { | |||
1124 | 55 | HEADERS = {'Metadata-Flavor': 'Google'} | 55 | HEADERS = {'Metadata-Flavor': 'Google'} |
1125 | 56 | MD_URL_RE = re.compile( | 56 | MD_URL_RE = re.compile( |
1126 | 57 | r'http://metadata.google.internal/computeMetadata/v1/.*') | 57 | r'http://metadata.google.internal/computeMetadata/v1/.*') |
1127 | 58 | GUEST_ATTRIBUTES_URL = ('http://metadata.google.internal/computeMetadata/' | ||
1128 | 59 | 'v1/instance/guest-attributes/hostkeys/') | ||
1129 | 58 | 60 | ||
1130 | 59 | 61 | ||
1131 | 60 | def _set_mock_metadata(gce_meta=None): | 62 | def _set_mock_metadata(gce_meta=None): |
1132 | @@ -341,4 +343,20 @@ class TestDataSourceGCE(test_helpers.HttprettyTestCase): | |||
1133 | 341 | public_key_data, default_user='default') | 343 | public_key_data, default_user='default') |
1134 | 342 | self.assertEqual(sorted(found), sorted(expected)) | 344 | self.assertEqual(sorted(found), sorted(expected)) |
1135 | 343 | 345 | ||
1136 | 346 | @mock.patch("cloudinit.url_helper.readurl") | ||
1137 | 347 | def test_publish_host_keys(self, m_readurl): | ||
1138 | 348 | hostkeys = [('ssh-rsa', 'asdfasdf'), | ||
1139 | 349 | ('ssh-ed25519', 'qwerqwer')] | ||
1140 | 350 | readurl_expected_calls = [ | ||
1141 | 351 | mock.call(check_status=False, data=b'asdfasdf', headers=HEADERS, | ||
1142 | 352 | request_method='PUT', | ||
1143 | 353 | url='%s%s' % (GUEST_ATTRIBUTES_URL, 'ssh-rsa')), | ||
1144 | 354 | mock.call(check_status=False, data=b'qwerqwer', headers=HEADERS, | ||
1145 | 355 | request_method='PUT', | ||
1146 | 356 | url='%s%s' % (GUEST_ATTRIBUTES_URL, 'ssh-ed25519')), | ||
1147 | 357 | ] | ||
1148 | 358 | self.ds.publish_host_keys(hostkeys) | ||
1149 | 359 | m_readurl.assert_has_calls(readurl_expected_calls, any_order=True) | ||
1150 | 360 | |||
1151 | 361 | |||
1152 | 344 | # vi: ts=4 expandtab | 362 | # vi: ts=4 expandtab |
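The new GCE test fixes the contract of publish_host_keys() by asserting the exact url_helper.readurl() calls it must make. Read back from those assertions, the expected behaviour is approximately the following; this is an illustration of the asserted calls, not the implementation in cloudinit/sources/DataSourceGCE.py.

from cloudinit import url_helper

GUEST_ATTRIBUTES_URL = ('http://metadata.google.internal/computeMetadata/'
                        'v1/instance/guest-attributes/hostkeys/')
HEADERS = {'Metadata-Flavor': 'Google'}


def publish_host_keys(hostkeys):
    """PUT each (key type, key value) pair to the guest-attributes endpoint."""
    for key_type, key_value in hostkeys:
        url_helper.readurl(
            url=GUEST_ATTRIBUTES_URL + key_type,
            data=key_value.encode('utf-8'),
            headers=HEADERS,
            request_method='PUT',
            check_status=False)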
1153 | diff --git a/tools/ds-identify b/tools/ds-identify | |||
1154 | index 0305e36..e0d4865 100755 | |||
1155 | --- a/tools/ds-identify | |||
1156 | +++ b/tools/ds-identify | |||
1157 | @@ -124,7 +124,7 @@ DI_DSNAME="" | |||
1158 | 124 | # be searched if there is no setting found in config. | 124 | # be searched if there is no setting found in config. |
1159 | 125 | DI_DSLIST_DEFAULT="MAAS ConfigDrive NoCloud AltCloud Azure Bigstep \ | 125 | DI_DSLIST_DEFAULT="MAAS ConfigDrive NoCloud AltCloud Azure Bigstep \ |
1160 | 126 | CloudSigma CloudStack DigitalOcean AliYun Ec2 GCE OpenNebula OpenStack \ | 126 | CloudSigma CloudStack DigitalOcean AliYun Ec2 GCE OpenNebula OpenStack \ |
1162 | 127 | OVF SmartOS Scaleway Hetzner IBMCloud Oracle" | 127 | OVF SmartOS Scaleway Hetzner IBMCloud Oracle Exoscale" |
1163 | 128 | DI_DSLIST="" | 128 | DI_DSLIST="" |
1164 | 129 | DI_MODE="" | 129 | DI_MODE="" |
1165 | 130 | DI_ON_FOUND="" | 130 | DI_ON_FOUND="" |
1166 | @@ -553,6 +553,11 @@ dscheck_CloudStack() { | |||
1167 | 553 | return $DS_NOT_FOUND | 553 | return $DS_NOT_FOUND |
1168 | 554 | } | 554 | } |
1169 | 555 | 555 | ||
1170 | 556 | dscheck_Exoscale() { | ||
1171 | 557 | dmi_product_name_matches "Exoscale*" && return $DS_FOUND | ||
1172 | 558 | return $DS_NOT_FOUND | ||
1173 | 559 | } | ||
1174 | 560 | |||
1175 | 556 | dscheck_CloudSigma() { | 561 | dscheck_CloudSigma() { |
1176 | 557 | # http://paste.ubuntu.com/23624795/ | 562 | # http://paste.ubuntu.com/23624795/ |
1177 | 558 | dmi_product_name_matches "CloudSigma" && return $DS_FOUND | 563 | dmi_product_name_matches "CloudSigma" && return $DS_FOUND |
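The ds-identify hunk registers Exoscale in DI_DSLIST_DEFAULT and detects the platform by matching the DMI product name against "Exoscale*". Outside of ds-identify, the same check can be approximated in Python, assuming the usual sysfs location that the dmi_product_name_matches helper ultimately reads:

def is_exoscale_platform():
    """Best-effort check: does the DMI product name start with 'Exoscale'?"""
    try:
        with open('/sys/class/dmi/id/product_name') as dmi:
            return dmi.read().strip().startswith('Exoscale')
    except OSError:
        return False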