Merge lp:~barry/cloud-init/py2-3 into lp:~cloud-init-dev/cloud-init/trunk
- py2-3
- Merge into trunk
Status: | Merged |
---|---|
Merged at revision: | 1054 |
Proposed branch: | lp:~barry/cloud-init/py2-3 |
Merge into: | lp:~cloud-init-dev/cloud-init/trunk |
Diff against target: |
6414 lines (+1542/-1225) 111 files modified
.bzrignore (+4/-0) MANIFEST.in (+8/-0) cloudinit/config/cc_apt_configure.py (+1/-1) cloudinit/config/cc_ca_certs.py (+2/-2) cloudinit/config/cc_chef.py (+4/-2) cloudinit/config/cc_debug.py (+4/-3) cloudinit/config/cc_landscape.py (+1/-1) cloudinit/config/cc_mcollective.py (+8/-7) cloudinit/config/cc_phone_home.py (+2/-2) cloudinit/config/cc_puppet.py (+5/-5) cloudinit/config/cc_resolv_conf.py (+2/-2) cloudinit/config/cc_rightscale_userdata.py (+1/-1) cloudinit/config/cc_runcmd.py (+1/-1) cloudinit/config/cc_salt_minion.py (+1/-1) cloudinit/config/cc_seed_random.py (+3/-2) cloudinit/config/cc_ssh.py (+8/-8) cloudinit/config/cc_ssh_authkey_fingerprints.py (+1/-1) cloudinit/config/cc_write_files.py (+3/-2) cloudinit/config/cc_yum_add_repo.py (+4/-3) cloudinit/distros/__init__.py (+35/-34) cloudinit/distros/arch.py (+2/-2) cloudinit/distros/debian.py (+1/-1) cloudinit/distros/freebsd.py (+7/-5) cloudinit/distros/gentoo.py (+1/-1) cloudinit/distros/net_util.py (+1/-1) cloudinit/distros/parsers/hostname.py (+1/-1) cloudinit/distros/parsers/hosts.py (+1/-1) cloudinit/distros/parsers/resolv_conf.py (+1/-1) cloudinit/distros/parsers/sys_conf.py (+3/-2) cloudinit/distros/rhel.py (+1/-1) cloudinit/distros/rhel_util.py (+2/-2) cloudinit/distros/sles.py (+2/-2) cloudinit/ec2_utils.py (+4/-5) cloudinit/handlers/__init__.py (+8/-6) cloudinit/handlers/boot_hook.py (+1/-1) cloudinit/handlers/cloud_config.py (+1/-1) cloudinit/handlers/shell_script.py (+1/-1) cloudinit/handlers/upstart_job.py (+1/-1) cloudinit/helpers.py (+5/-7) cloudinit/log.py (+4/-3) cloudinit/mergers/__init__.py (+3/-1) cloudinit/mergers/m_dict.py (+3/-1) cloudinit/mergers/m_list.py (+4/-2) cloudinit/mergers/m_str.py (+6/-4) cloudinit/netinfo.py (+2/-2) cloudinit/signal_handler.py (+1/-1) cloudinit/sources/DataSourceAltCloud.py (+4/-4) cloudinit/sources/DataSourceAzure.py (+2/-2) cloudinit/sources/DataSourceConfigDrive.py (+2/-2) cloudinit/sources/DataSourceDigitalOcean.py (+5/-4) 
cloudinit/sources/DataSourceEc2.py (+2/-2) cloudinit/sources/DataSourceMAAS.py (+20/-28) cloudinit/sources/DataSourceOVF.py (+3/-3) cloudinit/sources/DataSourceOpenNebula.py (+6/-6) cloudinit/sources/DataSourceSmartOS.py (+12/-9) cloudinit/sources/__init__.py (+6/-4) cloudinit/sources/helpers/openstack.py (+6/-4) cloudinit/ssh_util.py (+3/-3) cloudinit/stages.py (+12/-11) cloudinit/templater.py (+1/-1) cloudinit/type_utils.py (+26/-6) cloudinit/url_helper.py (+15/-7) cloudinit/user_data.py (+6/-4) cloudinit/util.py (+137/-64) packages/bddeb (+1/-0) packages/brpm (+2/-0) requirements.txt (+4/-2) setup.py (+10/-4) templates/resolv.conf.tmpl (+1/-1) tests/unittests/helpers.py (+60/-45) tests/unittests/test__init__.py (+115/-125) tests/unittests/test_builtin_handlers.py (+26/-17) tests/unittests/test_cs_util.py (+29/-10) tests/unittests/test_data.py (+84/-69) tests/unittests/test_datasource/test_altcloud.py (+3/-3) tests/unittests/test_datasource/test_azure.py (+47/-51) tests/unittests/test_datasource/test_cloudsigma.py (+1/-0) tests/unittests/test_datasource/test_configdrive.py (+59/-45) tests/unittests/test_datasource/test_digitalocean.py (+3/-4) tests/unittests/test_datasource/test_gce.py (+2/-2) tests/unittests/test_datasource/test_maas.py (+50/-32) tests/unittests/test_datasource/test_nocloud.py (+29/-34) tests/unittests/test_datasource/test_opennebula.py (+13/-10) tests/unittests/test_datasource/test_openstack.py (+3/-4) tests/unittests/test_datasource/test_smartos.py (+15/-9) tests/unittests/test_distros/test_generic.py (+4/-2) tests/unittests/test_distros/test_hostname.py (+2/-2) tests/unittests/test_distros/test_hosts.py (+2/-2) tests/unittests/test_distros/test_netconfig.py (+123/-160) tests/unittests/test_distros/test_resolv.py (+2/-3) tests/unittests/test_distros/test_sysconfig.py (+2/-3) tests/unittests/test_distros/test_user_data_normalize.py (+4/-3) tests/unittests/test_filters/test_launch_index.py (+3/-5) 
tests/unittests/test_handler/test_handler_apt_configure.py (+16/-13) tests/unittests/test_handler/test_handler_ca_certs.py (+162/-128) tests/unittests/test_handler/test_handler_chef.py (+6/-2) tests/unittests/test_handler/test_handler_debug.py (+4/-1) tests/unittests/test_handler/test_handler_growpart.py (+67/-57) tests/unittests/test_handler/test_handler_locale.py (+7/-4) tests/unittests/test_handler/test_handler_seed_random.py (+7/-8) tests/unittests/test_handler/test_handler_set_hostname.py (+7/-4) tests/unittests/test_handler/test_handler_timezone.py (+7/-4) tests/unittests/test_handler/test_handler_yum_add_repo.py (+8/-4) tests/unittests/test_merging.py (+9/-7) tests/unittests/test_pathprefix2dict.py (+10/-6) tests/unittests/test_runs/test_merge_run.py (+5/-3) tests/unittests/test_runs/test_simple_run.py (+6/-5) tests/unittests/test_templating.py (+21/-0) tests/unittests/test_util.py (+43/-32) tools/ccfg-merge-debug (+2/-2) tox.ini (+23/-0) |
To merge this branch: | bzr merge lp:~barry/cloud-init/py2-3 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
cloud-init Commiters | Pending | ||
Review via email: mp+247239@code.launchpad.net |
Commit message
Description of the change
cloud-init ported to Python 3.4. Python 2.6 test suite repaired. With this branch the full suite now passes for 2.6, 2.7, and 3.4.
- 1058. By Barry Warsaw
-
Port test__init__.py to unittest.mock.
Joshua Harlow (harlowja) wrote : | # |
- 1059. By Barry Warsaw
-
Use .addCleanup() instead of a .tearDown() where appropriate, although we
might have to rewrite this for Python 2.6.
Disable Cepko tests (test_cs_util.py) since they are essentially worthless.
Convert test_azure to unittest.mock.
- 1060. By Barry Warsaw
-
Convert helpers.py and test_data.py from mocker to mock.
- 1061. By Barry Warsaw
-
Clean up.
- 1062. By Barry Warsaw
-
More test ports from mocker to mock.
- 1063. By Barry Warsaw
-
More conversions from mocker to mock.
- 1064. By Barry Warsaw
-
More conversions from mocker to mock.
- 1065. By Barry Warsaw
-
Conversion from mocker to mock completed.
- 1066. By Barry Warsaw
-
Low hanging Python 3 fruit.
- 1067. By Barry Warsaw
-
* Fix the filter() imports.
* In Py3, pass universal_newlines to subprocess.Popen()
Joshua Harlow (harlowja) wrote : | # |
Looks pretty good to me (seems like there is a new conflict against trunk/head); thanks for beating the tests into shape.
- 1068. By Barry Warsaw
-
More test repairs.
- 1069. By Barry Warsaw
-
Fix a few string/bytes problems with Python 3.
- 1070. By Barry Warsaw
-
Avoid a nose bug when running under the test suite and no exception is in
flight.
- 1071. By Barry Warsaw
-
More Python 3 test fixes.
- 1072. By Barry Warsaw
-
* More str/bytes fixes.
* Temporarily skip the MAAS tests in py3 since they need to be ported to oauthlib.
- 1073. By Barry Warsaw
-
Another handling of b64decode.
Also, restore Python 2 compatibility.
- 1074. By Barry Warsaw
-
Down to it.
- 1075. By Barry Warsaw
-
Python 3 tests pass, except for skips.
- 1076. By Barry Warsaw
-
Port the MAAS code to oauthlib.
Barry Warsaw (barry) wrote : | # |
This is ready for review and merging, at least modulo Python 2.6 support (which I will try to test on next).
% tox -e py27,py34
GLOB sdist-make: /home/barry/
py27 inst-nodeps: /home/barry/
py27 runtests: PYTHONHASHSEED=
py27 runtests: commands[0] | python -m nose tests
.......
-------
Ran 363 tests in 7.423s
OK (SKIP=5)
py34 inst-nodeps: /home/barry/
py34 runtests: PYTHONHASHSEED=
py34 runtests: commands[0] | python -m nose tests
.......
-------
Ran 363 tests in 9.542s
OK (SKIP=6)
_______
py27: commands succeeded
py34: commands succeeded
congratulations :)
- 1077. By Barry Warsaw
-
Trunk merged and ported.
- 1078. By Barry Warsaw
-
Repair the Python 2.6 tests.
- 1079. By Barry Warsaw
-
super() works in all of Python 2.6, 2.7, and 3.4.
Barry Warsaw (barry) wrote : | # |
Python 2.6 support restored. Test suite now passes in all of 2.6, 2.7, and 3.4. This is ready to go.
Joshua Harlow (harlowja) wrote : | # |
So just a few comments/questions.
Also I don't think test-requirements.txt is needed any more.
Otherwise looks pretty ok.
Barry Warsaw (barry) wrote : | # |
Thanks, I'll address each of the comments separately.
test-requirements.txt isn't needed any more. The tox.ini file specifies the test dependencies, but I'll leave removal of test-requirements.txt as a separate change. (Season to taste there too.)
- 1080. By Barry Warsaw
-
Respond to review:
- Remove str() wrappers to second argument to write_files() where it is no
longer necessary. Also: Fixed a couple of other octal literals which clearly weren't being
tested. - 1081. By Barry Warsaw
-
Remove some unused code.
- 1082. By Barry Warsaw
-
Remove a comment turd.
- 1083. By Barry Warsaw
-
Respond to review:
- Just use util.load_file() instead of yet another way to open and read the
file.
- 1084. By Barry Warsaw
-
Respond to review:
- Refactor both the base64 encoding and decoding into utility functions.
Also:
- Mechanically fix some other broken untested code.
- 1085. By Barry Warsaw
-
Respond to review:
- Refactor "fully" decoding the payload of a text/* part. In Python 3,
decode=True only means to decode according to Content-Transfer-Encoding, not
according to any charset in the Content-Type header. So do that.
- 1086. By Barry Warsaw
-
Remove debugging turd.
Barry Warsaw (barry) wrote : | # |
Thanks for the great review. I think I've addressed all the comments and made the changes that seem reasonable. Cheers!
Joshua Harlow (harlowja) wrote : | # |
> Thanks, I'll address each of the comments separately.
>
> test-requirements.txt isn't needed
> any more. The tox.ini file specifies the test dependencies, but I'll leave
> removal of test-requirements.txt as a separate change. (Feel free to
> season to taste there too.)
Ok dokie :-)
Bohuslav "Slavek" Kabrda (bkabrda) wrote : | # |
This is great work, thanks for doing it Barry!
Any estimate on when this will get merged?
Thanks a bunch!
Preview Diff
1 | === added file '.bzrignore' |
2 | --- .bzrignore 1970-01-01 00:00:00 +0000 |
3 | +++ .bzrignore 2015-01-27 20:16:43 +0000 |
4 | @@ -0,0 +1,4 @@ |
5 | +.tox |
6 | +dist |
7 | +cloud_init.egg-info |
8 | +__pycache__ |
9 | |
10 | === added file 'MANIFEST.in' |
11 | --- MANIFEST.in 1970-01-01 00:00:00 +0000 |
12 | +++ MANIFEST.in 2015-01-27 20:16:43 +0000 |
13 | @@ -0,0 +1,8 @@ |
14 | +include *.py MANIFEST.in ChangeLog |
15 | +global-include *.txt *.rst *.ini *.in *.conf *.cfg *.sh |
16 | +graft tools |
17 | +prune build |
18 | +prune dist |
19 | +prune .tox |
20 | +prune .bzr |
21 | +exclude .bzrignore |
22 | |
23 | === modified file 'cloudinit/config/cc_apt_configure.py' |
24 | --- cloudinit/config/cc_apt_configure.py 2014-08-26 18:50:11 +0000 |
25 | +++ cloudinit/config/cc_apt_configure.py 2015-01-27 20:16:43 +0000 |
26 | @@ -126,7 +126,7 @@ |
27 | |
28 | |
29 | def rename_apt_lists(old_mirrors, new_mirrors, lists_d="/var/lib/apt/lists"): |
30 | - for (name, omirror) in old_mirrors.iteritems(): |
31 | + for (name, omirror) in old_mirrors.items(): |
32 | nmirror = new_mirrors.get(name) |
33 | if not nmirror: |
34 | continue |
35 | |
36 | === modified file 'cloudinit/config/cc_ca_certs.py' |
37 | --- cloudinit/config/cc_ca_certs.py 2014-02-05 15:36:47 +0000 |
38 | +++ cloudinit/config/cc_ca_certs.py 2015-01-27 20:16:43 +0000 |
39 | @@ -44,7 +44,7 @@ |
40 | if certs: |
41 | # First ensure they are strings... |
42 | cert_file_contents = "\n".join([str(c) for c in certs]) |
43 | - util.write_file(CA_CERT_FULL_PATH, cert_file_contents, mode=0644) |
44 | + util.write_file(CA_CERT_FULL_PATH, cert_file_contents, mode=0o644) |
45 | |
46 | # Append cert filename to CA_CERT_CONFIG file. |
47 | # We have to strip the content because blank lines in the file |
48 | @@ -63,7 +63,7 @@ |
49 | """ |
50 | util.delete_dir_contents(CA_CERT_PATH) |
51 | util.delete_dir_contents(CA_CERT_SYSTEM_PATH) |
52 | - util.write_file(CA_CERT_CONFIG, "", mode=0644) |
53 | + util.write_file(CA_CERT_CONFIG, "", mode=0o644) |
54 | debconf_sel = "ca-certificates ca-certificates/trust_new_crts select no" |
55 | util.subp(('debconf-set-selections', '-'), debconf_sel) |
56 | |
57 | |
58 | === modified file 'cloudinit/config/cc_chef.py' |
59 | --- cloudinit/config/cc_chef.py 2014-11-22 20:41:31 +0000 |
60 | +++ cloudinit/config/cc_chef.py 2015-01-27 20:16:43 +0000 |
61 | @@ -76,6 +76,8 @@ |
62 | from cloudinit import url_helper |
63 | from cloudinit import util |
64 | |
65 | +import six |
66 | + |
67 | RUBY_VERSION_DEFAULT = "1.8" |
68 | |
69 | CHEF_DIRS = tuple([ |
70 | @@ -261,7 +263,7 @@ |
71 | cmd_args = chef_cfg['exec_arguments'] |
72 | if isinstance(cmd_args, (list, tuple)): |
73 | cmd.extend(cmd_args) |
74 | - elif isinstance(cmd_args, (str, basestring)): |
75 | + elif isinstance(cmd_args, six.string_types): |
76 | cmd.append(cmd_args) |
77 | else: |
78 | log.warn("Unknown type %s provided for chef" |
79 | @@ -300,7 +302,7 @@ |
80 | with util.tempdir() as tmpd: |
81 | # Use tmpdir over tmpfile to avoid 'text file busy' on execute |
82 | tmpf = "%s/chef-omnibus-install" % tmpd |
83 | - util.write_file(tmpf, str(content), mode=0700) |
84 | + util.write_file(tmpf, content, mode=0o700) |
85 | util.subp([tmpf], capture=False) |
86 | else: |
87 | log.warn("Unknown chef install type '%s'", install_type) |
88 | |
89 | === modified file 'cloudinit/config/cc_debug.py' |
90 | --- cloudinit/config/cc_debug.py 2014-11-22 02:10:16 +0000 |
91 | +++ cloudinit/config/cc_debug.py 2015-01-27 20:16:43 +0000 |
92 | @@ -34,7 +34,8 @@ |
93 | """ |
94 | |
95 | import copy |
96 | -from StringIO import StringIO |
97 | + |
98 | +from six import StringIO |
99 | |
100 | from cloudinit import type_utils |
101 | from cloudinit import util |
102 | @@ -77,7 +78,7 @@ |
103 | dump_cfg = copy.deepcopy(cfg) |
104 | for k in SKIP_KEYS: |
105 | dump_cfg.pop(k, None) |
106 | - all_keys = list(dump_cfg.keys()) |
107 | + all_keys = list(dump_cfg) |
108 | for k in all_keys: |
109 | if k.startswith("_"): |
110 | dump_cfg.pop(k, None) |
111 | @@ -103,6 +104,6 @@ |
112 | line = "ci-info: %s\n" % (line) |
113 | content_to_file.append(line) |
114 | if out_file: |
115 | - util.write_file(out_file, "".join(content_to_file), 0644, "w") |
116 | + util.write_file(out_file, "".join(content_to_file), 0o644, "w") |
117 | else: |
118 | util.multi_log("".join(content_to_file), console=True, stderr=False) |
119 | |
120 | === modified file 'cloudinit/config/cc_landscape.py' |
121 | --- cloudinit/config/cc_landscape.py 2014-01-27 22:34:35 +0000 |
122 | +++ cloudinit/config/cc_landscape.py 2015-01-27 20:16:43 +0000 |
123 | @@ -20,7 +20,7 @@ |
124 | |
125 | import os |
126 | |
127 | -from StringIO import StringIO |
128 | +from six import StringIO |
129 | |
130 | from configobj import ConfigObj |
131 | |
132 | |
133 | === modified file 'cloudinit/config/cc_mcollective.py' |
134 | --- cloudinit/config/cc_mcollective.py 2014-01-27 22:34:35 +0000 |
135 | +++ cloudinit/config/cc_mcollective.py 2015-01-27 20:16:43 +0000 |
136 | @@ -19,7 +19,8 @@ |
137 | # You should have received a copy of the GNU General Public License |
138 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
139 | |
140 | -from StringIO import StringIO |
141 | +import six |
142 | +from six import StringIO |
143 | |
144 | # Used since this can maintain comments |
145 | # and doesn't need a top level section |
146 | @@ -51,17 +52,17 @@ |
147 | # original file in order to be able to mix the rest up |
148 | mcollective_config = ConfigObj(SERVER_CFG) |
149 | # See: http://tiny.cc/jh9agw |
150 | - for (cfg_name, cfg) in mcollective_cfg['conf'].iteritems(): |
151 | + for (cfg_name, cfg) in mcollective_cfg['conf'].items(): |
152 | if cfg_name == 'public-cert': |
153 | - util.write_file(PUBCERT_FILE, cfg, mode=0644) |
154 | + util.write_file(PUBCERT_FILE, cfg, mode=0o644) |
155 | mcollective_config['plugin.ssl_server_public'] = PUBCERT_FILE |
156 | mcollective_config['securityprovider'] = 'ssl' |
157 | elif cfg_name == 'private-cert': |
158 | - util.write_file(PRICERT_FILE, cfg, mode=0600) |
159 | + util.write_file(PRICERT_FILE, cfg, mode=0o600) |
160 | mcollective_config['plugin.ssl_server_private'] = PRICERT_FILE |
161 | mcollective_config['securityprovider'] = 'ssl' |
162 | else: |
163 | - if isinstance(cfg, (basestring, str)): |
164 | + if isinstance(cfg, six.string_types): |
165 | # Just set it in the 'main' section |
166 | mcollective_config[cfg_name] = cfg |
167 | elif isinstance(cfg, (dict)): |
168 | @@ -69,7 +70,7 @@ |
169 | # if it is needed and then add/or create items as needed |
170 | if cfg_name not in mcollective_config.sections: |
171 | mcollective_config[cfg_name] = {} |
172 | - for (o, v) in cfg.iteritems(): |
173 | + for (o, v) in cfg.items(): |
174 | mcollective_config[cfg_name][o] = v |
175 | else: |
176 | # Otherwise just try to convert it to a string |
177 | @@ -81,7 +82,7 @@ |
178 | contents = StringIO() |
179 | mcollective_config.write(contents) |
180 | contents = contents.getvalue() |
181 | - util.write_file(SERVER_CFG, contents, mode=0644) |
182 | + util.write_file(SERVER_CFG, contents, mode=0o644) |
183 | |
184 | # Start mcollective |
185 | util.subp(['service', 'mcollective', 'start'], capture=False) |
186 | |
187 | === modified file 'cloudinit/config/cc_phone_home.py' |
188 | --- cloudinit/config/cc_phone_home.py 2014-08-26 18:50:11 +0000 |
189 | +++ cloudinit/config/cc_phone_home.py 2015-01-27 20:16:43 +0000 |
190 | @@ -81,7 +81,7 @@ |
191 | 'pub_key_ecdsa': '/etc/ssh/ssh_host_ecdsa_key.pub', |
192 | } |
193 | |
194 | - for (n, path) in pubkeys.iteritems(): |
195 | + for (n, path) in pubkeys.items(): |
196 | try: |
197 | all_keys[n] = util.load_file(path) |
198 | except: |
199 | @@ -99,7 +99,7 @@ |
200 | |
201 | # Get them read to be posted |
202 | real_submit_keys = {} |
203 | - for (k, v) in submit_keys.iteritems(): |
204 | + for (k, v) in submit_keys.items(): |
205 | if v is None: |
206 | real_submit_keys[k] = 'N/A' |
207 | else: |
208 | |
209 | === modified file 'cloudinit/config/cc_puppet.py' |
210 | --- cloudinit/config/cc_puppet.py 2014-02-05 15:36:47 +0000 |
211 | +++ cloudinit/config/cc_puppet.py 2015-01-27 20:16:43 +0000 |
212 | @@ -18,7 +18,7 @@ |
213 | # You should have received a copy of the GNU General Public License |
214 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
215 | |
216 | -from StringIO import StringIO |
217 | +from six import StringIO |
218 | |
219 | import os |
220 | import socket |
221 | @@ -81,22 +81,22 @@ |
222 | cleaned_contents = '\n'.join(cleaned_lines) |
223 | puppet_config.readfp(StringIO(cleaned_contents), |
224 | filename=PUPPET_CONF_PATH) |
225 | - for (cfg_name, cfg) in puppet_cfg['conf'].iteritems(): |
226 | + for (cfg_name, cfg) in puppet_cfg['conf'].items(): |
227 | # Cert configuration is a special case |
228 | # Dump the puppet master ca certificate in the correct place |
229 | if cfg_name == 'ca_cert': |
230 | # Puppet ssl sub-directory isn't created yet |
231 | # Create it with the proper permissions and ownership |
232 | - util.ensure_dir(PUPPET_SSL_DIR, 0771) |
233 | + util.ensure_dir(PUPPET_SSL_DIR, 0o771) |
234 | util.chownbyname(PUPPET_SSL_DIR, 'puppet', 'root') |
235 | util.ensure_dir(PUPPET_SSL_CERT_DIR) |
236 | util.chownbyname(PUPPET_SSL_CERT_DIR, 'puppet', 'root') |
237 | - util.write_file(PUPPET_SSL_CERT_PATH, str(cfg)) |
238 | + util.write_file(PUPPET_SSL_CERT_PATH, cfg) |
239 | util.chownbyname(PUPPET_SSL_CERT_PATH, 'puppet', 'root') |
240 | else: |
241 | # Iterate throug the config items, we'll use ConfigParser.set |
242 | # to overwrite or create new items as needed |
243 | - for (o, v) in cfg.iteritems(): |
244 | + for (o, v) in cfg.items(): |
245 | if o == 'certname': |
246 | # Expand %f as the fqdn |
247 | # TODO(harlowja) should this use the cloud fqdn?? |
248 | |
249 | === modified file 'cloudinit/config/cc_resolv_conf.py' |
250 | --- cloudinit/config/cc_resolv_conf.py 2014-08-21 20:26:43 +0000 |
251 | +++ cloudinit/config/cc_resolv_conf.py 2015-01-27 20:16:43 +0000 |
252 | @@ -66,8 +66,8 @@ |
253 | false_flags = [] |
254 | |
255 | if 'options' in params: |
256 | - for key, val in params['options'].iteritems(): |
257 | - if type(val) == bool: |
258 | + for key, val in params['options'].items(): |
259 | + if isinstance(val, bool): |
260 | if val: |
261 | flags.append(key) |
262 | else: |
263 | |
264 | === modified file 'cloudinit/config/cc_rightscale_userdata.py' |
265 | --- cloudinit/config/cc_rightscale_userdata.py 2014-08-26 18:50:11 +0000 |
266 | +++ cloudinit/config/cc_rightscale_userdata.py 2015-01-27 20:16:43 +0000 |
267 | @@ -82,7 +82,7 @@ |
268 | resp = uhelp.readurl(url) |
269 | # Ensure its a valid http response (and something gotten) |
270 | if resp.ok() and resp.contents: |
271 | - util.write_file(fname, str(resp), mode=0700) |
272 | + util.write_file(fname, resp, mode=0o700) |
273 | wrote_fns.append(fname) |
274 | except Exception as e: |
275 | captured_excps.append(e) |
276 | |
277 | === modified file 'cloudinit/config/cc_runcmd.py' |
278 | --- cloudinit/config/cc_runcmd.py 2012-10-28 02:25:48 +0000 |
279 | +++ cloudinit/config/cc_runcmd.py 2015-01-27 20:16:43 +0000 |
280 | @@ -33,6 +33,6 @@ |
281 | cmd = cfg["runcmd"] |
282 | try: |
283 | content = util.shellify(cmd) |
284 | - util.write_file(out_fn, content, 0700) |
285 | + util.write_file(out_fn, content, 0o700) |
286 | except: |
287 | util.logexc(log, "Failed to shellify %s into file %s", cmd, out_fn) |
288 | |
289 | === modified file 'cloudinit/config/cc_salt_minion.py' |
290 | --- cloudinit/config/cc_salt_minion.py 2014-02-05 15:36:47 +0000 |
291 | +++ cloudinit/config/cc_salt_minion.py 2015-01-27 20:16:43 +0000 |
292 | @@ -47,7 +47,7 @@ |
293 | # ... copy the key pair if specified |
294 | if 'public_key' in salt_cfg and 'private_key' in salt_cfg: |
295 | pki_dir = salt_cfg.get('pki_dir', '/etc/salt/pki') |
296 | - with util.umask(077): |
297 | + with util.umask(0o77): |
298 | util.ensure_dir(pki_dir) |
299 | pub_name = os.path.join(pki_dir, 'minion.pub') |
300 | pem_name = os.path.join(pki_dir, 'minion.pem') |
301 | |
302 | === modified file 'cloudinit/config/cc_seed_random.py' |
303 | --- cloudinit/config/cc_seed_random.py 2014-03-04 19:35:09 +0000 |
304 | +++ cloudinit/config/cc_seed_random.py 2015-01-27 20:16:43 +0000 |
305 | @@ -21,7 +21,8 @@ |
306 | |
307 | import base64 |
308 | import os |
309 | -from StringIO import StringIO |
310 | + |
311 | +from six import StringIO |
312 | |
313 | from cloudinit.settings import PER_INSTANCE |
314 | from cloudinit import log as logging |
315 | @@ -37,7 +38,7 @@ |
316 | if not encoding or encoding.lower() in ['raw']: |
317 | return data |
318 | elif encoding.lower() in ['base64', 'b64']: |
319 | - return base64.b64decode(data) |
320 | + return util.b64d(data) |
321 | elif encoding.lower() in ['gzip', 'gz']: |
322 | return util.decomp_gzip(data, quiet=False) |
323 | else: |
324 | |
325 | === modified file 'cloudinit/config/cc_ssh.py' |
326 | --- cloudinit/config/cc_ssh.py 2014-08-26 18:50:11 +0000 |
327 | +++ cloudinit/config/cc_ssh.py 2015-01-27 20:16:43 +0000 |
328 | @@ -34,12 +34,12 @@ |
329 | "rather than the user \\\"root\\\".\';echo;sleep 10\"") |
330 | |
331 | KEY_2_FILE = { |
332 | - "rsa_private": ("/etc/ssh/ssh_host_rsa_key", 0600), |
333 | - "rsa_public": ("/etc/ssh/ssh_host_rsa_key.pub", 0644), |
334 | - "dsa_private": ("/etc/ssh/ssh_host_dsa_key", 0600), |
335 | - "dsa_public": ("/etc/ssh/ssh_host_dsa_key.pub", 0644), |
336 | - "ecdsa_private": ("/etc/ssh/ssh_host_ecdsa_key", 0600), |
337 | - "ecdsa_public": ("/etc/ssh/ssh_host_ecdsa_key.pub", 0644), |
338 | + "rsa_private": ("/etc/ssh/ssh_host_rsa_key", 0o600), |
339 | + "rsa_public": ("/etc/ssh/ssh_host_rsa_key.pub", 0o644), |
340 | + "dsa_private": ("/etc/ssh/ssh_host_dsa_key", 0o600), |
341 | + "dsa_public": ("/etc/ssh/ssh_host_dsa_key.pub", 0o644), |
342 | + "ecdsa_private": ("/etc/ssh/ssh_host_ecdsa_key", 0o600), |
343 | + "ecdsa_public": ("/etc/ssh/ssh_host_ecdsa_key.pub", 0o644), |
344 | } |
345 | |
346 | PRIV_2_PUB = { |
347 | @@ -68,13 +68,13 @@ |
348 | |
349 | if "ssh_keys" in cfg: |
350 | # if there are keys in cloud-config, use them |
351 | - for (key, val) in cfg["ssh_keys"].iteritems(): |
352 | + for (key, val) in cfg["ssh_keys"].items(): |
353 | if key in KEY_2_FILE: |
354 | tgt_fn = KEY_2_FILE[key][0] |
355 | tgt_perms = KEY_2_FILE[key][1] |
356 | util.write_file(tgt_fn, val, tgt_perms) |
357 | |
358 | - for (priv, pub) in PRIV_2_PUB.iteritems(): |
359 | + for (priv, pub) in PRIV_2_PUB.items(): |
360 | if pub in cfg['ssh_keys'] or priv not in cfg['ssh_keys']: |
361 | continue |
362 | pair = (KEY_2_FILE[priv][0], KEY_2_FILE[pub][0]) |
363 | |
364 | === modified file 'cloudinit/config/cc_ssh_authkey_fingerprints.py' |
365 | --- cloudinit/config/cc_ssh_authkey_fingerprints.py 2014-08-26 18:50:11 +0000 |
366 | +++ cloudinit/config/cc_ssh_authkey_fingerprints.py 2015-01-27 20:16:43 +0000 |
367 | @@ -32,7 +32,7 @@ |
368 | |
369 | def _split_hash(bin_hash): |
370 | split_up = [] |
371 | - for i in xrange(0, len(bin_hash), 2): |
372 | + for i in range(0, len(bin_hash), 2): |
373 | split_up.append(bin_hash[i:i + 2]) |
374 | return split_up |
375 | |
376 | |
377 | === modified file 'cloudinit/config/cc_write_files.py' |
378 | --- cloudinit/config/cc_write_files.py 2012-08-22 18:12:32 +0000 |
379 | +++ cloudinit/config/cc_write_files.py 2015-01-27 20:16:43 +0000 |
380 | @@ -18,6 +18,7 @@ |
381 | |
382 | import base64 |
383 | import os |
384 | +import six |
385 | |
386 | from cloudinit.settings import PER_INSTANCE |
387 | from cloudinit import util |
388 | @@ -25,7 +26,7 @@ |
389 | frequency = PER_INSTANCE |
390 | |
391 | DEFAULT_OWNER = "root:root" |
392 | -DEFAULT_PERMS = 0644 |
393 | +DEFAULT_PERMS = 0o644 |
394 | UNKNOWN_ENC = 'text/plain' |
395 | |
396 | |
397 | @@ -79,7 +80,7 @@ |
398 | |
399 | def decode_perms(perm, default, log): |
400 | try: |
401 | - if isinstance(perm, (int, long, float)): |
402 | + if isinstance(perm, six.integer_types + (float,)): |
403 | # Just 'downcast' it (if a float) |
404 | return int(perm) |
405 | else: |
406 | |
407 | === modified file 'cloudinit/config/cc_yum_add_repo.py' |
408 | --- cloudinit/config/cc_yum_add_repo.py 2014-08-26 18:50:11 +0000 |
409 | +++ cloudinit/config/cc_yum_add_repo.py 2015-01-27 20:16:43 +0000 |
410 | @@ -18,10 +18,11 @@ |
411 | |
412 | import os |
413 | |
414 | +import configobj |
415 | +import six |
416 | + |
417 | from cloudinit import util |
418 | |
419 | -import configobj |
420 | - |
421 | |
422 | def _canonicalize_id(repo_id): |
423 | repo_id = repo_id.lower().replace("-", "_") |
424 | @@ -37,7 +38,7 @@ |
425 | # Can handle 'lists' in certain cases |
426 | # See: http://bit.ly/Qqrf1t |
427 | return "\n ".join([_format_repo_value(v) for v in val]) |
428 | - if not isinstance(val, (basestring, str)): |
429 | + if not isinstance(val, six.string_types): |
430 | return str(val) |
431 | return val |
432 | |
433 | |
434 | === modified file 'cloudinit/distros/__init__.py' |
435 | --- cloudinit/distros/__init__.py 2015-01-16 19:29:48 +0000 |
436 | +++ cloudinit/distros/__init__.py 2015-01-27 20:16:43 +0000 |
437 | @@ -21,10 +21,10 @@ |
438 | # You should have received a copy of the GNU General Public License |
439 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
440 | |
441 | -from StringIO import StringIO |
442 | +import six |
443 | +from six import StringIO |
444 | |
445 | import abc |
446 | -import itertools |
447 | import os |
448 | import re |
449 | |
450 | @@ -36,6 +36,7 @@ |
451 | |
452 | from cloudinit.distros.parsers import hosts |
453 | |
454 | + |
455 | OSFAMILIES = { |
456 | 'debian': ['debian', 'ubuntu'], |
457 | 'redhat': ['fedora', 'rhel'], |
458 | @@ -272,7 +273,7 @@ |
459 | if header: |
460 | contents.write("%s\n" % (header)) |
461 | contents.write("%s\n" % (eh)) |
462 | - util.write_file(self.hosts_fn, contents.getvalue(), mode=0644) |
463 | + util.write_file(self.hosts_fn, contents.getvalue(), mode=0o644) |
464 | |
465 | def _bring_up_interface(self, device_name): |
466 | cmd = ['ifup', device_name] |
467 | @@ -334,7 +335,7 @@ |
468 | redact_opts = ['passwd'] |
469 | |
470 | # Check the values and create the command |
471 | - for key, val in kwargs.iteritems(): |
472 | + for key, val in kwargs.items(): |
473 | |
474 | if key in adduser_opts and val and isinstance(val, str): |
475 | adduser_cmd.extend([adduser_opts[key], val]) |
476 | @@ -393,7 +394,7 @@ |
477 | if 'ssh_authorized_keys' in kwargs: |
478 | # Try to handle this in a smart manner. |
479 | keys = kwargs['ssh_authorized_keys'] |
480 | - if isinstance(keys, (basestring, str)): |
481 | + if isinstance(keys, six.string_types): |
482 | keys = [keys] |
483 | if isinstance(keys, dict): |
484 | keys = list(keys.values()) |
485 | @@ -468,7 +469,7 @@ |
486 | util.make_header(base="added"), |
487 | "#includedir %s" % (path), ''] |
488 | sudoers_contents = "\n".join(lines) |
489 | - util.write_file(sudo_base, sudoers_contents, 0440) |
490 | + util.write_file(sudo_base, sudoers_contents, 0o440) |
491 | else: |
492 | lines = ['', util.make_header(base="added"), |
493 | "#includedir %s" % (path), ''] |
494 | @@ -478,7 +479,7 @@ |
495 | except IOError as e: |
496 | util.logexc(LOG, "Failed to write %s", sudo_base) |
497 | raise e |
498 | - util.ensure_dir(path, 0750) |
499 | + util.ensure_dir(path, 0o750) |
500 | |
501 | def write_sudo_rules(self, user, rules, sudo_file=None): |
502 | if not sudo_file: |
503 | @@ -491,7 +492,7 @@ |
504 | if isinstance(rules, (list, tuple)): |
505 | for rule in rules: |
506 | lines.append("%s %s" % (user, rule)) |
507 | - elif isinstance(rules, (basestring, str)): |
508 | + elif isinstance(rules, six.string_types): |
509 | lines.append("%s %s" % (user, rules)) |
510 | else: |
511 | msg = "Can not create sudoers rule addition with type %r" |
512 | @@ -506,7 +507,7 @@ |
513 | content, |
514 | ] |
515 | try: |
516 | - util.write_file(sudo_file, "\n".join(contents), 0440) |
517 | + util.write_file(sudo_file, "\n".join(contents), 0o440) |
518 | except IOError as e: |
519 | util.logexc(LOG, "Failed to write sudoers file %s", sudo_file) |
520 | raise e |
521 | @@ -561,10 +562,10 @@ |
522 | subst['ec2_region'] = "%s" % availability_zone[0:-1] |
523 | |
524 | results = {} |
525 | - for (name, mirror) in mirror_info.get('failsafe', {}).iteritems(): |
526 | + for (name, mirror) in mirror_info.get('failsafe', {}).items(): |
527 | results[name] = mirror |
528 | |
529 | - for (name, searchlist) in mirror_info.get('search', {}).iteritems(): |
530 | + for (name, searchlist) in mirror_info.get('search', {}).items(): |
531 | mirrors = [] |
532 | for tmpl in searchlist: |
533 | try: |
534 | @@ -604,30 +605,30 @@ |
535 | # is the standard form used in the rest |
536 | # of cloud-init |
537 | def _normalize_groups(grp_cfg): |
538 | - if isinstance(grp_cfg, (str, basestring)): |
539 | + if isinstance(grp_cfg, six.string_types): |
540 | grp_cfg = grp_cfg.strip().split(",") |
541 | - if isinstance(grp_cfg, (list)): |
542 | + if isinstance(grp_cfg, list): |
543 | c_grp_cfg = {} |
544 | for i in grp_cfg: |
545 | - if isinstance(i, (dict)): |
546 | + if isinstance(i, dict): |
547 | for k, v in i.items(): |
548 | if k not in c_grp_cfg: |
549 | - if isinstance(v, (list)): |
550 | + if isinstance(v, list): |
551 | c_grp_cfg[k] = list(v) |
552 | - elif isinstance(v, (basestring, str)): |
553 | + elif isinstance(v, six.string_types): |
554 | c_grp_cfg[k] = [v] |
555 | else: |
556 | raise TypeError("Bad group member type %s" % |
557 | type_utils.obj_name(v)) |
558 | else: |
559 | - if isinstance(v, (list)): |
560 | + if isinstance(v, list): |
561 | c_grp_cfg[k].extend(v) |
562 | - elif isinstance(v, (basestring, str)): |
563 | + elif isinstance(v, six.string_types): |
564 | c_grp_cfg[k].append(v) |
565 | else: |
566 | raise TypeError("Bad group member type %s" % |
567 | type_utils.obj_name(v)) |
568 | - elif isinstance(i, (str, basestring)): |
569 | + elif isinstance(i, six.string_types): |
570 | if i not in c_grp_cfg: |
571 | c_grp_cfg[i] = [] |
572 | else: |
573 | @@ -635,7 +636,7 @@ |
574 | type_utils.obj_name(i)) |
575 | grp_cfg = c_grp_cfg |
576 | groups = {} |
577 | - if isinstance(grp_cfg, (dict)): |
578 | + if isinstance(grp_cfg, dict): |
579 | for (grp_name, grp_members) in grp_cfg.items(): |
580 | groups[grp_name] = util.uniq_merge_sorted(grp_members) |
581 | else: |
582 | @@ -661,29 +662,29 @@ |
583 | # entry 'default' which will be marked as true |
584 | # all other users will be marked as false. |
585 | def _normalize_users(u_cfg, def_user_cfg=None): |
586 | - if isinstance(u_cfg, (dict)): |
587 | + if isinstance(u_cfg, dict): |
588 | ad_ucfg = [] |
589 | for (k, v) in u_cfg.items(): |
590 | - if isinstance(v, (bool, int, basestring, str, float)): |
591 | + if isinstance(v, (bool, int, float) + six.string_types): |
592 | if util.is_true(v): |
593 | ad_ucfg.append(str(k)) |
594 | - elif isinstance(v, (dict)): |
595 | + elif isinstance(v, dict): |
596 | v['name'] = k |
597 | ad_ucfg.append(v) |
598 | else: |
599 | raise TypeError(("Unmappable user value type %s" |
600 | " for key %s") % (type_utils.obj_name(v), k)) |
601 | u_cfg = ad_ucfg |
602 | - elif isinstance(u_cfg, (str, basestring)): |
603 | + elif isinstance(u_cfg, six.string_types): |
604 | u_cfg = util.uniq_merge_sorted(u_cfg) |
605 | |
606 | users = {} |
607 | for user_config in u_cfg: |
608 | - if isinstance(user_config, (str, basestring, list)): |
609 | + if isinstance(user_config, (list,) + six.string_types): |
610 | for u in util.uniq_merge(user_config): |
611 | if u and u not in users: |
612 | users[u] = {} |
613 | - elif isinstance(user_config, (dict)): |
614 | + elif isinstance(user_config, dict): |
615 | if 'name' in user_config: |
616 | n = user_config.pop('name') |
617 | prev_config = users.get(n) or {} |
618 | @@ -784,11 +785,11 @@ |
619 | old_user = cfg['user'] |
620 | # Translate it into the format that is more useful |
621 | # going forward |
622 | - if isinstance(old_user, (basestring, str)): |
623 | + if isinstance(old_user, six.string_types): |
624 | old_user = { |
625 | 'name': old_user, |
626 | } |
627 | - if not isinstance(old_user, (dict)): |
628 | + if not isinstance(old_user, dict): |
629 | LOG.warn(("Format for 'user' key must be a string or " |
630 | "dictionary and not %s"), type_utils.obj_name(old_user)) |
631 | old_user = {} |
632 | @@ -813,7 +814,7 @@ |
633 | default_user_config = util.mergemanydict([old_user, distro_user_config]) |
634 | |
635 | base_users = cfg.get('users', []) |
636 | - if not isinstance(base_users, (list, dict, str, basestring)): |
637 | + if not isinstance(base_users, (list, dict) + six.string_types): |
638 | LOG.warn(("Format for 'users' key must be a comma separated string" |
639 | " or a dictionary or a list and not %s"), |
640 | type_utils.obj_name(base_users)) |
641 | @@ -822,12 +823,12 @@ |
642 | if old_user: |
643 | # Ensure that when user: is provided that this user |
644 | # always gets added (as the default user) |
645 | - if isinstance(base_users, (list)): |
646 | + if isinstance(base_users, list): |
647 | # Just add it on at the end... |
648 | base_users.append({'name': 'default'}) |
649 | - elif isinstance(base_users, (dict)): |
650 | + elif isinstance(base_users, dict): |
651 | base_users['default'] = dict(base_users).get('default', True) |
652 | - elif isinstance(base_users, (str, basestring)): |
653 | + elif isinstance(base_users, six.string_types): |
654 | # Just append it on to be re-parsed later |
655 | base_users += ",default" |
656 | |
657 | @@ -852,11 +853,11 @@ |
658 | return config['default'] |
659 | |
660 | tmp_users = users.items() |
661 | - tmp_users = dict(itertools.ifilter(safe_find, tmp_users)) |
662 | + tmp_users = dict(filter(safe_find, tmp_users)) |
663 | if not tmp_users: |
664 | return (default_name, default_config) |
665 | else: |
666 | - name = tmp_users.keys()[0] |
667 | + name = list(tmp_users)[0] |
668 | config = tmp_users[name] |
669 | config.pop('default', None) |
670 | return (name, config) |
671 | |
672 | === modified file 'cloudinit/distros/arch.py' |
673 | --- cloudinit/distros/arch.py 2015-01-16 19:29:48 +0000 |
674 | +++ cloudinit/distros/arch.py 2015-01-27 20:16:43 +0000 |
675 | @@ -66,7 +66,7 @@ |
676 | settings, entries) |
677 | dev_names = entries.keys() |
678 | # Format for netctl |
679 | - for (dev, info) in entries.iteritems(): |
680 | + for (dev, info) in entries.items(): |
681 | nameservers = [] |
682 | net_fn = self.network_conf_dir + dev |
683 | net_cfg = { |
684 | @@ -129,7 +129,7 @@ |
685 | if not conf: |
686 | conf = HostnameConf('') |
687 | conf.set_hostname(your_hostname) |
688 | - util.write_file(out_fn, str(conf), 0644) |
689 | + util.write_file(out_fn, conf, 0o644) |
690 | |
691 | def _read_system_hostname(self): |
692 | sys_hostname = self._read_hostname(self.hostname_conf_fn) |
693 | |
694 | === modified file 'cloudinit/distros/debian.py' |
695 | --- cloudinit/distros/debian.py 2015-01-16 19:29:48 +0000 |
696 | +++ cloudinit/distros/debian.py 2015-01-27 20:16:43 +0000 |
697 | @@ -97,7 +97,7 @@ |
698 | if not conf: |
699 | conf = HostnameConf('') |
700 | conf.set_hostname(your_hostname) |
701 | - util.write_file(out_fn, str(conf), 0644) |
702 | + util.write_file(out_fn, str(conf), 0o644) |
703 | |
704 | def _read_system_hostname(self): |
705 | sys_hostname = self._read_hostname(self.hostname_conf_fn) |
706 | |
707 | === modified file 'cloudinit/distros/freebsd.py' |
708 | --- cloudinit/distros/freebsd.py 2015-01-16 19:29:48 +0000 |
709 | +++ cloudinit/distros/freebsd.py 2015-01-27 20:16:43 +0000 |
710 | @@ -16,7 +16,8 @@ |
711 | # You should have received a copy of the GNU General Public License |
712 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
713 | |
714 | -from StringIO import StringIO |
715 | +import six |
716 | +from six import StringIO |
717 | |
718 | import re |
719 | |
720 | @@ -203,8 +204,9 @@ |
721 | |
722 | redact_opts = ['passwd'] |
723 | |
724 | - for key, val in kwargs.iteritems(): |
725 | - if key in adduser_opts and val and isinstance(val, basestring): |
726 | + for key, val in kwargs.items(): |
727 | + if (key in adduser_opts and val |
728 | + and isinstance(val, six.string_types)): |
729 | adduser_cmd.extend([adduser_opts[key], val]) |
730 | |
731 | # Redact certain fields from the logs |
732 | @@ -271,7 +273,7 @@ |
733 | nameservers = [] |
734 | searchdomains = [] |
735 | dev_names = entries.keys() |
736 | - for (device, info) in entries.iteritems(): |
737 | + for (device, info) in entries.items(): |
738 | # Skip the loopback interface. |
739 | if device.startswith('lo'): |
740 | continue |
741 | @@ -323,7 +325,7 @@ |
742 | resolvconf.add_search_domain(domain) |
743 | except ValueError: |
744 | util.logexc(LOG, "Failed to add search domain %s", domain) |
745 | - util.write_file(self.resolv_conf_fn, str(resolvconf), 0644) |
746 | + util.write_file(self.resolv_conf_fn, str(resolvconf), 0o644) |
747 | |
748 | return dev_names |
749 | |
750 | |
751 | === modified file 'cloudinit/distros/gentoo.py' |
752 | --- cloudinit/distros/gentoo.py 2015-01-16 19:29:48 +0000 |
753 | +++ cloudinit/distros/gentoo.py 2015-01-27 20:16:43 +0000 |
754 | @@ -108,7 +108,7 @@ |
755 | if not conf: |
756 | conf = HostnameConf('') |
757 | conf.set_hostname(your_hostname) |
758 | - util.write_file(out_fn, str(conf), 0644) |
759 | + util.write_file(out_fn, conf, 0o644) |
760 | |
761 | def _read_system_hostname(self): |
762 | sys_hostname = self._read_hostname(self.hostname_conf_fn) |
763 | |
764 | === modified file 'cloudinit/distros/net_util.py' |
765 | --- cloudinit/distros/net_util.py 2015-01-06 17:02:38 +0000 |
766 | +++ cloudinit/distros/net_util.py 2015-01-27 20:16:43 +0000 |
767 | @@ -103,7 +103,7 @@ |
768 | consume[cmd] = args |
769 | # Check if anything left over to consume |
770 | absorb = False |
771 | - for (cmd, args) in consume.iteritems(): |
772 | + for (cmd, args) in consume.items(): |
773 | if cmd == 'iface': |
774 | absorb = True |
775 | if absorb: |
776 | |
777 | === modified file 'cloudinit/distros/parsers/hostname.py' |
778 | --- cloudinit/distros/parsers/hostname.py 2012-11-12 22:30:08 +0000 |
779 | +++ cloudinit/distros/parsers/hostname.py 2015-01-27 20:16:43 +0000 |
780 | @@ -16,7 +16,7 @@ |
781 | # You should have received a copy of the GNU General Public License |
782 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
783 | |
784 | -from StringIO import StringIO |
785 | +from six import StringIO |
786 | |
787 | from cloudinit.distros.parsers import chop_comment |
788 | |
789 | |
790 | === modified file 'cloudinit/distros/parsers/hosts.py' |
791 | --- cloudinit/distros/parsers/hosts.py 2012-11-13 06:14:31 +0000 |
792 | +++ cloudinit/distros/parsers/hosts.py 2015-01-27 20:16:43 +0000 |
793 | @@ -16,7 +16,7 @@ |
794 | # You should have received a copy of the GNU General Public License |
795 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
796 | |
797 | -from StringIO import StringIO |
798 | +from six import StringIO |
799 | |
800 | from cloudinit.distros.parsers import chop_comment |
801 | |
802 | |
803 | === modified file 'cloudinit/distros/parsers/resolv_conf.py' |
804 | --- cloudinit/distros/parsers/resolv_conf.py 2014-08-26 19:53:41 +0000 |
805 | +++ cloudinit/distros/parsers/resolv_conf.py 2015-01-27 20:16:43 +0000 |
806 | @@ -16,7 +16,7 @@ |
807 | # You should have received a copy of the GNU General Public License |
808 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
809 | |
810 | -from StringIO import StringIO |
811 | +from six import StringIO |
812 | |
813 | from cloudinit import util |
814 | |
815 | |
816 | === modified file 'cloudinit/distros/parsers/sys_conf.py' |
817 | --- cloudinit/distros/parsers/sys_conf.py 2012-11-12 22:30:08 +0000 |
818 | +++ cloudinit/distros/parsers/sys_conf.py 2015-01-27 20:16:43 +0000 |
819 | @@ -16,7 +16,8 @@ |
820 | # You should have received a copy of the GNU General Public License |
821 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
822 | |
823 | -from StringIO import StringIO |
824 | +import six |
825 | +from six import StringIO |
826 | |
827 | import pipes |
828 | import re |
829 | @@ -69,7 +70,7 @@ |
830 | return out_contents.getvalue() |
831 | |
832 | def _quote(self, value, multiline=False): |
833 | - if not isinstance(value, (str, basestring)): |
834 | + if not isinstance(value, six.string_types): |
835 | raise ValueError('Value "%s" is not a string' % (value)) |
836 | if len(value) == 0: |
837 | return '' |
838 | |
839 | === modified file 'cloudinit/distros/rhel.py' |
840 | --- cloudinit/distros/rhel.py 2015-01-06 17:02:38 +0000 |
841 | +++ cloudinit/distros/rhel.py 2015-01-27 20:16:43 +0000 |
842 | @@ -73,7 +73,7 @@ |
843 | searchservers = [] |
844 | dev_names = entries.keys() |
845 | use_ipv6 = False |
846 | - for (dev, info) in entries.iteritems(): |
847 | + for (dev, info) in entries.items(): |
848 | net_fn = self.network_script_tpl % (dev) |
849 | net_cfg = { |
850 | 'DEVICE': dev, |
851 | |
852 | === modified file 'cloudinit/distros/rhel_util.py' |
853 | --- cloudinit/distros/rhel_util.py 2014-01-22 20:04:39 +0000 |
854 | +++ cloudinit/distros/rhel_util.py 2015-01-27 20:16:43 +0000 |
855 | @@ -50,7 +50,7 @@ |
856 | ] |
857 | if not exists: |
858 | lines.insert(0, util.make_header()) |
859 | - util.write_file(fn, "\n".join(lines) + "\n", 0644) |
860 | + util.write_file(fn, "\n".join(lines) + "\n", 0o644) |
861 | |
862 | |
863 | # Helper function to read a RHEL/SUSE /etc/sysconfig/* file |
864 | @@ -86,4 +86,4 @@ |
865 | r_conf.add_search_domain(s) |
866 | except ValueError: |
867 | util.logexc(LOG, "Failed at adding search domain %s", s) |
868 | - util.write_file(fn, str(r_conf), 0644) |
869 | + util.write_file(fn, r_conf, 0o644) |
870 | |
871 | === modified file 'cloudinit/distros/sles.py' |
872 | --- cloudinit/distros/sles.py 2015-01-16 19:29:48 +0000 |
873 | +++ cloudinit/distros/sles.py 2015-01-27 20:16:43 +0000 |
874 | @@ -62,7 +62,7 @@ |
875 | nameservers = [] |
876 | searchservers = [] |
877 | dev_names = entries.keys() |
878 | - for (dev, info) in entries.iteritems(): |
879 | + for (dev, info) in entries.items(): |
880 | net_fn = self.network_script_tpl % (dev) |
881 | mode = info.get('auto') |
882 | if mode and mode.lower() == 'true': |
883 | @@ -113,7 +113,7 @@ |
884 | if not conf: |
885 | conf = HostnameConf('') |
886 | conf.set_hostname(hostname) |
887 | - util.write_file(out_fn, str(conf), 0644) |
888 | + util.write_file(out_fn, str(conf), 0o644) |
889 | |
890 | def _read_system_hostname(self): |
891 | host_fn = self.hostname_conf_fn |
892 | |
893 | === modified file 'cloudinit/ec2_utils.py' |
894 | --- cloudinit/ec2_utils.py 2014-09-05 17:24:19 +0000 |
895 | +++ cloudinit/ec2_utils.py 2015-01-27 20:16:43 +0000 |
896 | @@ -17,7 +17,6 @@ |
897 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
898 | |
899 | import functools |
900 | -import httplib |
901 | import json |
902 | |
903 | from cloudinit import log as logging |
904 | @@ -25,7 +24,7 @@ |
905 | from cloudinit import util |
906 | |
907 | LOG = logging.getLogger(__name__) |
908 | -SKIP_USERDATA_CODES = frozenset([httplib.NOT_FOUND]) |
909 | +SKIP_USERDATA_CODES = frozenset([url_helper.NOT_FOUND]) |
910 | |
911 | |
912 | class MetadataLeafDecoder(object): |
913 | @@ -123,7 +122,7 @@ |
914 | leaf_contents = {} |
915 | for (field, resource) in leaves.items(): |
916 | leaf_url = url_helper.combine_url(base_url, resource) |
917 | - leaf_blob = str(self._caller(leaf_url)) |
918 | + leaf_blob = self._caller(leaf_url).contents |
919 | leaf_contents[field] = self._leaf_decoder(field, leaf_blob) |
920 | joined = {} |
921 | joined.update(child_contents) |
922 | @@ -160,7 +159,7 @@ |
923 | timeout=timeout, |
924 | retries=retries, |
925 | exception_cb=exception_cb) |
926 | - user_data = str(response) |
927 | + user_data = response.contents |
928 | except url_helper.UrlError as e: |
929 | if e.code not in SKIP_USERDATA_CODES: |
930 | util.logexc(LOG, "Failed fetching userdata from url %s", ud_url) |
931 | @@ -183,7 +182,7 @@ |
932 | |
933 | try: |
934 | response = caller(md_url) |
935 | - materializer = MetadataMaterializer(str(response), |
936 | + materializer = MetadataMaterializer(response.contents, |
937 | md_url, caller, |
938 | leaf_decoder=leaf_decoder) |
939 | md = materializer.materialize() |
940 | |
941 | === modified file 'cloudinit/handlers/__init__.py' |
942 | --- cloudinit/handlers/__init__.py 2014-01-16 21:57:21 +0000 |
943 | +++ cloudinit/handlers/__init__.py 2015-01-27 20:16:43 +0000 |
944 | @@ -22,6 +22,7 @@ |
945 | |
946 | import abc |
947 | import os |
948 | +import six |
949 | |
950 | from cloudinit.settings import (PER_ALWAYS, PER_INSTANCE, FREQUENCIES) |
951 | |
952 | @@ -147,7 +148,7 @@ |
953 | if not modfname.endswith(".py"): |
954 | modfname = "%s.py" % (modfname) |
955 | # TODO(harlowja): Check if path exists?? |
956 | - util.write_file(modfname, payload, 0600) |
957 | + util.write_file(modfname, payload, 0o600) |
958 | handlers = pdata['handlers'] |
959 | try: |
960 | mod = fixup_handler(importer.import_module(modname)) |
961 | @@ -174,11 +175,11 @@ |
962 | |
963 | def _escape_string(text): |
964 | try: |
965 | - return text.encode("string-escape") |
966 | - except TypeError: |
967 | + return text.encode("string_escape") |
968 | + except (LookupError, TypeError): |
969 | try: |
970 | - # Unicode doesn't support string-escape... |
971 | - return text.encode('unicode-escape') |
972 | + # Unicode (and Python 3's str) doesn't support string_escape... |
973 | + return text.encode('unicode_escape') |
974 | except TypeError: |
975 | # Give up... |
976 | pass |
977 | @@ -232,7 +233,8 @@ |
978 | headers = dict(part) |
979 | LOG.debug(headers) |
980 | headers['Content-Type'] = ctype |
981 | - callback(data, filename, part.get_payload(decode=True), headers) |
982 | + payload = util.fully_decoded_payload(part) |
983 | + callback(data, filename, payload, headers) |
984 | partnum = partnum + 1 |
985 | |
986 | |
987 | |
988 | === modified file 'cloudinit/handlers/boot_hook.py' |
989 | --- cloudinit/handlers/boot_hook.py 2014-08-26 19:53:41 +0000 |
990 | +++ cloudinit/handlers/boot_hook.py 2015-01-27 20:16:43 +0000 |
991 | @@ -50,7 +50,7 @@ |
992 | filepath = os.path.join(self.boothook_dir, filename) |
993 | contents = util.strip_prefix_suffix(util.dos2unix(payload), |
994 | prefix=BOOTHOOK_PREFIX) |
995 | - util.write_file(filepath, contents.lstrip(), 0700) |
996 | + util.write_file(filepath, contents.lstrip(), 0o700) |
997 | return filepath |
998 | |
999 | def handle_part(self, data, ctype, filename, payload, frequency): |
1000 | |
1001 | === modified file 'cloudinit/handlers/cloud_config.py' |
1002 | --- cloudinit/handlers/cloud_config.py 2014-08-26 19:53:41 +0000 |
1003 | +++ cloudinit/handlers/cloud_config.py 2015-01-27 20:16:43 +0000 |
1004 | @@ -95,7 +95,7 @@ |
1005 | lines.append(util.yaml_dumps(self.cloud_buf)) |
1006 | else: |
1007 | lines = [] |
1008 | - util.write_file(self.cloud_fn, "\n".join(lines), 0600) |
1009 | + util.write_file(self.cloud_fn, "\n".join(lines), 0o600) |
1010 | |
1011 | def _extract_mergers(self, payload, headers): |
1012 | merge_header_headers = '' |
1013 | |
1014 | === modified file 'cloudinit/handlers/shell_script.py' |
1015 | --- cloudinit/handlers/shell_script.py 2014-08-26 19:53:41 +0000 |
1016 | +++ cloudinit/handlers/shell_script.py 2015-01-27 20:16:43 +0000 |
1017 | @@ -52,4 +52,4 @@ |
1018 | filename = util.clean_filename(filename) |
1019 | payload = util.dos2unix(payload) |
1020 | path = os.path.join(self.script_dir, filename) |
1021 | - util.write_file(path, payload, 0700) |
1022 | + util.write_file(path, payload, 0o700) |
1023 | |
1024 | === modified file 'cloudinit/handlers/upstart_job.py' |
1025 | --- cloudinit/handlers/upstart_job.py 2014-08-26 19:53:41 +0000 |
1026 | +++ cloudinit/handlers/upstart_job.py 2015-01-27 20:16:43 +0000 |
1027 | @@ -65,7 +65,7 @@ |
1028 | |
1029 | payload = util.dos2unix(payload) |
1030 | path = os.path.join(self.upstart_dir, filename) |
1031 | - util.write_file(path, payload, 0644) |
1032 | + util.write_file(path, payload, 0o644) |
1033 | |
1034 | if SUITABLE_UPSTART: |
1035 | util.subp(["initctl", "reload-configuration"], capture=False) |
1036 | |
1037 | === modified file 'cloudinit/helpers.py' |
1038 | --- cloudinit/helpers.py 2014-01-17 20:12:31 +0000 |
1039 | +++ cloudinit/helpers.py 2015-01-27 20:16:43 +0000 |
1040 | @@ -23,10 +23,11 @@ |
1041 | from time import time |
1042 | |
1043 | import contextlib |
1044 | -import io |
1045 | import os |
1046 | |
1047 | -from ConfigParser import (NoSectionError, NoOptionError, RawConfigParser) |
1048 | +import six |
1049 | +from six.moves.configparser import ( |
1050 | + NoSectionError, NoOptionError, RawConfigParser) |
1051 | |
1052 | from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE, |
1053 | CFG_ENV_NAME) |
1054 | @@ -318,10 +319,7 @@ |
1055 | return self.registered[content_type] |
1056 | |
1057 | def items(self): |
1058 | - return self.registered.items() |
1059 | - |
1060 | - def iteritems(self): |
1061 | - return self.registered.iteritems() |
1062 | + return list(self.registered.items()) |
1063 | |
1064 | |
1065 | class Paths(object): |
1066 | @@ -449,7 +447,7 @@ |
1067 | |
1068 | def stringify(self, header=None): |
1069 | contents = '' |
1070 | - with io.BytesIO() as outputstream: |
1071 | + with six.StringIO() as outputstream: |
1072 | self.write(outputstream) |
1073 | outputstream.flush() |
1074 | contents = outputstream.getvalue() |
1075 | |
1076 | === modified file 'cloudinit/log.py' |
1077 | --- cloudinit/log.py 2013-04-17 16:42:55 +0000 |
1078 | +++ cloudinit/log.py 2015-01-27 20:16:43 +0000 |
1079 | @@ -28,7 +28,8 @@ |
1080 | import os |
1081 | import sys |
1082 | |
1083 | -from StringIO import StringIO |
1084 | +import six |
1085 | +from six import StringIO |
1086 | |
1087 | # Logging levels for easy access |
1088 | CRITICAL = logging.CRITICAL |
1089 | @@ -72,13 +73,13 @@ |
1090 | |
1091 | log_cfgs = [] |
1092 | log_cfg = cfg.get('logcfg') |
1093 | - if log_cfg and isinstance(log_cfg, (str, basestring)): |
1094 | + if log_cfg and isinstance(log_cfg, six.string_types): |
1095 | # If there is a 'logcfg' entry in the config, |
1096 | # respect it, it is the old keyname |
1097 | log_cfgs.append(str(log_cfg)) |
1098 | elif "log_cfgs" in cfg: |
1099 | for a_cfg in cfg['log_cfgs']: |
1100 | - if isinstance(a_cfg, (basestring, str)): |
1101 | + if isinstance(a_cfg, six.string_types): |
1102 | log_cfgs.append(a_cfg) |
1103 | elif isinstance(a_cfg, (collections.Iterable)): |
1104 | cfg_str = [str(c) for c in a_cfg] |
1105 | |
1106 | === modified file 'cloudinit/mergers/__init__.py' |
1107 | --- cloudinit/mergers/__init__.py 2014-09-02 20:31:18 +0000 |
1108 | +++ cloudinit/mergers/__init__.py 2015-01-27 20:16:43 +0000 |
1109 | @@ -18,6 +18,8 @@ |
1110 | |
1111 | import re |
1112 | |
1113 | +import six |
1114 | + |
1115 | from cloudinit import importer |
1116 | from cloudinit import log as logging |
1117 | from cloudinit import type_utils |
1118 | @@ -95,7 +97,7 @@ |
1119 | raw_mergers = config.pop('merge_type', None) |
1120 | if raw_mergers is None: |
1121 | return parsed_mergers |
1122 | - if isinstance(raw_mergers, (str, basestring)): |
1123 | + if isinstance(raw_mergers, six.string_types): |
1124 | return string_extract_mergers(raw_mergers) |
1125 | for m in raw_mergers: |
1126 | if isinstance(m, (dict)): |
1127 | |
1128 | === modified file 'cloudinit/mergers/m_dict.py' |
1129 | --- cloudinit/mergers/m_dict.py 2013-05-03 22:05:45 +0000 |
1130 | +++ cloudinit/mergers/m_dict.py 2015-01-27 20:16:43 +0000 |
1131 | @@ -16,6 +16,8 @@ |
1132 | # You should have received a copy of the GNU General Public License |
1133 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
1134 | |
1135 | +import six |
1136 | + |
1137 | DEF_MERGE_TYPE = 'no_replace' |
1138 | MERGE_TYPES = ('replace', DEF_MERGE_TYPE,) |
1139 | |
1140 | @@ -57,7 +59,7 @@ |
1141 | return new_v |
1142 | if isinstance(new_v, (list, tuple)) and self._recurse_array: |
1143 | return self._merger.merge(old_v, new_v) |
1144 | - if isinstance(new_v, (basestring)) and self._recurse_str: |
1145 | + if isinstance(new_v, six.string_types) and self._recurse_str: |
1146 | return self._merger.merge(old_v, new_v) |
1147 | if isinstance(new_v, (dict)) and self._recurse_dict: |
1148 | return self._merger.merge(old_v, new_v) |
1149 | |
1150 | === modified file 'cloudinit/mergers/m_list.py' |
1151 | --- cloudinit/mergers/m_list.py 2014-08-26 18:50:11 +0000 |
1152 | +++ cloudinit/mergers/m_list.py 2015-01-27 20:16:43 +0000 |
1153 | @@ -16,6 +16,8 @@ |
1154 | # You should have received a copy of the GNU General Public License |
1155 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
1156 | |
1157 | +import six |
1158 | + |
1159 | DEF_MERGE_TYPE = 'replace' |
1160 | MERGE_TYPES = ('append', 'prepend', DEF_MERGE_TYPE, 'no_replace') |
1161 | |
1162 | @@ -73,7 +75,7 @@ |
1163 | return old_v |
1164 | if isinstance(new_v, (list, tuple)) and self._recurse_array: |
1165 | return self._merger.merge(old_v, new_v) |
1166 | - if isinstance(new_v, (str, basestring)) and self._recurse_str: |
1167 | + if isinstance(new_v, six.string_types) and self._recurse_str: |
1168 | return self._merger.merge(old_v, new_v) |
1169 | if isinstance(new_v, (dict)) and self._recurse_dict: |
1170 | return self._merger.merge(old_v, new_v) |
1171 | @@ -82,6 +84,6 @@ |
1172 | # Ok now we are replacing same indexes |
1173 | merged_list.extend(value) |
1174 | common_len = min(len(merged_list), len(merge_with)) |
1175 | - for i in xrange(0, common_len): |
1176 | + for i in range(0, common_len): |
1177 | merged_list[i] = merge_same_index(merged_list[i], merge_with[i]) |
1178 | return merged_list |
1179 | |
1180 | === modified file 'cloudinit/mergers/m_str.py' |
1181 | --- cloudinit/mergers/m_str.py 2013-05-03 21:41:28 +0000 |
1182 | +++ cloudinit/mergers/m_str.py 2015-01-27 20:16:43 +0000 |
1183 | @@ -17,6 +17,8 @@ |
1184 | # You should have received a copy of the GNU General Public License |
1185 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
1186 | |
1187 | +import six |
1188 | + |
1189 | |
1190 | class Merger(object): |
1191 | def __init__(self, _merger, opts): |
1192 | @@ -34,11 +36,11 @@ |
1193 | # perform the following action, if appending we will |
1194 | # merge them together, otherwise we will just return value. |
1195 | def _on_str(self, value, merge_with): |
1196 | - if not isinstance(value, (basestring)): |
1197 | + if not isinstance(value, six.string_types): |
1198 | return merge_with |
1199 | if not self._append: |
1200 | return merge_with |
1201 | - if isinstance(value, unicode): |
1202 | - return value + unicode(merge_with) |
1203 | + if isinstance(value, six.text_type): |
1204 | + return value + six.text_type(merge_with) |
1205 | else: |
1206 | - return value + str(merge_with) |
1207 | + return value + six.binary_type(merge_with) |
1208 | |
1209 | === modified file 'cloudinit/netinfo.py' |
1210 | --- cloudinit/netinfo.py 2014-12-01 05:35:38 +0000 |
1211 | +++ cloudinit/netinfo.py 2015-01-27 20:16:43 +0000 |
1212 | @@ -87,7 +87,7 @@ |
1213 | devs[curdev][target] = toks[i][len(field) + 1:] |
1214 | |
1215 | if empty != "": |
1216 | - for (_devname, dev) in devs.iteritems(): |
1217 | + for (_devname, dev) in devs.items(): |
1218 | for field in dev: |
1219 | if dev[field] == "": |
1220 | dev[field] = empty |
1221 | @@ -181,7 +181,7 @@ |
1222 | else: |
1223 | fields = ['Device', 'Up', 'Address', 'Mask', 'Scope', 'Hw-Address'] |
1224 | tbl = PrettyTable(fields) |
1225 | - for (dev, d) in netdev.iteritems(): |
1226 | + for (dev, d) in netdev.items(): |
1227 | tbl.add_row([dev, d["up"], d["addr"], d["mask"], ".", d["hwaddr"]]) |
1228 | if d.get('addr6'): |
1229 | tbl.add_row([dev, d["up"], |
1230 | |
1231 | === modified file 'cloudinit/signal_handler.py' |
1232 | --- cloudinit/signal_handler.py 2012-09-19 20:33:56 +0000 |
1233 | +++ cloudinit/signal_handler.py 2015-01-27 20:16:43 +0000 |
1234 | @@ -22,7 +22,7 @@ |
1235 | import signal |
1236 | import sys |
1237 | |
1238 | -from StringIO import StringIO |
1239 | +from six import StringIO |
1240 | |
1241 | from cloudinit import log as logging |
1242 | from cloudinit import util |
1243 | |
1244 | === modified file 'cloudinit/sources/DataSourceAltCloud.py' |
1245 | --- cloudinit/sources/DataSourceAltCloud.py 2015-01-14 19:24:09 +0000 |
1246 | +++ cloudinit/sources/DataSourceAltCloud.py 2015-01-27 20:16:43 +0000 |
1247 | @@ -200,11 +200,11 @@ |
1248 | cmd = CMD_PROBE_FLOPPY |
1249 | (cmd_out, _err) = util.subp(cmd) |
1250 | LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out)) |
1251 | - except ProcessExecutionError, _err: |
1252 | + except ProcessExecutionError as _err: |
1253 | util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), |
1254 | _err.message) |
1255 | return False |
1256 | - except OSError, _err: |
1257 | + except OSError as _err: |
1258 | util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), |
1259 | _err.message) |
1260 | return False |
1261 | @@ -217,11 +217,11 @@ |
1262 | cmd.append('--exit-if-exists=' + floppy_dev) |
1263 | (cmd_out, _err) = util.subp(cmd) |
1264 | LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out)) |
1265 | - except ProcessExecutionError, _err: |
1266 | + except ProcessExecutionError as _err: |
1267 | util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), |
1268 | _err.message) |
1269 | return False |
1270 | - except OSError, _err: |
1271 | + except OSError as _err: |
1272 | util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), |
1273 | _err.message) |
1274 | return False |
1275 | |
1276 | === modified file 'cloudinit/sources/DataSourceAzure.py' |
1277 | --- cloudinit/sources/DataSourceAzure.py 2014-08-26 18:50:11 +0000 |
1278 | +++ cloudinit/sources/DataSourceAzure.py 2015-01-27 20:16:43 +0000 |
1279 | @@ -151,7 +151,7 @@ |
1280 | |
1281 | # walinux agent writes files world readable, but expects |
1282 | # the directory to be protected. |
1283 | - write_files(ddir, files, dirmode=0700) |
1284 | + write_files(ddir, files, dirmode=0o700) |
1285 | |
1286 | # handle the hostname 'publishing' |
1287 | try: |
1288 | @@ -390,7 +390,7 @@ |
1289 | util.ensure_dir(datadir, dirmode) |
1290 | for (name, content) in files.items(): |
1291 | util.write_file(filename=os.path.join(datadir, name), |
1292 | - content=content, mode=0600) |
1293 | + content=content, mode=0o600) |
1294 | |
1295 | |
1296 | def invoke_agent(cmd): |
1297 | |
1298 | === modified file 'cloudinit/sources/DataSourceConfigDrive.py' |
1299 | --- cloudinit/sources/DataSourceConfigDrive.py 2015-01-06 17:02:38 +0000 |
1300 | +++ cloudinit/sources/DataSourceConfigDrive.py 2015-01-27 20:16:43 +0000 |
1301 | @@ -216,11 +216,11 @@ |
1302 | files = data.get('files', {}) |
1303 | if files: |
1304 | LOG.debug("Writing %s injected files", len(files)) |
1305 | - for (filename, content) in files.iteritems(): |
1306 | + for (filename, content) in files.items(): |
1307 | if not filename.startswith(os.sep): |
1308 | filename = os.sep + filename |
1309 | try: |
1310 | - util.write_file(filename, content, mode=0660) |
1311 | + util.write_file(filename, content, mode=0o660) |
1312 | except IOError: |
1313 | util.logexc(LOG, "Failed writing file: %s", filename) |
1314 | |
1315 | |
1316 | === modified file 'cloudinit/sources/DataSourceDigitalOcean.py' |
1317 | --- cloudinit/sources/DataSourceDigitalOcean.py 2015-01-06 17:02:38 +0000 |
1318 | +++ cloudinit/sources/DataSourceDigitalOcean.py 2015-01-27 20:16:43 +0000 |
1319 | @@ -18,7 +18,7 @@ |
1320 | from cloudinit import util |
1321 | from cloudinit import sources |
1322 | from cloudinit import ec2_utils |
1323 | -from types import StringType |
1324 | + |
1325 | import functools |
1326 | |
1327 | |
1328 | @@ -72,10 +72,11 @@ |
1329 | return "\n".join(self.metadata['vendor-data']) |
1330 | |
1331 | def get_public_ssh_keys(self): |
1332 | - if type(self.metadata['public-keys']) is StringType: |
1333 | - return [self.metadata['public-keys']] |
1334 | + public_keys = self.metadata['public-keys'] |
1335 | + if isinstance(public_keys, list): |
1336 | + return public_keys |
1337 | else: |
1338 | - return self.metadata['public-keys'] |
1339 | + return [public_keys] |
1340 | |
1341 | @property |
1342 | def availability_zone(self): |
1343 | |
1344 | === modified file 'cloudinit/sources/DataSourceEc2.py' |
1345 | --- cloudinit/sources/DataSourceEc2.py 2014-02-01 20:03:32 +0000 |
1346 | +++ cloudinit/sources/DataSourceEc2.py 2015-01-27 20:16:43 +0000 |
1347 | @@ -156,8 +156,8 @@ |
1348 | # 'ephemeral0': '/dev/sdb', |
1349 | # 'root': '/dev/sda1'} |
1350 | found = None |
1351 | - bdm_items = self.metadata['block-device-mapping'].iteritems() |
1352 | - for (entname, device) in bdm_items: |
1353 | + bdm = self.metadata['block-device-mapping'] |
1354 | + for (entname, device) in bdm.items(): |
1355 | if entname == name: |
1356 | found = device |
1357 | break |
1358 | |
1359 | === modified file 'cloudinit/sources/DataSourceMAAS.py' |
1360 | --- cloudinit/sources/DataSourceMAAS.py 2013-04-25 15:58:38 +0000 |
1361 | +++ cloudinit/sources/DataSourceMAAS.py 2015-01-27 20:16:43 +0000 |
1362 | @@ -18,12 +18,15 @@ |
1363 | # You should have received a copy of the GNU General Public License |
1364 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
1365 | |
1366 | +from __future__ import print_function |
1367 | + |
1368 | from email.utils import parsedate |
1369 | import errno |
1370 | -import oauth.oauth as oauth |
1371 | +import oauthlib |
1372 | import os |
1373 | import time |
1374 | -import urllib2 |
1375 | + |
1376 | +from six.moves.urllib_request import Request, urlopen |
1377 | |
1378 | from cloudinit import log as logging |
1379 | from cloudinit import sources |
1380 | @@ -262,7 +265,7 @@ |
1381 | |
1382 | userdata = content.get('user-data', "") |
1383 | md = {} |
1384 | - for (key, val) in content.iteritems(): |
1385 | + for (key, val) in content.items(): |
1386 | if key == 'user-data': |
1387 | continue |
1388 | md[key] = val |
1389 | @@ -272,25 +275,14 @@ |
1390 | |
1391 | def oauth_headers(url, consumer_key, token_key, token_secret, consumer_secret, |
1392 | timestamp=None): |
1393 | - consumer = oauth.OAuthConsumer(consumer_key, consumer_secret) |
1394 | - token = oauth.OAuthToken(token_key, token_secret) |
1395 | - |
1396 | - if timestamp is None: |
1397 | - ts = int(time.time()) |
1398 | - else: |
1399 | - ts = timestamp |
1400 | - |
1401 | - params = { |
1402 | - 'oauth_version': "1.0", |
1403 | - 'oauth_nonce': oauth.generate_nonce(), |
1404 | - 'oauth_timestamp': ts, |
1405 | - 'oauth_token': token.key, |
1406 | - 'oauth_consumer_key': consumer.key, |
1407 | - } |
1408 | - req = oauth.OAuthRequest(http_url=url, parameters=params) |
1409 | - req.sign_request(oauth.OAuthSignatureMethod_PLAINTEXT(), |
1410 | - consumer, token) |
1411 | - return req.to_header() |
1412 | + client = oauthlib.oauth1.Client( |
1413 | + consumer_key, |
1414 | + client_secret=consumer_secret, |
1415 | + resource_owner_key=token_key, |
1416 | + resource_owner_secret=token_secret, |
1417 | + signature_method=oauthlib.SIGNATURE_PLAINTEXT) |
1418 | + uri, signed_headers, body = client.sign(url) |
1419 | + return signed_headers |
1420 | |
1421 | |
1422 | class MAASSeedDirNone(Exception): |
1423 | @@ -357,11 +349,11 @@ |
1424 | creds[key] = cfg[key] |
1425 | |
1426 | def geturl(url, headers_cb): |
1427 | - req = urllib2.Request(url, data=None, headers=headers_cb(url)) |
1428 | - return (urllib2.urlopen(req).read()) |
1429 | + req = Request(url, data=None, headers=headers_cb(url)) |
1430 | + return urlopen(req).read() |
1431 | |
1432 | def printurl(url, headers_cb): |
1433 | - print "== %s ==\n%s\n" % (url, geturl(url, headers_cb)) |
1434 | + print("== %s ==\n%s\n" % (url, geturl(url, headers_cb))) |
1435 | |
1436 | def crawl(url, headers_cb=None): |
1437 | if url.endswith("/"): |
1438 | @@ -386,9 +378,9 @@ |
1439 | version=args.apiver) |
1440 | else: |
1441 | (userdata, metadata) = read_maas_seed_url(args.url) |
1442 | - print "=== userdata ===" |
1443 | - print userdata |
1444 | - print "=== metadata ===" |
1445 | + print("=== userdata ===") |
1446 | + print(userdata) |
1447 | + print("=== metadata ===") |
1448 | pprint.pprint(metadata) |
1449 | |
1450 | elif args.subcmd == "get": |
1451 | |
1452 | === modified file 'cloudinit/sources/DataSourceOVF.py' |
1453 | --- cloudinit/sources/DataSourceOVF.py 2014-09-22 18:35:03 +0000 |
1454 | +++ cloudinit/sources/DataSourceOVF.py 2015-01-27 20:16:43 +0000 |
1455 | @@ -66,7 +66,7 @@ |
1456 | np = {'iso': transport_iso9660, |
1457 | 'vmware-guestd': transport_vmware_guestd, } |
1458 | name = None |
1459 | - for (name, transfunc) in np.iteritems(): |
1460 | + for (name, transfunc) in np.items(): |
1461 | (contents, _dev, _fname) = transfunc() |
1462 | if contents: |
1463 | break |
1464 | @@ -138,7 +138,7 @@ |
1465 | ud = "" |
1466 | cfg_props = ['password'] |
1467 | md_props = ['seedfrom', 'local-hostname', 'public-keys', 'instance-id'] |
1468 | - for (prop, val) in props.iteritems(): |
1469 | + for (prop, val) in props.items(): |
1470 | if prop == 'hostname': |
1471 | prop = "local-hostname" |
1472 | if prop in md_props: |
1473 | @@ -183,7 +183,7 @@ |
1474 | |
1475 | # Go through mounts to see if it was already mounted |
1476 | mounts = util.mounts() |
1477 | - for (dev, info) in mounts.iteritems(): |
1478 | + for (dev, info) in mounts.items(): |
1479 | fstype = info['fstype'] |
1480 | if fstype != "iso9660" and require_iso: |
1481 | continue |
1482 | |
1483 | === modified file 'cloudinit/sources/DataSourceOpenNebula.py' |
1484 | --- cloudinit/sources/DataSourceOpenNebula.py 2014-08-26 19:53:41 +0000 |
1485 | +++ cloudinit/sources/DataSourceOpenNebula.py 2015-01-27 20:16:43 +0000 |
1486 | @@ -34,6 +34,7 @@ |
1487 | from cloudinit import sources |
1488 | from cloudinit import util |
1489 | |
1490 | + |
1491 | LOG = logging.getLogger(__name__) |
1492 | |
1493 | DEFAULT_IID = "iid-dsopennebula" |
1494 | @@ -280,7 +281,7 @@ |
1495 | |
1496 | # allvars expands to all existing variables by using '${!x*}' notation |
1497 | # where x is lower or upper case letters or '_' |
1498 | - allvars = ["${!%s*}" % x for x in string.letters + "_"] |
1499 | + allvars = ["${!%s*}" % x for x in string.ascii_letters + "_"] |
1500 | |
1501 | keylist_in = keylist |
1502 | if keylist is None: |
1503 | @@ -379,9 +380,8 @@ |
1504 | raise BrokenContextDiskDir("configured user '%s' " |
1505 | "does not exist", asuser) |
1506 | try: |
1507 | - with open(os.path.join(source_dir, 'context.sh'), 'r') as f: |
1508 | - content = f.read().strip() |
1509 | - |
1510 | + path = os.path.join(source_dir, 'context.sh') |
1511 | + content = util.load_file(path) |
1512 | context = parse_shell_config(content, asuser=asuser) |
1513 | except util.ProcessExecutionError as e: |
1514 | raise BrokenContextDiskDir("Error processing context.sh: %s" % (e)) |
1515 | @@ -426,14 +426,14 @@ |
1516 | context.get('USER_DATA_ENCODING')) |
1517 | if encoding == "base64": |
1518 | try: |
1519 | - results['userdata'] = base64.b64decode(results['userdata']) |
1520 | + results['userdata'] = util.b64d(results['userdata']) |
1521 | except TypeError: |
1522 | LOG.warn("Failed base64 decoding of userdata") |
1523 | |
1524 | # generate static /etc/network/interfaces |
1525 | # only if there are any required context variables |
1526 | # http://opennebula.org/documentation:rel3.8:cong#network_configuration |
1527 | - for k in context.keys(): |
1528 | + for k in context: |
1529 | if re.match(r'^ETH\d+_IP$', k): |
1530 | (out, _) = util.subp(['/sbin/ip', 'link']) |
1531 | net = OpenNebulaNetwork(out, context) |
1532 | |
1533 | === modified file 'cloudinit/sources/DataSourceSmartOS.py' |
1534 | --- cloudinit/sources/DataSourceSmartOS.py 2015-01-14 19:24:09 +0000 |
1535 | +++ cloudinit/sources/DataSourceSmartOS.py 2015-01-27 20:16:43 +0000 |
1536 | @@ -30,12 +30,13 @@ |
1537 | # Comments with "@datadictionary" are snippets of the definition |
1538 | |
1539 | import base64 |
1540 | +import binascii |
1541 | +import os |
1542 | +import serial |
1543 | + |
1544 | from cloudinit import log as logging |
1545 | from cloudinit import sources |
1546 | from cloudinit import util |
1547 | -import os |
1548 | -import os.path |
1549 | -import serial |
1550 | |
1551 | |
1552 | LOG = logging.getLogger(__name__) |
1553 | @@ -201,7 +202,7 @@ |
1554 | if b64_all is not None: |
1555 | self.b64_all = util.is_true(b64_all) |
1556 | |
1557 | - for ci_noun, attribute in SMARTOS_ATTRIB_MAP.iteritems(): |
1558 | + for ci_noun, attribute in SMARTOS_ATTRIB_MAP.items(): |
1559 | smartos_noun, strip = attribute |
1560 | md[ci_noun] = self.query(smartos_noun, strip=strip) |
1561 | |
1562 | @@ -218,11 +219,12 @@ |
1563 | user_script = os.path.join(data_d, 'user-script') |
1564 | u_script_l = "%s/user-script" % LEGACY_USER_D |
1565 | write_boot_content(md.get('user-script'), content_f=user_script, |
1566 | - link=u_script_l, shebang=True, mode=0700) |
1567 | + link=u_script_l, shebang=True, mode=0o700) |
1568 | |
1569 | operator_script = os.path.join(data_d, 'operator-script') |
1570 | write_boot_content(md.get('operator-script'), |
1571 | - content_f=operator_script, shebang=False, mode=0700) |
1572 | + content_f=operator_script, shebang=False, |
1573 | + mode=0o700) |
1574 | |
1575 | # @datadictionary: This key has no defined format, but its value |
1576 | # is written to the file /var/db/mdata-user-data on each boot prior |
1577 | @@ -349,8 +351,9 @@ |
1578 | |
1579 | if b64: |
1580 | try: |
1581 | - return base64.b64decode(resp) |
1582 | - except TypeError: |
1583 | + return util.b64d(resp) |
1584 | + # Bogus input produces different errors in Python 2 and 3; catch both. |
1585 | + except (TypeError, binascii.Error): |
1586 | LOG.warn("Failed base64 decoding key '%s'", noun) |
1587 | return resp |
1588 | |
1589 | @@ -368,7 +371,7 @@ |
1590 | |
1591 | |
1592 | def write_boot_content(content, content_f, link=None, shebang=False, |
1593 | - mode=0400): |
1594 | + mode=0o400): |
1595 | """ |
1596 | Write the content to content_f. Under the following rules: |
1597 | 1. If no content, remove the file |
1598 | |
1599 | === modified file 'cloudinit/sources/__init__.py' |
1600 | --- cloudinit/sources/__init__.py 2014-09-03 18:46:20 +0000 |
1601 | +++ cloudinit/sources/__init__.py 2015-01-27 20:16:43 +0000 |
1602 | @@ -23,6 +23,8 @@ |
1603 | import abc |
1604 | import os |
1605 | |
1606 | +import six |
1607 | + |
1608 | from cloudinit import importer |
1609 | from cloudinit import log as logging |
1610 | from cloudinit import type_utils |
1611 | @@ -130,7 +132,7 @@ |
1612 | # we want to return the correct value for what will actually |
1613 | # exist in this instance |
1614 | mappings = {"sd": ("vd", "xvd", "vtb")} |
1615 | - for (nfrom, tlist) in mappings.iteritems(): |
1616 | + for (nfrom, tlist) in mappings.items(): |
1617 | if not short_name.startswith(nfrom): |
1618 | continue |
1619 | for nto in tlist: |
1620 | @@ -218,18 +220,18 @@ |
1621 | if not pubkey_data: |
1622 | return keys |
1623 | |
1624 | - if isinstance(pubkey_data, (basestring, str)): |
1625 | + if isinstance(pubkey_data, six.string_types): |
1626 | return str(pubkey_data).splitlines() |
1627 | |
1628 | if isinstance(pubkey_data, (list, set)): |
1629 | return list(pubkey_data) |
1630 | |
1631 | if isinstance(pubkey_data, (dict)): |
1632 | - for (_keyname, klist) in pubkey_data.iteritems(): |
1633 | + for (_keyname, klist) in pubkey_data.items(): |
1634 | # lp:506332 uec metadata service responds with |
1635 | # data that makes boto populate a string for 'klist' rather |
1636 | # than a list. |
1637 | - if isinstance(klist, (str, basestring)): |
1638 | + if isinstance(klist, six.string_types): |
1639 | klist = [klist] |
1640 | if isinstance(klist, (list, set)): |
1641 | for pkey in klist: |
1642 | |
1643 | === modified file 'cloudinit/sources/helpers/openstack.py' |
1644 | --- cloudinit/sources/helpers/openstack.py 2014-09-11 14:41:10 +0000 |
1645 | +++ cloudinit/sources/helpers/openstack.py 2015-01-27 20:16:43 +0000 |
1646 | @@ -24,6 +24,8 @@ |
1647 | import functools |
1648 | import os |
1649 | |
1650 | +import six |
1651 | + |
1652 | from cloudinit import ec2_utils |
1653 | from cloudinit import log as logging |
1654 | from cloudinit import sources |
1655 | @@ -205,7 +207,7 @@ |
1656 | """ |
1657 | |
1658 | load_json_anytype = functools.partial( |
1659 | - util.load_json, root_types=(dict, basestring, list)) |
1660 | + util.load_json, root_types=(dict, list) + six.string_types) |
1661 | |
1662 | def datafiles(version): |
1663 | files = {} |
1664 | @@ -234,7 +236,7 @@ |
1665 | 'version': 2, |
1666 | } |
1667 | data = datafiles(self._find_working_version()) |
1668 | - for (name, (path, required, translator)) in data.iteritems(): |
1669 | + for (name, (path, required, translator)) in data.items(): |
1670 | path = self._path_join(self.base_path, path) |
1671 | data = None |
1672 | found = False |
1673 | @@ -364,7 +366,7 @@ |
1674 | raise NonReadable("%s: no files found" % (self.base_path)) |
1675 | |
1676 | md = {} |
1677 | - for (name, (key, translator, default)) in FILES_V1.iteritems(): |
1678 | + for (name, (key, translator, default)) in FILES_V1.items(): |
1679 | if name in found: |
1680 | path = found[name] |
1681 | try: |
1682 | @@ -478,7 +480,7 @@ |
1683 | """ |
1684 | if not data: |
1685 | return None |
1686 | - if isinstance(data, (str, unicode, basestring)): |
1687 | + if isinstance(data, six.string_types): |
1688 | return data |
1689 | if isinstance(data, list): |
1690 | return copy.deepcopy(data) |
1691 | |
1692 | === modified file 'cloudinit/ssh_util.py' |
1693 | --- cloudinit/ssh_util.py 2014-11-12 13:52:28 +0000 |
1694 | +++ cloudinit/ssh_util.py 2015-01-27 20:16:43 +0000 |
1695 | @@ -239,7 +239,7 @@ |
1696 | # Make sure the users .ssh dir is setup accordingly |
1697 | (ssh_dir, pwent) = users_ssh_info(username) |
1698 | if not os.path.isdir(ssh_dir): |
1699 | - util.ensure_dir(ssh_dir, mode=0700) |
1700 | + util.ensure_dir(ssh_dir, mode=0o700) |
1701 | util.chownbyid(ssh_dir, pwent.pw_uid, pwent.pw_gid) |
1702 | |
1703 | # Turn the 'update' keys given into actual entries |
1704 | @@ -252,8 +252,8 @@ |
1705 | (auth_key_fn, auth_key_entries) = extract_authorized_keys(username) |
1706 | with util.SeLinuxGuard(ssh_dir, recursive=True): |
1707 | content = update_authorized_keys(auth_key_entries, key_entries) |
1708 | - util.ensure_dir(os.path.dirname(auth_key_fn), mode=0700) |
1709 | - util.write_file(auth_key_fn, content, mode=0600) |
1710 | + util.ensure_dir(os.path.dirname(auth_key_fn), mode=0o700) |
1711 | + util.write_file(auth_key_fn, content, mode=0o600) |
1712 | util.chownbyid(auth_key_fn, pwent.pw_uid, pwent.pw_gid) |
1713 | |
1714 | |
1715 | |
1716 | === modified file 'cloudinit/stages.py' |
1717 | --- cloudinit/stages.py 2014-09-02 20:31:18 +0000 |
1718 | +++ cloudinit/stages.py 2015-01-27 20:16:43 +0000 |
1719 | @@ -20,12 +20,13 @@ |
1720 | # You should have received a copy of the GNU General Public License |
1721 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
1722 | |
1723 | -import cPickle as pickle |
1724 | - |
1725 | import copy |
1726 | import os |
1727 | import sys |
1728 | |
1729 | +import six |
1730 | +from six.moves import cPickle as pickle |
1731 | + |
1732 | from cloudinit.settings import (PER_INSTANCE, FREQUENCIES, CLOUD_CONFIG) |
1733 | |
1734 | from cloudinit import handlers |
1735 | @@ -202,7 +203,7 @@ |
1736 | util.logexc(LOG, "Failed pickling datasource %s", self.datasource) |
1737 | return False |
1738 | try: |
1739 | - util.write_file(pickled_fn, pk_contents, mode=0400) |
1740 | + util.write_file(pickled_fn, pk_contents, mode=0o400) |
1741 | except Exception: |
1742 | util.logexc(LOG, "Failed pickling datasource to %s", pickled_fn) |
1743 | return False |
1744 | @@ -324,15 +325,15 @@ |
1745 | |
1746 | def _store_userdata(self): |
1747 | raw_ud = "%s" % (self.datasource.get_userdata_raw()) |
1748 | - util.write_file(self._get_ipath('userdata_raw'), raw_ud, 0600) |
1749 | + util.write_file(self._get_ipath('userdata_raw'), raw_ud, 0o600) |
1750 | processed_ud = "%s" % (self.datasource.get_userdata()) |
1751 | - util.write_file(self._get_ipath('userdata'), processed_ud, 0600) |
1752 | + util.write_file(self._get_ipath('userdata'), processed_ud, 0o600) |
1753 | |
1754 | def _store_vendordata(self): |
1755 | raw_vd = "%s" % (self.datasource.get_vendordata_raw()) |
1756 | - util.write_file(self._get_ipath('vendordata_raw'), raw_vd, 0600) |
1757 | + util.write_file(self._get_ipath('vendordata_raw'), raw_vd, 0o600) |
1758 | processed_vd = "%s" % (self.datasource.get_vendordata()) |
1759 | - util.write_file(self._get_ipath('vendordata'), processed_vd, 0600) |
1760 | + util.write_file(self._get_ipath('vendordata'), processed_vd, 0o600) |
1761 | |
1762 | def _default_handlers(self, opts=None): |
1763 | if opts is None: |
1764 | @@ -384,7 +385,7 @@ |
1765 | if not path or not os.path.isdir(path): |
1766 | return |
1767 | potential_handlers = util.find_modules(path) |
1768 | - for (fname, mod_name) in potential_handlers.iteritems(): |
1769 | + for (fname, mod_name) in potential_handlers.items(): |
1770 | try: |
1771 | mod_locs, looked_locs = importer.find_module( |
1772 | mod_name, [''], ['list_types', 'handle_part']) |
1773 | @@ -422,7 +423,7 @@ |
1774 | |
1775 | def init_handlers(): |
1776 | # Init the handlers first |
1777 | - for (_ctype, mod) in c_handlers.iteritems(): |
1778 | + for (_ctype, mod) in c_handlers.items(): |
1779 | if mod in c_handlers.initialized: |
1780 | # Avoid initing the same module twice (if said module |
1781 | # is registered to more than one content-type). |
1782 | @@ -449,7 +450,7 @@ |
1783 | |
1784 | def finalize_handlers(): |
1785 | # Give callbacks opportunity to finalize |
1786 | - for (_ctype, mod) in c_handlers.iteritems(): |
1787 | + for (_ctype, mod) in c_handlers.items(): |
1788 | if mod not in c_handlers.initialized: |
1789 | # Said module was never inited in the first place, so lets |
1790 | # not attempt to finalize those that never got called. |
1791 | @@ -574,7 +575,7 @@ |
1792 | for item in cfg_mods: |
1793 | if not item: |
1794 | continue |
1795 | - if isinstance(item, (str, basestring)): |
1796 | + if isinstance(item, six.string_types): |
1797 | module_list.append({ |
1798 | 'mod': item.strip(), |
1799 | }) |
1800 | |
1801 | === modified file 'cloudinit/templater.py' |
1802 | --- cloudinit/templater.py 2014-11-18 15:40:57 +0000 |
1803 | +++ cloudinit/templater.py 2015-01-27 20:16:43 +0000 |
1804 | @@ -137,7 +137,7 @@ |
1805 | return renderer(content, params) |
1806 | |
1807 | |
1808 | -def render_to_file(fn, outfn, params, mode=0644): |
1809 | +def render_to_file(fn, outfn, params, mode=0o644): |
1810 | contents = render_from_file(fn, params) |
1811 | util.write_file(outfn, contents, mode=mode) |
1812 | |
1813 | |
1814 | === modified file 'cloudinit/type_utils.py' |
1815 | --- cloudinit/type_utils.py 2014-08-26 19:53:41 +0000 |
1816 | +++ cloudinit/type_utils.py 2015-01-27 20:16:43 +0000 |
1817 | @@ -22,11 +22,31 @@ |
1818 | |
1819 | import types |
1820 | |
1821 | +import six |
1822 | + |
1823 | + |
1824 | +if six.PY3: |
1825 | + _NAME_TYPES = ( |
1826 | + types.ModuleType, |
1827 | + types.FunctionType, |
1828 | + types.LambdaType, |
1829 | + type, |
1830 | + ) |
1831 | +else: |
1832 | + _NAME_TYPES = ( |
1833 | + types.TypeType, |
1834 | + types.ModuleType, |
1835 | + types.FunctionType, |
1836 | + types.LambdaType, |
1837 | + types.ClassType, |
1838 | + ) |
1839 | + |
1840 | |
1841 | def obj_name(obj): |
1842 | - if isinstance(obj, (types.TypeType, |
1843 | - types.ModuleType, |
1844 | - types.FunctionType, |
1845 | - types.LambdaType)): |
1846 | - return str(obj.__name__) |
1847 | - return obj_name(obj.__class__) |
1848 | + if isinstance(obj, _NAME_TYPES): |
1849 | + return six.text_type(obj.__name__) |
1850 | + else: |
1851 | + if not hasattr(obj, '__class__'): |
1852 | + return repr(obj) |
1853 | + else: |
1854 | + return obj_name(obj.__class__) |
1855 | |
1856 | === modified file 'cloudinit/url_helper.py' |
1857 | --- cloudinit/url_helper.py 2014-08-26 19:53:41 +0000 |
1858 | +++ cloudinit/url_helper.py 2015-01-27 20:16:43 +0000 |
1859 | @@ -20,21 +20,29 @@ |
1860 | # You should have received a copy of the GNU General Public License |
1861 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
1862 | |
1863 | -import httplib |
1864 | import time |
1865 | -import urllib |
1866 | + |
1867 | +import six |
1868 | |
1869 | import requests |
1870 | from requests import exceptions |
1871 | |
1872 | -from urlparse import (urlparse, urlunparse) |
1873 | +from six.moves.urllib.parse import ( |
1874 | + urlparse, urlunparse, |
1875 | + quote as urlquote) |
1876 | |
1877 | from cloudinit import log as logging |
1878 | from cloudinit import version |
1879 | |
1880 | LOG = logging.getLogger(__name__) |
1881 | |
1882 | -NOT_FOUND = httplib.NOT_FOUND |
1883 | +if six.PY2: |
1884 | + import httplib |
1885 | + NOT_FOUND = httplib.NOT_FOUND |
1886 | +else: |
1887 | + import http.client |
1888 | + NOT_FOUND = http.client.NOT_FOUND |
1889 | + |
1890 | |
1891 | # Check if requests has ssl support (added in requests >= 0.8.8) |
1892 | SSL_ENABLED = False |
1893 | @@ -70,7 +78,7 @@ |
1894 | path = url_parsed[2] |
1895 | if path and not path.endswith("/"): |
1896 | path += "/" |
1897 | - path += urllib.quote(str(add_on), safe="/:") |
1898 | + path += urlquote(str(add_on), safe="/:") |
1899 | url_parsed[2] = path |
1900 | return urlunparse(url_parsed) |
1901 | |
1902 | @@ -111,7 +119,7 @@ |
1903 | |
1904 | @property |
1905 | def contents(self): |
1906 | - return self._response.content |
1907 | + return self._response.text |
1908 | |
1909 | @property |
1910 | def url(self): |
1911 | @@ -135,7 +143,7 @@ |
1912 | return self._response.status_code |
1913 | |
1914 | def __str__(self): |
1915 | - return self.contents |
1916 | + return self._response.text |
1917 | |
1918 | |
1919 | class UrlError(IOError): |
1920 | |
1921 | === modified file 'cloudinit/user_data.py' |
1922 | --- cloudinit/user_data.py 2014-01-24 20:29:09 +0000 |
1923 | +++ cloudinit/user_data.py 2015-01-27 20:16:43 +0000 |
1924 | @@ -29,6 +29,8 @@ |
1925 | from email.mime.nonmultipart import MIMENonMultipart |
1926 | from email.mime.text import MIMEText |
1927 | |
1928 | +import six |
1929 | + |
1930 | from cloudinit import handlers |
1931 | from cloudinit import log as logging |
1932 | from cloudinit import util |
1933 | @@ -106,7 +108,7 @@ |
1934 | |
1935 | ctype = None |
1936 | ctype_orig = part.get_content_type() |
1937 | - payload = part.get_payload(decode=True) |
1938 | + payload = util.fully_decoded_payload(part) |
1939 | was_compressed = False |
1940 | |
1941 | # When the message states it is of a gzipped content type ensure |
1942 | @@ -235,7 +237,7 @@ |
1943 | resp = util.read_file_or_url(include_url, |
1944 | ssl_details=self.ssl_details) |
1945 | if include_once_on and resp.ok(): |
1946 | - util.write_file(include_once_fn, str(resp), mode=0600) |
1947 | + util.write_file(include_once_fn, resp, mode=0o600) |
1948 | if resp.ok(): |
1949 | content = str(resp) |
1950 | else: |
1951 | @@ -256,7 +258,7 @@ |
1952 | # filename and type not be present |
1953 | # or |
1954 | # scalar(payload) |
1955 | - if isinstance(ent, (str, basestring)): |
1956 | + if isinstance(ent, six.string_types): |
1957 | ent = {'content': ent} |
1958 | if not isinstance(ent, (dict)): |
1959 | # TODO(harlowja) raise? |
1960 | @@ -337,7 +339,7 @@ |
1961 | data = util.decomp_gzip(raw_data) |
1962 | if "mime-version:" in data[0:4096].lower(): |
1963 | msg = email.message_from_string(data) |
1964 | - for (key, val) in headers.iteritems(): |
1965 | + for (key, val) in headers.items(): |
1966 | _replace_header(msg, key, val) |
1967 | else: |
1968 | mtype = headers.get(CONTENT_TYPE, NOT_MULTIPART_TYPE) |
1969 | |
1970 | === modified file 'cloudinit/util.py' |
1971 | --- cloudinit/util.py 2015-01-21 22:42:55 +0000 |
1972 | +++ cloudinit/util.py 2015-01-27 20:16:43 +0000 |
1973 | @@ -20,8 +20,6 @@ |
1974 | # You should have received a copy of the GNU General Public License |
1975 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
1976 | |
1977 | -from StringIO import StringIO |
1978 | - |
1979 | import contextlib |
1980 | import copy as obj_copy |
1981 | import ctypes |
1982 | @@ -45,8 +43,11 @@ |
1983 | import sys |
1984 | import tempfile |
1985 | import time |
1986 | -import urlparse |
1987 | - |
1988 | + |
1989 | +from base64 import b64decode, b64encode |
1990 | +from six.moves.urllib import parse as urlparse |
1991 | + |
1992 | +import six |
1993 | import yaml |
1994 | |
1995 | from cloudinit import importer |
1996 | @@ -69,8 +70,60 @@ |
1997 | } |
1998 | FN_ALLOWED = ('_-.()' + string.digits + string.ascii_letters) |
1999 | |
2000 | +TRUE_STRINGS = ('true', '1', 'on', 'yes') |
2001 | +FALSE_STRINGS = ('off', '0', 'no', 'false') |
2002 | + |
2003 | + |
2004 | # Helper utils to see if running in a container |
2005 | -CONTAINER_TESTS = ['running-in-container', 'lxc-is-container'] |
2006 | +CONTAINER_TESTS = ('running-in-container', 'lxc-is-container') |
2007 | + |
2008 | + |
2009 | +def decode_binary(blob, encoding='utf-8'): |
2010 | + # Converts a binary type into a text type using given encoding. |
2011 | + if isinstance(blob, six.text_type): |
2012 | + return blob |
2013 | + return blob.decode(encoding) |
2014 | + |
2015 | + |
2016 | +def encode_text(text, encoding='utf-8'): |
2017 | + # Converts a text string into a binary type using given encoding. |
2018 | + if isinstance(text, six.binary_type): |
2019 | + return text |
2020 | + return text.encode(encoding) |
2021 | + |
2022 | + |
2023 | +def b64d(source): |
2024 | + # Base64 decode some data, accepting bytes or unicode/str, and returning |
2025 | + # str/unicode if the result is utf-8 compatible, otherwise returning bytes. |
2026 | + decoded = b64decode(source) |
2027 | + if isinstance(decoded, bytes): |
2028 | + try: |
2029 | + return decoded.decode('utf-8') |
2030 | + except UnicodeDecodeError: |
2031 | + return decoded |
2032 | + |
2033 | +def b64e(source): |
2034 | + # Base64 encode some data, accepting bytes or unicode/str, and always |
2035 | + # returning str/unicode, since base64-encoded output is pure ASCII. |
2036 | + if not isinstance(source, bytes): |
2037 | + source = source.encode('utf-8') |
2038 | + return b64encode(source).decode('utf-8') |
2039 | + |
2040 | + |
2041 | +def fully_decoded_payload(part): |
2042 | + # In Python 3, decoding the payload will ironically hand us a bytes object. |
2043 | + # 'decode' means to decode according to Content-Transfer-Encoding, not |
2044 | + # according to any charset in the Content-Type. So, if we end up with |
2045 | + # bytes, first try to decode to str via CT charset, and failing that, try |
2046 | + # utf-8 using surrogate escapes. |
2047 | + cte_payload = part.get_payload(decode=True) |
2048 | + if ( six.PY3 and |
2049 | + part.get_content_maintype() == 'text' and |
2050 | + isinstance(cte_payload, bytes)): |
2051 | + charset = part.get_charset() or 'utf-8' |
2052 | + return cte_payload.decode(charset, errors='surrogateescape') |
2053 | + return cte_payload |
2054 | + |
2055 | |
2056 | # Path for DMI Data |
2057 | DMI_SYS_PATH = "/sys/class/dmi/id" |
2058 | @@ -98,7 +151,7 @@ |
2059 | else: |
2060 | self.description = description |
2061 | |
2062 | - if not isinstance(exit_code, (long, int)): |
2063 | + if not isinstance(exit_code, six.integer_types): |
2064 | self.exit_code = '-' |
2065 | else: |
2066 | self.exit_code = exit_code |
2067 | @@ -127,6 +180,9 @@ |
2068 | 'reason': self.reason, |
2069 | } |
2070 | IOError.__init__(self, message) |
2071 | + # For backward compatibility with Python 2. |
2072 | + if not hasattr(self, 'message'): |
2073 | + self.message = message |
2074 | |
2075 | |
2076 | class SeLinuxGuard(object): |
2077 | @@ -154,7 +210,8 @@ |
2078 | |
2079 | path = os.path.realpath(self.path) |
2080 | # path should be a string, not unicode |
2081 | - path = str(path) |
2082 | + if six.PY2: |
2083 | + path = str(path) |
2084 | try: |
2085 | stats = os.lstat(path) |
2086 | self.selinux.matchpathcon(path, stats[stat.ST_MODE]) |
2087 | @@ -212,10 +269,10 @@ |
2088 | def is_true(val, addons=None): |
2089 | if isinstance(val, (bool)): |
2090 | return val is True |
2091 | - check_set = ['true', '1', 'on', 'yes'] |
2092 | + check_set = TRUE_STRINGS |
2093 | if addons: |
2094 | - check_set = check_set + addons |
2095 | - if str(val).lower().strip() in check_set: |
2096 | + check_set = list(check_set) + addons |
2097 | + if six.text_type(val).lower().strip() in check_set: |
2098 | return True |
2099 | return False |
2100 | |
2101 | @@ -223,10 +280,10 @@ |
2102 | def is_false(val, addons=None): |
2103 | if isinstance(val, (bool)): |
2104 | return val is False |
2105 | - check_set = ['off', '0', 'no', 'false'] |
2106 | + check_set = FALSE_STRINGS |
2107 | if addons: |
2108 | - check_set = check_set + addons |
2109 | - if str(val).lower().strip() in check_set: |
2110 | + check_set = list(check_set) + addons |
2111 | + if six.text_type(val).lower().strip() in check_set: |
2112 | return True |
2113 | return False |
2114 | |
2115 | @@ -244,7 +301,7 @@ |
2116 | |
2117 | def rand_str(strlen=32, select_from=None): |
2118 | if not select_from: |
2119 | - select_from = string.letters + string.digits |
2120 | + select_from = string.ascii_letters + string.digits |
2121 | return "".join([random.choice(select_from) for _x in range(0, strlen)]) |
2122 | |
2123 | |
2124 | @@ -276,7 +333,7 @@ |
2125 | def uniq_merge(*lists): |
2126 | combined_list = [] |
2127 | for a_list in lists: |
2128 | - if isinstance(a_list, (str, basestring)): |
2129 | + if isinstance(a_list, six.string_types): |
2130 | a_list = a_list.strip().split(",") |
2131 | # Kickout the empty ones |
2132 | a_list = [a for a in a_list if len(a)] |
2133 | @@ -285,7 +342,7 @@ |
2134 | |
2135 | |
2136 | def clean_filename(fn): |
2137 | - for (k, v) in FN_REPLACEMENTS.iteritems(): |
2138 | + for (k, v) in FN_REPLACEMENTS.items(): |
2139 | fn = fn.replace(k, v) |
2140 | removals = [] |
2141 | for k in fn: |
2142 | @@ -299,14 +356,14 @@ |
2143 | |
2144 | def decomp_gzip(data, quiet=True): |
2145 | try: |
2146 | - buf = StringIO(str(data)) |
2147 | + buf = six.BytesIO(encode_text(data)) |
2148 | with contextlib.closing(gzip.GzipFile(None, "rb", 1, buf)) as gh: |
2149 | - return gh.read() |
2150 | + return decode_binary(gh.read()) |
2151 | except Exception as e: |
2152 | if quiet: |
2153 | return data |
2154 | else: |
2155 | - raise DecompressionError(str(e)) |
2156 | + raise DecompressionError(six.text_type(e)) |
2157 | |
2158 | |
2159 | def extract_usergroup(ug_pair): |
2160 | @@ -365,7 +422,7 @@ |
2161 | |
2162 | |
2163 | def load_json(text, root_types=(dict,)): |
2164 | - decoded = json.loads(text) |
2165 | + decoded = json.loads(decode_binary(text)) |
2166 | if not isinstance(decoded, tuple(root_types)): |
2167 | expected_types = ", ".join([str(t) for t in root_types]) |
2168 | raise TypeError("(%s) root types expected, got %s instead" |
2169 | @@ -397,7 +454,7 @@ |
2170 | if key not in yobj: |
2171 | return default |
2172 | val = yobj[key] |
2173 | - if not isinstance(val, (str, basestring)): |
2174 | + if not isinstance(val, six.string_types): |
2175 | val = str(val) |
2176 | return val |
2177 | |
2178 | @@ -436,7 +493,7 @@ |
2179 | if isinstance(val, (list)): |
2180 | cval = [v for v in val] |
2181 | return cval |
2182 | - if not isinstance(val, (basestring)): |
2183 | + if not isinstance(val, six.string_types): |
2184 | val = str(val) |
2185 | return [val] |
2186 | |
2187 | @@ -711,10 +768,10 @@ |
2188 | |
2189 | def load_yaml(blob, default=None, allowed=(dict,)): |
2190 | loaded = default |
2191 | + blob = decode_binary(blob) |
2192 | try: |
2193 | - blob = str(blob) |
2194 | - LOG.debug(("Attempting to load yaml from string " |
2195 | - "of length %s with allowed root types %s"), |
2196 | + LOG.debug("Attempting to load yaml from string " |
2197 | + "of length %s with allowed root types %s", |
2198 | len(blob), allowed) |
2199 | converted = safeyaml.load(blob) |
2200 | if not isinstance(converted, allowed): |
2201 | @@ -749,14 +806,12 @@ |
2202 | md_resp = read_file_or_url(md_url, timeout, retries, file_retries) |
2203 | md = None |
2204 | if md_resp.ok(): |
2205 | - md_str = str(md_resp) |
2206 | - md = load_yaml(md_str, default={}) |
2207 | + md = load_yaml(md_resp.contents, default={}) |
2208 | |
2209 | ud_resp = read_file_or_url(ud_url, timeout, retries, file_retries) |
2210 | ud = None |
2211 | if ud_resp.ok(): |
2212 | - ud_str = str(ud_resp) |
2213 | - ud = ud_str |
2214 | + ud = ud_resp.contents |
2215 | |
2216 | return (md, ud) |
2217 | |
2218 | @@ -787,7 +842,7 @@ |
2219 | if "conf_d" in cfg: |
2220 | confd = cfg['conf_d'] |
2221 | if confd: |
2222 | - if not isinstance(confd, (str, basestring)): |
2223 | + if not isinstance(confd, six.string_types): |
2224 | raise TypeError(("Config file %s contains 'conf_d' " |
2225 | "with non-string type %s") % |
2226 | (cfgfile, type_utils.obj_name(confd))) |
2227 | @@ -924,8 +979,8 @@ |
2228 | return (None, None, None) |
2229 | |
2230 | resp = read_file_or_url(url) |
2231 | - if resp.contents.startswith(starts) and resp.ok(): |
2232 | - return (key, url, str(resp)) |
2233 | + if resp.ok() and resp.contents.startswith(starts): |
2234 | + return (key, url, resp.contents) |
2235 | |
2236 | return (key, url, None) |
2237 | |
2238 | @@ -1079,9 +1134,9 @@ |
2239 | return out_list |
2240 | |
2241 | |
2242 | -def load_file(fname, read_cb=None, quiet=False): |
2243 | +def load_file(fname, read_cb=None, quiet=False, decode=True): |
2244 | LOG.debug("Reading from %s (quiet=%s)", fname, quiet) |
2245 | - ofh = StringIO() |
2246 | + ofh = six.BytesIO() |
2247 | try: |
2248 | with open(fname, 'rb') as ifh: |
2249 | pipe_in_out(ifh, ofh, chunk_cb=read_cb) |
2250 | @@ -1092,7 +1147,10 @@ |
2251 | raise |
2252 | contents = ofh.getvalue() |
2253 | LOG.debug("Read %s bytes from %s", len(contents), fname) |
2254 | - return contents |
2255 | + if decode: |
2256 | + return decode_binary(contents) |
2257 | + else: |
2258 | + return contents |
2259 | |
2260 | |
2261 | def get_cmdline(): |
2262 | @@ -1110,7 +1168,7 @@ |
2263 | bytes_piped = 0 |
2264 | while True: |
2265 | data = in_fh.read(chunk_size) |
2266 | - if data == '': |
2267 | + if len(data) == 0: |
2268 | break |
2269 | else: |
2270 | out_fh.write(data) |
2271 | @@ -1216,13 +1274,20 @@ |
2272 | # coming out to a non-debug stream |
2273 | if msg: |
2274 | log.warn(msg, *args) |
2275 | - # Debug gets the full trace |
2276 | - log.debug(msg, exc_info=1, *args) |
2277 | + # Debug gets the full trace. However, nose has a bug whereby its |
2278 | + # logcapture plugin doesn't properly handle the case where there is no |
2279 | + # actual exception. To avoid tracebacks during the test suite then, we'll |
2280 | + # do the actual exc_info extraction here, and if there is no exception in |
2281 | + # flight, we'll just pass in None. |
2282 | + exc_info = sys.exc_info() |
2283 | + if exc_info == (None, None, None): |
2284 | + exc_info = None |
2285 | + log.debug(msg, exc_info=exc_info, *args) |
2286 | |
2287 | |
2288 | def hash_blob(blob, routine, mlen=None): |
2289 | hasher = hashlib.new(routine) |
2290 | - hasher.update(blob) |
2291 | + hasher.update(encode_text(blob)) |
2292 | digest = hasher.hexdigest() |
2293 | # Don't get to long now |
2294 | if mlen is not None: |
2295 | @@ -1253,7 +1318,7 @@ |
2296 | os.rename(src, dest) |
2297 | |
2298 | |
2299 | -def ensure_dirs(dirlist, mode=0755): |
2300 | +def ensure_dirs(dirlist, mode=0o755): |
2301 | for d in dirlist: |
2302 | ensure_dir(d, mode) |
2303 | |
2304 | @@ -1267,7 +1332,7 @@ |
2305 | return |
2306 | try: |
2307 | if key and content: |
2308 | - write_file(target_fn, content, mode=0600) |
2309 | + write_file(target_fn, content, mode=0o600) |
2310 | LOG.debug(("Wrote to %s with contents of command line" |
2311 | " url %s (len=%s)"), target_fn, url, len(content)) |
2312 | elif key and not content: |
2313 | @@ -1283,8 +1348,7 @@ |
2314 | indent=4, |
2315 | explicit_start=explicit_start, |
2316 | explicit_end=explicit_end, |
2317 | - default_flow_style=False, |
2318 | - allow_unicode=True) |
2319 | + default_flow_style=False) |
2320 | |
2321 | |
2322 | def ensure_dir(path, mode=None): |
2323 | @@ -1492,7 +1556,7 @@ |
2324 | write_file(path, content, omode="ab", mode=None) |
2325 | |
2326 | |
2327 | -def ensure_file(path, mode=0644): |
2328 | +def ensure_file(path, mode=0o644): |
2329 | write_file(path, content='', omode="ab", mode=mode) |
2330 | |
2331 | |
2332 | @@ -1510,7 +1574,7 @@ |
2333 | os.chmod(path, real_mode) |
2334 | |
2335 | |
2336 | -def write_file(filename, content, mode=0644, omode="wb"): |
2337 | +def write_file(filename, content, mode=0o644, omode="wb"): |
2338 | """ |
2339 | Writes a file with the given content and sets the file mode as specified. |
2340 | Resotres the SELinux context if possible. |
2341 | @@ -1518,11 +1582,17 @@ |
2342 | @param filename: The full path of the file to write. |
2343 | @param content: The content to write to the file. |
2344 | @param mode: The filesystem mode to set on the file. |
2345 | - @param omode: The open mode used when opening the file (r, rb, a, etc.) |
2346 | + @param omode: The open mode used when opening the file (w, wb, a, etc.) |
2347 | """ |
2348 | ensure_dir(os.path.dirname(filename)) |
2349 | - LOG.debug("Writing to %s - %s: [%s] %s bytes", |
2350 | - filename, omode, mode, len(content)) |
2351 | + if 'b' in omode.lower(): |
2352 | + content = encode_text(content) |
2353 | + write_type = 'bytes' |
2354 | + else: |
2355 | + content = decode_binary(content) |
2356 | + write_type = 'characters' |
2357 | + LOG.debug("Writing to %s - %s: [%s] %s %s", |
2358 | + filename, omode, mode, len(content), write_type) |
2359 | with SeLinuxGuard(path=filename): |
2360 | with open(filename, omode) as fh: |
2361 | fh.write(content) |
2362 | @@ -1564,9 +1634,12 @@ |
2363 | stdout = subprocess.PIPE |
2364 | stderr = subprocess.PIPE |
2365 | stdin = subprocess.PIPE |
2366 | - sp = subprocess.Popen(args, stdout=stdout, |
2367 | - stderr=stderr, stdin=stdin, |
2368 | - env=env, shell=shell) |
2369 | + kws = dict(stdout=stdout, stderr=stderr, stdin=stdin, |
2370 | + env=env, shell=shell) |
2371 | + if six.PY3: |
2372 | + # Use this so subprocess output will be (Python 3) str, not bytes. |
2373 | + kws['universal_newlines'] = True |
2374 | + sp = subprocess.Popen(args, **kws) |
2375 | (out, err) = sp.communicate(data) |
2376 | except OSError as e: |
2377 | raise ProcessExecutionError(cmd=args, reason=e) |
2378 | @@ -1611,10 +1684,10 @@ |
2379 | if isinstance(args, list): |
2380 | fixed = [] |
2381 | for f in args: |
2382 | - fixed.append("'%s'" % (str(f).replace("'", escaped))) |
2383 | + fixed.append("'%s'" % (six.text_type(f).replace("'", escaped))) |
2384 | content = "%s%s\n" % (content, ' '.join(fixed)) |
2385 | cmds_made += 1 |
2386 | - elif isinstance(args, (str, basestring)): |
2387 | + elif isinstance(args, six.string_types): |
2388 | content = "%s%s\n" % (content, args) |
2389 | cmds_made += 1 |
2390 | else: |
2391 | @@ -1725,7 +1798,7 @@ |
2392 | |
2393 | pkglist = [] |
2394 | for pkg in pkgs: |
2395 | - if isinstance(pkg, basestring): |
2396 | + if isinstance(pkg, six.string_types): |
2397 | pkglist.append(pkg) |
2398 | continue |
2399 | |
2400 | @@ -2021,23 +2094,23 @@ |
2401 | Reads dmi data with from /sys/class/dmi/id |
2402 | """ |
2403 | |
2404 | - dmi_key = "{}/{}".format(DMI_SYS_PATH, key) |
2405 | - LOG.debug("querying dmi data {}".format(dmi_key)) |
2406 | + dmi_key = "{0}/{1}".format(DMI_SYS_PATH, key) |
2407 | + LOG.debug("querying dmi data {0}".format(dmi_key)) |
2408 | try: |
2409 | if not os.path.exists(dmi_key): |
2410 | - LOG.debug("did not find {}".format(dmi_key)) |
2411 | + LOG.debug("did not find {0}".format(dmi_key)) |
2412 | return None |
2413 | |
2414 | key_data = load_file(dmi_key) |
2415 | if not key_data: |
2416 | - LOG.debug("{} did not return any data".format(key)) |
2417 | + LOG.debug("{0} did not return any data".format(key)) |
2418 | return None |
2419 | |
2420 | - LOG.debug("dmi data {} returned {}".format(dmi_key, key_data)) |
2421 | + LOG.debug("dmi data {0} returned {1}".format(dmi_key, key_data)) |
2422 | return key_data.strip() |
2423 | |
2424 | except Exception as e: |
2425 | - logexc(LOG, "failed read of {}".format(dmi_key), e) |
2426 | + logexc(LOG, "failed read of {0}".format(dmi_key), e) |
2427 | return None |
2428 | |
2429 | |
2430 | @@ -2049,10 +2122,10 @@ |
2431 | try: |
2432 | cmd = [dmidecode_path, "--string", key] |
2433 | (result, _err) = subp(cmd) |
2434 | - LOG.debug("dmidecode returned '{}' for '{}'".format(result, key)) |
2435 | + LOG.debug("dmidecode returned '{0}' for '{1}'".format(result, key)) |
2436 | return result |
2437 | - except OSError, _err: |
2438 | - LOG.debug('failed dmidecode cmd: {}\n{}'.format(cmd, _err.message)) |
2439 | + except OSError as _err: |
2440 | + LOG.debug('failed dmidecode cmd: {0}\n{1}'.format(cmd, _err)) |
2441 | return None |
2442 | |
2443 | |
2444 | @@ -2068,7 +2141,7 @@ |
2445 | if dmidecode_path: |
2446 | return _call_dmidecode(key, dmidecode_path) |
2447 | |
2448 | - LOG.warn("did not find either path {} or dmidecode command".format( |
2449 | + LOG.warn("did not find either path {0} or dmidecode command".format( |
2450 | DMI_SYS_PATH)) |
2451 | |
2452 | return None |
2453 | |
2454 | === modified file 'packages/bddeb' |
2455 | --- packages/bddeb 2014-07-24 12:49:42 +0000 |
2456 | +++ packages/bddeb 2015-01-27 20:16:43 +0000 |
2457 | @@ -38,6 +38,7 @@ |
2458 | 'pyserial': 'python-serial', |
2459 | 'pyyaml': 'python-yaml', |
2460 | 'requests': 'python-requests', |
2461 | + 'six': 'python-six', |
2462 | } |
2463 | DEBUILD_ARGS = ["-S", "-d"] |
2464 | |
2465 | |
2466 | === modified file 'packages/brpm' |
2467 | --- packages/brpm 2014-10-13 22:36:30 +0000 |
2468 | +++ packages/brpm 2015-01-27 20:16:43 +0000 |
2469 | @@ -45,6 +45,7 @@ |
2470 | 'pyserial': 'pyserial', |
2471 | 'pyyaml': 'PyYAML', |
2472 | 'requests': 'python-requests', |
2473 | + 'six': 'python-six', |
2474 | }, |
2475 | 'suse': { |
2476 | 'argparse': 'python-argparse', |
2477 | @@ -56,6 +57,7 @@ |
2478 | 'pyserial': 'python-pyserial', |
2479 | 'pyyaml': 'python-yaml', |
2480 | 'requests': 'python-requests', |
2481 | + 'six': 'python-six', |
2482 | } |
2483 | } |
2484 | |
2485 | |
2486 | === modified file 'requirements.txt' |
2487 | --- requirements.txt 2014-03-05 23:05:59 +0000 |
2488 | +++ requirements.txt 2015-01-27 20:16:43 +0000 |
2489 | @@ -1,7 +1,6 @@ |
2490 | # Pypi requirements for cloud-init to work |
2491 | |
2492 | # Used for untemplating any files or strings with parameters. |
2493 | -cheetah |
2494 | jinja2 |
2495 | |
2496 | # This is used for any pretty printing of tabular data. |
2497 | @@ -9,7 +8,7 @@ |
2498 | |
2499 | # This one is currently only used by the MAAS datasource. If that |
2500 | # datasource is removed, this is no longer needed |
2501 | -oauth |
2502 | +oauthlib |
2503 | |
2504 | # This one is currently used only by the CloudSigma and SmartOS datasources. |
2505 | # If these datasources are removed, this is no longer needed |
2506 | @@ -32,3 +31,6 @@ |
2507 | |
2508 | # For patching pieces of cloud-config together |
2509 | jsonpatch |
2510 | + |
2511 | +# For Python 2/3 compatibility |
2512 | +six |
2513 | |
2514 | === modified file 'setup.py' |
2515 | --- setup.py 2015-01-06 17:02:38 +0000 |
2516 | +++ setup.py 2015-01-27 20:16:43 +0000 |
2517 | @@ -45,7 +45,8 @@ |
2518 | stdout = None |
2519 | stderr = None |
2520 | sp = subprocess.Popen(cmd, stdout=stdout, |
2521 | - stderr=stderr, stdin=None) |
2522 | + stderr=stderr, stdin=None, |
2523 | + universal_newlines=True) |
2524 | (out, err) = sp.communicate() |
2525 | ret = sp.returncode |
2526 | if ret not in [0]: |
2527 | @@ -144,9 +145,9 @@ |
2528 | raise DistutilsArgError( |
2529 | "Invalid --init-system: %s" % (','.join(bad))) |
2530 | |
2531 | - for sys in self.init_system: |
2532 | + for system in self.init_system: |
2533 | self.distribution.data_files.append( |
2534 | - (INITSYS_ROOTS[sys], INITSYS_FILES[sys])) |
2535 | + (INITSYS_ROOTS[system], INITSYS_FILES[system])) |
2536 | # Force that command to reinitalize (with new file list) |
2537 | self.distribution.reinitialize_command('install_data', True) |
2538 | |
2539 | @@ -174,6 +175,11 @@ |
2540 | } |
2541 | |
2542 | |
2543 | +requirements = read_requires() |
2544 | +if sys.version_info < (3,): |
2545 | + requirements.append('cheetah') |
2546 | + |
2547 | + |
2548 | setuptools.setup(name='cloud-init', |
2549 | version=get_version(), |
2550 | description='EC2 initialisation magic', |
2551 | @@ -186,6 +192,6 @@ |
2552 | ], |
2553 | license='GPLv3', |
2554 | data_files=data_files, |
2555 | - install_requires=read_requires(), |
2556 | + install_requires=requirements, |
2557 | cmdclass=cmdclass, |
2558 | ) |
2559 | |
2560 | === modified file 'templates/resolv.conf.tmpl' |
2561 | --- templates/resolv.conf.tmpl 2014-08-21 20:26:43 +0000 |
2562 | +++ templates/resolv.conf.tmpl 2015-01-27 20:16:43 +0000 |
2563 | @@ -24,7 +24,7 @@ |
2564 | {% if options or flags %} |
2565 | |
2566 | options {% for flag in flags %}{{flag}} {% endfor %} |
2567 | -{% for key, value in options.iteritems() -%} |
2568 | +{% for key, value in options.items() -%} |
2569 | {{key}}:{{value}} |
2570 | {% endfor %} |
2571 | {% endif %} |
2572 | |
2573 | === modified file 'tests/unittests/helpers.py' |
2574 | --- tests/unittests/helpers.py 2014-10-30 20:07:41 +0000 |
2575 | +++ tests/unittests/helpers.py 2015-01-27 20:16:43 +0000 |
2576 | @@ -1,17 +1,23 @@ |
2577 | import os |
2578 | import sys |
2579 | +import shutil |
2580 | +import tempfile |
2581 | import unittest |
2582 | |
2583 | -from contextlib import contextmanager |
2584 | +import six |
2585 | |
2586 | -from mocker import Mocker |
2587 | -from mocker import MockerTestCase |
2588 | +try: |
2589 | + from unittest import mock |
2590 | +except ImportError: |
2591 | + import mock |
2592 | +try: |
2593 | + from contextlib import ExitStack |
2594 | +except ImportError: |
2595 | + from contextlib2 import ExitStack |
2596 | |
2597 | from cloudinit import helpers as ch |
2598 | from cloudinit import util |
2599 | |
2600 | -import shutil |
2601 | - |
2602 | # Used for detecting different python versions |
2603 | PY2 = False |
2604 | PY26 = False |
2605 | @@ -33,8 +39,20 @@ |
2606 | PY3 = True |
2607 | |
2608 | if PY26: |
2609 | - # For now add these on, taken from python 2.7 + slightly adjusted |
2610 | + # For now add these on, taken from python 2.7 + slightly adjusted. Drop |
2611 | + # all this once Python 2.6 is dropped as a minimum requirement. |
2612 | class TestCase(unittest.TestCase): |
2613 | + def setUp(self): |
2614 | + super(TestCase, self).setUp() |
2615 | + self.__all_cleanups = ExitStack() |
2616 | + |
2617 | + def tearDown(self): |
2618 | + self.__all_cleanups.close() |
2619 | + unittest.TestCase.tearDown(self) |
2620 | + |
2621 | + def addCleanup(self, function, *args, **kws): |
2622 | + self.__all_cleanups.callback(function, *args, **kws) |
2623 | + |
2624 | def assertIs(self, expr1, expr2, msg=None): |
2625 | if expr1 is not expr2: |
2626 | standardMsg = '%r is not %r' % (expr1, expr2) |
2627 | @@ -57,10 +75,17 @@ |
2628 | standardMsg = standardMsg % (value) |
2629 | self.fail(self._formatMessage(msg, standardMsg)) |
2630 | |
2631 | + def assertIsInstance(self, obj, cls, msg=None): |
2632 | + """Same as self.assertTrue(isinstance(obj, cls)), with a nicer |
2633 | + default message.""" |
2634 | + if not isinstance(obj, cls): |
2635 | + standardMsg = '%s is not an instance of %r' % (repr(obj), cls) |
2636 | + self.fail(self._formatMessage(msg, standardMsg)) |
2637 | + |
2638 | def assertDictContainsSubset(self, expected, actual, msg=None): |
2639 | missing = [] |
2640 | mismatched = [] |
2641 | - for k, v in expected.iteritems(): |
2642 | + for k, v in expected.items(): |
2643 | if k not in actual: |
2644 | missing.append(k) |
2645 | elif actual[k] != v: |
2646 | @@ -86,17 +111,6 @@ |
2647 | pass |
2648 | |
2649 | |
2650 | -@contextmanager |
2651 | -def mocker(verify_calls=True): |
2652 | - m = Mocker() |
2653 | - try: |
2654 | - yield m |
2655 | - finally: |
2656 | - m.restore() |
2657 | - if verify_calls: |
2658 | - m.verify() |
2659 | - |
2660 | - |
2661 | # Makes the old path start |
2662 | # with new base instead of whatever |
2663 | # it previously had |
2664 | @@ -121,14 +135,19 @@ |
2665 | nam = len(n_args) |
2666 | for i in range(0, nam): |
2667 | path = args[i] |
2668 | - n_args[i] = rebase_path(path, new_base) |
2669 | + # patchOS() wraps various os and os.path functions, however in |
2670 | + # Python 3 some of these now accept file-descriptors (integers). |
2671 | + # That breaks rebase_path() so in lieu of a better solution, just |
2672 | + # don't rebase if we get a fd. |
2673 | + if isinstance(path, six.string_types): |
2674 | + n_args[i] = rebase_path(path, new_base) |
2675 | return old_func(*n_args, **kwds) |
2676 | return wrapper |
2677 | |
2678 | |
2679 | -class ResourceUsingTestCase(MockerTestCase): |
2680 | - def __init__(self, methodName="runTest"): |
2681 | - MockerTestCase.__init__(self, methodName) |
2682 | +class ResourceUsingTestCase(TestCase): |
2683 | + def setUp(self): |
2684 | + super(ResourceUsingTestCase, self).setUp() |
2685 | self.resource_path = None |
2686 | |
2687 | def resourceLocation(self, subname=None): |
2688 | @@ -156,17 +175,23 @@ |
2689 | return fh.read() |
2690 | |
2691 | def getCloudPaths(self): |
2692 | + tmpdir = tempfile.mkdtemp() |
2693 | + self.addCleanup(shutil.rmtree, tmpdir) |
2694 | cp = ch.Paths({ |
2695 | - 'cloud_dir': self.makeDir(), |
2696 | + 'cloud_dir': tmpdir, |
2697 | 'templates_dir': self.resourceLocation(), |
2698 | }) |
2699 | return cp |
2700 | |
2701 | |
2702 | class FilesystemMockingTestCase(ResourceUsingTestCase): |
2703 | - def __init__(self, methodName="runTest"): |
2704 | - ResourceUsingTestCase.__init__(self, methodName) |
2705 | - self.patched_funcs = [] |
2706 | + def setUp(self): |
2707 | + super(FilesystemMockingTestCase, self).setUp() |
2708 | + self.patched_funcs = ExitStack() |
2709 | + |
2710 | + def tearDown(self): |
2711 | + self.patched_funcs.close() |
2712 | + ResourceUsingTestCase.tearDown(self) |
2713 | |
2714 | def replicateTestRoot(self, example_root, target_root): |
2715 | real_root = self.resourceLocation() |
2716 | @@ -180,15 +205,6 @@ |
2717 | make_path = util.abs_join(make_path, f) |
2718 | shutil.copy(real_path, make_path) |
2719 | |
2720 | - def tearDown(self): |
2721 | - self.restore() |
2722 | - ResourceUsingTestCase.tearDown(self) |
2723 | - |
2724 | - def restore(self): |
2725 | - for (mod, f, func) in self.patched_funcs: |
2726 | - setattr(mod, f, func) |
2727 | - self.patched_funcs = [] |
2728 | - |
2729 | def patchUtils(self, new_root): |
2730 | patch_funcs = { |
2731 | util: [('write_file', 1), |
2732 | @@ -205,8 +221,8 @@ |
2733 | for (f, am) in funcs: |
2734 | func = getattr(mod, f) |
2735 | trap_func = retarget_many_wrapper(new_root, am, func) |
2736 | - setattr(mod, f, trap_func) |
2737 | - self.patched_funcs.append((mod, f, func)) |
2738 | + self.patched_funcs.enter_context( |
2739 | + mock.patch.object(mod, f, trap_func)) |
2740 | |
2741 | # Handle subprocess calls |
2742 | func = getattr(util, 'subp') |
2743 | @@ -214,16 +230,15 @@ |
2744 | def nsubp(*_args, **_kwargs): |
2745 | return ('', '') |
2746 | |
2747 | - setattr(util, 'subp', nsubp) |
2748 | - self.patched_funcs.append((util, 'subp', func)) |
2749 | + self.patched_funcs.enter_context( |
2750 | + mock.patch.object(util, 'subp', nsubp)) |
2751 | |
2752 | def null_func(*_args, **_kwargs): |
2753 | return None |
2754 | |
2755 | for f in ['chownbyid', 'chownbyname']: |
2756 | - func = getattr(util, f) |
2757 | - setattr(util, f, null_func) |
2758 | - self.patched_funcs.append((util, f, func)) |
2759 | + self.patched_funcs.enter_context( |
2760 | + mock.patch.object(util, f, null_func)) |
2761 | |
2762 | def patchOS(self, new_root): |
2763 | patch_funcs = { |
2764 | @@ -234,8 +249,8 @@ |
2765 | for f in funcs: |
2766 | func = getattr(mod, f) |
2767 | trap_func = retarget_many_wrapper(new_root, 1, func) |
2768 | - setattr(mod, f, trap_func) |
2769 | - self.patched_funcs.append((mod, f, func)) |
2770 | + self.patched_funcs.enter_context( |
2771 | + mock.patch.object(mod, f, trap_func)) |
2772 | |
2773 | |
2774 | class HttprettyTestCase(TestCase): |
2775 | @@ -256,7 +271,7 @@ |
2776 | def populate_dir(path, files): |
2777 | if not os.path.exists(path): |
2778 | os.makedirs(path) |
2779 | - for (name, content) in files.iteritems(): |
2780 | + for (name, content) in files.items(): |
2781 | with open(os.path.join(path, name), "w") as fp: |
2782 | fp.write(content) |
2783 | fp.close() |
2784 | |
2785 | === modified file 'tests/unittests/test__init__.py' |
2786 | --- tests/unittests/test__init__.py 2014-08-26 19:53:41 +0000 |
2787 | +++ tests/unittests/test__init__.py 2015-01-27 20:16:43 +0000 |
2788 | @@ -1,14 +1,25 @@ |
2789 | import os |
2790 | +import shutil |
2791 | +import tempfile |
2792 | +import unittest |
2793 | |
2794 | -from mocker import MockerTestCase, ARGS, KWARGS |
2795 | +try: |
2796 | + from unittest import mock |
2797 | +except ImportError: |
2798 | + import mock |
2799 | +try: |
2800 | + from contextlib import ExitStack |
2801 | +except ImportError: |
2802 | + from contextlib2 import ExitStack |
2803 | |
2804 | from cloudinit import handlers |
2805 | from cloudinit import helpers |
2806 | -from cloudinit import importer |
2807 | from cloudinit import settings |
2808 | from cloudinit import url_helper |
2809 | from cloudinit import util |
2810 | |
2811 | +from .helpers import TestCase |
2812 | + |
2813 | |
2814 | class FakeModule(handlers.Handler): |
2815 | def __init__(self): |
2816 | @@ -22,76 +33,73 @@ |
2817 | pass |
2818 | |
2819 | |
2820 | -class TestWalkerHandleHandler(MockerTestCase): |
2821 | +class TestWalkerHandleHandler(TestCase): |
2822 | |
2823 | def setUp(self): |
2824 | - |
2825 | - MockerTestCase.setUp(self) |
2826 | + super(TestWalkerHandleHandler, self).setUp() |
2827 | + tmpdir = tempfile.mkdtemp() |
2828 | + self.addCleanup(shutil.rmtree, tmpdir) |
2829 | |
2830 | self.data = { |
2831 | "handlercount": 0, |
2832 | "frequency": "", |
2833 | - "handlerdir": self.makeDir(), |
2834 | + "handlerdir": tmpdir, |
2835 | "handlers": helpers.ContentHandlers(), |
2836 | "data": None} |
2837 | |
2838 | self.expected_module_name = "part-handler-%03d" % ( |
2839 | self.data["handlercount"],) |
2840 | expected_file_name = "%s.py" % self.expected_module_name |
2841 | - expected_file_fullname = os.path.join(self.data["handlerdir"], |
2842 | - expected_file_name) |
2843 | + self.expected_file_fullname = os.path.join( |
2844 | + self.data["handlerdir"], expected_file_name) |
2845 | self.module_fake = FakeModule() |
2846 | self.ctype = None |
2847 | self.filename = None |
2848 | self.payload = "dummy payload" |
2849 | |
2850 | - # Mock the write_file function |
2851 | - write_file_mock = self.mocker.replace(util.write_file, |
2852 | - passthrough=False) |
2853 | - write_file_mock(expected_file_fullname, self.payload, 0600) |
2854 | + # Mock the write_file() function. We'll assert that it got called as |
2855 | + # expected in each of the individual tests. |
2856 | + resources = ExitStack() |
2857 | + self.addCleanup(resources.close) |
2858 | + self.write_file_mock = resources.enter_context( |
2859 | + mock.patch('cloudinit.util.write_file')) |
2860 | |
2861 | def test_no_errors(self): |
2862 | """Payload gets written to file and added to C{pdata}.""" |
2863 | - import_mock = self.mocker.replace(importer.import_module, |
2864 | - passthrough=False) |
2865 | - import_mock(self.expected_module_name) |
2866 | - self.mocker.result(self.module_fake) |
2867 | - self.mocker.replay() |
2868 | - |
2869 | - handlers.walker_handle_handler(self.data, self.ctype, self.filename, |
2870 | - self.payload) |
2871 | - |
2872 | - self.assertEqual(1, self.data["handlercount"]) |
2873 | + with mock.patch('cloudinit.importer.import_module', |
2874 | + return_value=self.module_fake) as mockobj: |
2875 | + handlers.walker_handle_handler(self.data, self.ctype, |
2876 | + self.filename, self.payload) |
2877 | + mockobj.assert_called_once_with(self.expected_module_name) |
2878 | + self.write_file_mock.assert_called_once_with( |
2879 | + self.expected_file_fullname, self.payload, 0o600) |
2880 | + self.assertEqual(self.data['handlercount'], 1) |
2881 | |
2882 | def test_import_error(self): |
2883 | """Module import errors are logged. No handler added to C{pdata}.""" |
2884 | - import_mock = self.mocker.replace(importer.import_module, |
2885 | - passthrough=False) |
2886 | - import_mock(self.expected_module_name) |
2887 | - self.mocker.throw(ImportError()) |
2888 | - self.mocker.replay() |
2889 | - |
2890 | - handlers.walker_handle_handler(self.data, self.ctype, self.filename, |
2891 | - self.payload) |
2892 | - |
2893 | - self.assertEqual(0, self.data["handlercount"]) |
2894 | + with mock.patch('cloudinit.importer.import_module', |
2895 | + side_effect=ImportError) as mockobj: |
2896 | + handlers.walker_handle_handler(self.data, self.ctype, |
2897 | + self.filename, self.payload) |
2898 | + mockobj.assert_called_once_with(self.expected_module_name) |
2899 | + self.write_file_mock.assert_called_once_with( |
2900 | + self.expected_file_fullname, self.payload, 0o600) |
2901 | + self.assertEqual(self.data['handlercount'], 0) |
2902 | |
2903 | def test_attribute_error(self): |
2904 | """Attribute errors are logged. No handler added to C{pdata}.""" |
2905 | - import_mock = self.mocker.replace(importer.import_module, |
2906 | - passthrough=False) |
2907 | - import_mock(self.expected_module_name) |
2908 | - self.mocker.result(self.module_fake) |
2909 | - self.mocker.throw(AttributeError()) |
2910 | - self.mocker.replay() |
2911 | - |
2912 | - handlers.walker_handle_handler(self.data, self.ctype, self.filename, |
2913 | - self.payload) |
2914 | - |
2915 | - self.assertEqual(0, self.data["handlercount"]) |
2916 | - |
2917 | - |
2918 | -class TestHandlerHandlePart(MockerTestCase): |
2919 | + with mock.patch('cloudinit.importer.import_module', |
2920 | + side_effect=AttributeError, |
2921 | + return_value=self.module_fake) as mockobj: |
2922 | + handlers.walker_handle_handler(self.data, self.ctype, |
2923 | + self.filename, self.payload) |
2924 | + mockobj.assert_called_once_with(self.expected_module_name) |
2925 | + self.write_file_mock.assert_called_once_with( |
2926 | + self.expected_file_fullname, self.payload, 0o600) |
2927 | + self.assertEqual(self.data['handlercount'], 0) |
2928 | + |
2929 | + |
2930 | +class TestHandlerHandlePart(unittest.TestCase): |
2931 | |
2932 | def setUp(self): |
2933 | self.data = "fake data" |
2934 | @@ -108,95 +116,80 @@ |
2935 | C{handle_part} is called without C{frequency} for |
2936 | C{handler_version} == 1. |
2937 | """ |
2938 | - mod_mock = self.mocker.mock() |
2939 | - getattr(mod_mock, "frequency") |
2940 | - self.mocker.result(settings.PER_INSTANCE) |
2941 | - getattr(mod_mock, "handler_version") |
2942 | - self.mocker.result(1) |
2943 | - mod_mock.handle_part(self.data, self.ctype, self.filename, |
2944 | - self.payload) |
2945 | - self.mocker.replay() |
2946 | - |
2947 | - handlers.run_part(mod_mock, self.data, self.filename, |
2948 | - self.payload, self.frequency, self.headers) |
2949 | + mod_mock = mock.Mock(frequency=settings.PER_INSTANCE, |
2950 | + handler_version=1) |
2951 | + handlers.run_part(mod_mock, self.data, self.filename, self.payload, |
2952 | + self.frequency, self.headers) |
2953 | + # Assert that the handle_part() method of the mock object got |
2954 | + # called with the expected arguments. |
2955 | + mod_mock.handle_part.assert_called_once_with( |
2956 | + self.data, self.ctype, self.filename, self.payload) |
2957 | |
2958 | def test_normal_version_2(self): |
2959 | """ |
2960 | C{handle_part} is called with C{frequency} for |
2961 | C{handler_version} == 2. |
2962 | """ |
2963 | - mod_mock = self.mocker.mock() |
2964 | - getattr(mod_mock, "frequency") |
2965 | - self.mocker.result(settings.PER_INSTANCE) |
2966 | - getattr(mod_mock, "handler_version") |
2967 | - self.mocker.result(2) |
2968 | - mod_mock.handle_part(self.data, self.ctype, self.filename, |
2969 | - self.payload, self.frequency) |
2970 | - self.mocker.replay() |
2971 | - |
2972 | - handlers.run_part(mod_mock, self.data, self.filename, |
2973 | - self.payload, self.frequency, self.headers) |
2974 | + mod_mock = mock.Mock(frequency=settings.PER_INSTANCE, |
2975 | + handler_version=2) |
2976 | + handlers.run_part(mod_mock, self.data, self.filename, self.payload, |
2977 | + self.frequency, self.headers) |
2978 | + # Assert that the handle_part() method of the mock object got |
2979 | + # called with the expected arguments. |
2980 | + mod_mock.handle_part.assert_called_once_with( |
2981 | + self.data, self.ctype, self.filename, self.payload, self.frequency) |
2982 | |
2983 | def test_modfreq_per_always(self): |
2984 | """ |
2985 | C{handle_part} is called regardless of frequency if nofreq is always. |
2986 | """ |
2987 | self.frequency = "once" |
2988 | - mod_mock = self.mocker.mock() |
2989 | - getattr(mod_mock, "frequency") |
2990 | - self.mocker.result(settings.PER_ALWAYS) |
2991 | - getattr(mod_mock, "handler_version") |
2992 | - self.mocker.result(1) |
2993 | - mod_mock.handle_part(self.data, self.ctype, self.filename, |
2994 | - self.payload) |
2995 | - self.mocker.replay() |
2996 | - |
2997 | - handlers.run_part(mod_mock, self.data, self.filename, |
2998 | - self.payload, self.frequency, self.headers) |
2999 | + mod_mock = mock.Mock(frequency=settings.PER_ALWAYS, |
3000 | + handler_version=1) |
3001 | + handlers.run_part(mod_mock, self.data, self.filename, self.payload, |
3002 | + self.frequency, self.headers) |
3003 | + # Assert that the handle_part() method of the mock object got |
3004 | + # called with the expected arguments. |
3005 | + mod_mock.handle_part.assert_called_once_with( |
3006 | + self.data, self.ctype, self.filename, self.payload) |
3007 | |
3008 | def test_no_handle_when_modfreq_once(self): |
3009 | """C{handle_part} is not called if frequency is once.""" |
3010 | self.frequency = "once" |
3011 | - mod_mock = self.mocker.mock() |
3012 | - getattr(mod_mock, "frequency") |
3013 | - self.mocker.result(settings.PER_ONCE) |
3014 | - self.mocker.replay() |
3015 | - |
3016 | - handlers.run_part(mod_mock, self.data, self.filename, |
3017 | - self.payload, self.frequency, self.headers) |
3018 | + mod_mock = mock.Mock(frequency=settings.PER_ONCE) |
3019 | + handlers.run_part(mod_mock, self.data, self.filename, self.payload, |
3020 | + self.frequency, self.headers) |
3021 | + # Assert that the handle_part() method of the mock object did |
3022 | + # not get called, since the module frequency is once. |
3023 | + self.assertEqual(0, mod_mock.handle_part.call_count) |
3025 | |
3026 | def test_exception_is_caught(self): |
3027 | """Exceptions within C{handle_part} are caught and logged.""" |
3028 | - mod_mock = self.mocker.mock() |
3029 | - getattr(mod_mock, "frequency") |
3030 | - self.mocker.result(settings.PER_INSTANCE) |
3031 | - getattr(mod_mock, "handler_version") |
3032 | - self.mocker.result(1) |
3033 | - mod_mock.handle_part(self.data, self.ctype, self.filename, |
3034 | - self.payload) |
3035 | - self.mocker.throw(Exception()) |
3036 | - self.mocker.replay() |
3037 | - |
3038 | - handlers.run_part(mod_mock, self.data, self.filename, |
3039 | - self.payload, self.frequency, self.headers) |
3040 | - |
3041 | - |
3042 | -class TestCmdlineUrl(MockerTestCase): |
3043 | + mod_mock = mock.Mock(frequency=settings.PER_INSTANCE, |
3044 | + handler_version=1) |
3045 | + handlers.run_part(mod_mock, self.data, self.filename, self.payload, |
3046 | + self.frequency, self.headers) |
3047 | + mod_mock.handle_part.side_effect = Exception |
3048 | + handlers.run_part(mod_mock, self.data, self.filename, self.payload, |
3049 | + self.frequency, self.headers) |
3050 | + mod_mock.handle_part.assert_called_with( |
3051 | + self.data, self.ctype, self.filename, self.payload) |
3052 | + |
3053 | + |
3054 | +class TestCmdlineUrl(unittest.TestCase): |
3055 | def test_invalid_content(self): |
3056 | url = "http://example.com/foo" |
3057 | key = "mykey" |
3058 | payload = "0" |
3059 | cmdline = "ro %s=%s bar=1" % (key, url) |
3060 | |
3061 | - mock_readurl = self.mocker.replace(url_helper.readurl, |
3062 | - passthrough=False) |
3063 | - mock_readurl(url, ARGS, KWARGS) |
3064 | - self.mocker.result(url_helper.StringResponse(payload)) |
3065 | - self.mocker.replay() |
3066 | - |
3067 | - self.assertEqual((key, url, None), |
3068 | - util.get_cmdline_url(names=[key], starts="xxxxxx", |
3069 | - cmdline=cmdline)) |
3070 | + with mock.patch('cloudinit.url_helper.readurl', |
3071 | + return_value=url_helper.StringResponse(payload)): |
3072 | + self.assertEqual( |
3073 | + util.get_cmdline_url(names=[key], starts="xxxxxx", |
3074 | + cmdline=cmdline), |
3075 | + (key, url, None)) |
3076 | |
3077 | def test_valid_content(self): |
3078 | url = "http://example.com/foo" |
3079 | @@ -204,27 +197,24 @@ |
3080 | payload = "xcloud-config\nmydata: foo\nbar: wark\n" |
3081 | cmdline = "ro %s=%s bar=1" % (key, url) |
3082 | |
3083 | - mock_readurl = self.mocker.replace(url_helper.readurl, |
3084 | - passthrough=False) |
3085 | - mock_readurl(url, ARGS, KWARGS) |
3086 | - self.mocker.result(url_helper.StringResponse(payload)) |
3087 | - self.mocker.replay() |
3088 | - |
3089 | - self.assertEqual((key, url, payload), |
3090 | - util.get_cmdline_url(names=[key], starts="xcloud-config", |
3091 | - cmdline=cmdline)) |
3092 | + with mock.patch('cloudinit.url_helper.readurl', |
3093 | + return_value=url_helper.StringResponse(payload)): |
3094 | + self.assertEqual( |
3095 | + util.get_cmdline_url(names=[key], starts="xcloud-config", |
3096 | + cmdline=cmdline), |
3097 | + (key, url, payload)) |
3098 | |
3099 | def test_no_key_found(self): |
3100 | url = "http://example.com/foo" |
3101 | key = "mykey" |
3102 | cmdline = "ro %s=%s bar=1" % (key, url) |
3103 | |
3104 | - self.mocker.replace(url_helper.readurl, passthrough=False) |
3105 | - self.mocker.result(url_helper.StringResponse("")) |
3106 | - self.mocker.replay() |
3107 | + with mock.patch('cloudinit.url_helper.readurl', |
3108 | + return_value=url_helper.StringResponse('')): |
3109 | + self.assertEqual( |
3110 | + util.get_cmdline_url(names=["does-not-appear"], |
3111 | + starts="#cloud-config", cmdline=cmdline), |
3112 | + (None, None, None)) |
3113 | |
3114 | - self.assertEqual((None, None, None), |
3115 | - util.get_cmdline_url(names=["does-not-appear"], |
3116 | - starts="#cloud-config", cmdline=cmdline)) |
3117 | |
3118 | # vi: ts=4 expandtab |
3119 | |
3120 | === modified file 'tests/unittests/test_builtin_handlers.py' |
3121 | --- tests/unittests/test_builtin_handlers.py 2014-07-23 16:07:12 +0000 |
3122 | +++ tests/unittests/test_builtin_handlers.py 2015-01-27 20:16:43 +0000 |
3123 | @@ -1,6 +1,13 @@ |
3124 | """Tests of the built-in user data handlers.""" |
3125 | |
3126 | import os |
3127 | +import shutil |
3128 | +import tempfile |
3129 | + |
3130 | +try: |
3131 | + from unittest import mock |
3132 | +except ImportError: |
3133 | + import mock |
3134 | |
3135 | from . import helpers as test_helpers |
3136 | |
3137 | @@ -14,10 +21,11 @@ |
3138 | |
3139 | |
3140 | class TestBuiltins(test_helpers.FilesystemMockingTestCase): |
3141 | - |
3142 | def test_upstart_frequency_no_out(self): |
3143 | - c_root = self.makeDir() |
3144 | - up_root = self.makeDir() |
3145 | + c_root = tempfile.mkdtemp() |
3146 | + self.addCleanup(shutil.rmtree, c_root) |
3147 | + up_root = tempfile.mkdtemp() |
3148 | + self.addCleanup(shutil.rmtree, up_root) |
3149 | paths = helpers.Paths({ |
3150 | 'cloud_dir': c_root, |
3151 | 'upstart_dir': up_root, |
3152 | @@ -36,7 +44,8 @@ |
3153 | |
3154 | def test_upstart_frequency_single(self): |
3155 | # files should be written out when frequency is ! per-instance |
3156 | - new_root = self.makeDir() |
3157 | + new_root = tempfile.mkdtemp() |
3158 | + self.addCleanup(shutil.rmtree, new_root) |
3159 | freq = PER_INSTANCE |
3160 | |
3161 | self.patchOS(new_root) |
3162 | @@ -49,16 +58,16 @@ |
3163 | util.ensure_dir("/run") |
3164 | util.ensure_dir("/etc/upstart") |
3165 | |
3166 | - mock_subp = self.mocker.replace(util.subp, passthrough=False) |
3167 | - mock_subp(["initctl", "reload-configuration"], capture=False) |
3168 | - self.mocker.replay() |
3169 | - |
3170 | - h = upstart_job.UpstartJobPartHandler(paths) |
3171 | - h.handle_part('', handlers.CONTENT_START, |
3172 | - None, None, None) |
3173 | - h.handle_part('blah', 'text/upstart-job', |
3174 | - 'test.conf', 'blah', freq) |
3175 | - h.handle_part('', handlers.CONTENT_END, |
3176 | - None, None, None) |
3177 | - |
3178 | - self.assertEquals(1, len(os.listdir('/etc/upstart'))) |
3179 | + with mock.patch.object(util, 'subp') as mockobj: |
3180 | + h = upstart_job.UpstartJobPartHandler(paths) |
3181 | + h.handle_part('', handlers.CONTENT_START, |
3182 | + None, None, None) |
3183 | + h.handle_part('blah', 'text/upstart-job', |
3184 | + 'test.conf', 'blah', freq) |
3185 | + h.handle_part('', handlers.CONTENT_END, |
3186 | + None, None, None) |
3187 | + |
3188 | + self.assertEquals(len(os.listdir('/etc/upstart')), 1) |
3189 | + |
3190 | + mockobj.assert_called_once_with( |
3191 | + ['initctl', 'reload-configuration'], capture=False) |
3192 | |
3193 | === modified file 'tests/unittests/test_cs_util.py' |
3194 | --- tests/unittests/test_cs_util.py 2014-02-12 10:14:49 +0000 |
3195 | +++ tests/unittests/test_cs_util.py 2015-01-27 20:16:43 +0000 |
3196 | @@ -1,7 +1,21 @@ |
3197 | -from mocker import MockerTestCase |
3198 | +from __future__ import print_function |
3199 | + |
3200 | +import sys |
3201 | +import unittest |
3202 | |
3203 | from cloudinit.cs_utils import Cepko |
3204 | |
3205 | +try: |
3206 | + skip = unittest.skip |
3207 | +except AttributeError: |
3208 | + # Python 2.6. Doesn't have to be high fidelity. |
3209 | + def skip(reason): |
3210 | + def decorator(func): |
3211 | + def wrapper(*args, **kws): |
3212 | + print(reason, file=sys.stderr) |
3213 | + return wrapper |
3214 | + return decorator |
3215 | + |
3216 | |
3217 | SERVER_CONTEXT = { |
3218 | "cpu": 1000, |
3219 | @@ -26,16 +40,21 @@ |
3220 | return SERVER_CONTEXT['tags'] |
3221 | |
3222 | |
3223 | -class CepkoResultTests(MockerTestCase): |
3224 | +# 2015-01-22 BAW: This test is completely useless because it only ever tests |
3225 | +# the CepkoMock object. Even in its original form, I don't think it ever |
3226 | +# touched the underlying Cepko class methods. |
3227 | +@skip('This test is completely useless') |
3228 | +class CepkoResultTests(unittest.TestCase): |
3229 | def setUp(self): |
3230 | - self.mocked = self.mocker.replace("cloudinit.cs_utils.Cepko", |
3231 | - spec=CepkoMock, |
3232 | - count=False, |
3233 | - passthrough=False) |
3234 | - self.mocked() |
3235 | - self.mocker.result(CepkoMock()) |
3236 | - self.mocker.replay() |
3237 | - self.c = Cepko() |
3238 | + pass |
3239 | + ## self.mocked = self.mocker.replace("cloudinit.cs_utils.Cepko", |
3240 | + ## spec=CepkoMock, |
3241 | + ## count=False, |
3242 | + ## passthrough=False) |
3243 | + ## self.mocked() |
3244 | + ## self.mocker.result(CepkoMock()) |
3245 | + ## self.mocker.replay() |
3246 | + ## self.c = Cepko() |
3247 | |
3248 | def test_getitem(self): |
3249 | result = self.c.all() |
3250 | |
3251 | === modified file 'tests/unittests/test_data.py' |
3252 | --- tests/unittests/test_data.py 2014-09-10 18:32:37 +0000 |
3253 | +++ tests/unittests/test_data.py 2015-01-27 20:16:43 +0000 |
3254 | @@ -1,10 +1,17 @@ |
3255 | """Tests for handling of userdata within cloud init.""" |
3256 | |
3257 | -import StringIO |
3258 | - |
3259 | import gzip |
3260 | import logging |
3261 | import os |
3262 | +import shutil |
3263 | +import tempfile |
3264 | + |
3265 | +try: |
3266 | + from unittest import mock |
3267 | +except ImportError: |
3268 | + import mock |
3269 | + |
3270 | +from six import BytesIO, StringIO |
3271 | |
3272 | from email.mime.application import MIMEApplication |
3273 | from email.mime.base import MIMEBase |
3274 | @@ -37,23 +44,22 @@ |
3275 | class TestConsumeUserData(helpers.FilesystemMockingTestCase): |
3276 | |
3277 | def setUp(self): |
3278 | - helpers.FilesystemMockingTestCase.setUp(self) |
3279 | + super(TestConsumeUserData, self).setUp() |
3280 | self._log = None |
3281 | self._log_file = None |
3282 | self._log_handler = None |
3283 | |
3284 | def tearDown(self): |
3285 | - helpers.FilesystemMockingTestCase.tearDown(self) |
3286 | if self._log_handler and self._log: |
3287 | self._log.removeHandler(self._log_handler) |
3288 | + helpers.FilesystemMockingTestCase.tearDown(self) |
3289 | |
3290 | def _patchIn(self, root): |
3291 | - self.restore() |
3292 | self.patchOS(root) |
3293 | self.patchUtils(root) |
3294 | |
3295 | def capture_log(self, lvl=logging.DEBUG): |
3296 | - log_file = StringIO.StringIO() |
3297 | + log_file = StringIO() |
3298 | self._log_handler = logging.StreamHandler(log_file) |
3299 | self._log_handler.setLevel(lvl) |
3300 | self._log = log.getLogger() |
3301 | @@ -71,7 +77,8 @@ |
3302 | |
3303 | ci = stages.Init() |
3304 | ci.datasource = FakeDataSource(blob) |
3305 | - new_root = self.makeDir() |
3306 | + new_root = tempfile.mkdtemp() |
3307 | + self.addCleanup(shutil.rmtree, new_root) |
3308 | self.patchUtils(new_root) |
3309 | self.patchOS(new_root) |
3310 | ci.fetch() |
3311 | @@ -99,7 +106,8 @@ |
3312 | { "op": "add", "path": "/foo", "value": "quxC" } |
3313 | ] |
3314 | ''' |
3315 | - new_root = self.makeDir() |
3316 | + new_root = tempfile.mkdtemp() |
3317 | + self.addCleanup(shutil.rmtree, new_root) |
3318 | self._patchIn(new_root) |
3319 | initer = stages.Init() |
3320 | initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob) |
3321 | @@ -138,7 +146,8 @@ |
3322 | { "op": "add", "path": "/foo", "value": "quxC" } |
3323 | ] |
3324 | ''' |
3325 | - new_root = self.makeDir() |
3326 | + new_root = tempfile.mkdtemp() |
3327 | + self.addCleanup(shutil.rmtree, new_root) |
3328 | self._patchIn(new_root) |
3329 | initer = stages.Init() |
3330 | initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob) |
3331 | @@ -184,7 +193,8 @@ |
3332 | |
3333 | ci = stages.Init() |
3334 | ci.datasource = FakeDataSource(str(message)) |
3335 | - new_root = self.makeDir() |
3336 | + new_root = tempfile.mkdtemp() |
3337 | + self.addCleanup(shutil.rmtree, new_root) |
3338 | self.patchUtils(new_root) |
3339 | self.patchOS(new_root) |
3340 | ci.fetch() |
3341 | @@ -214,7 +224,8 @@ |
3342 | run: |
3343 | - z |
3344 | ''' |
3345 | - new_root = self.makeDir() |
3346 | + new_root = tempfile.mkdtemp() |
3347 | + self.addCleanup(shutil.rmtree, new_root) |
3348 | self._patchIn(new_root) |
3349 | initer = stages.Init() |
3350 | initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob) |
3351 | @@ -249,7 +260,8 @@ |
3352 | enabled: True |
3353 | prefix: /bin/true |
3354 | ''' |
3355 | - new_root = self.makeDir() |
3356 | + new_root = tempfile.mkdtemp() |
3357 | + self.addCleanup(shutil.rmtree, new_root) |
3358 | self._patchIn(new_root) |
3359 | initer = stages.Init() |
3360 | initer.datasource = FakeDataSource(user_blob, vendordata=vendor_blob) |
3361 | @@ -309,7 +321,8 @@ |
3362 | paths = c_helpers.Paths({}, ds=FakeDataSource('')) |
3363 | cloud_cfg = handlers.cloud_config.CloudConfigPartHandler(paths) |
3364 | |
3365 | - new_root = self.makeDir() |
3366 | + new_root = tempfile.mkdtemp() |
3367 | + self.addCleanup(shutil.rmtree, new_root) |
3368 | self.patchUtils(new_root) |
3369 | self.patchOS(new_root) |
3370 | cloud_cfg.handle_part(None, handlers.CONTENT_START, None, None, None, |
3371 | @@ -335,25 +348,25 @@ |
3372 | data = "arbitrary text\n" |
3373 | ci.datasource = FakeDataSource(data) |
3374 | |
3375 | - mock_write = self.mocker.replace("cloudinit.util.write_file", |
3376 | - passthrough=False) |
3377 | - mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) |
3378 | - self.mocker.replay() |
3379 | - |
3380 | - log_file = self.capture_log(logging.WARNING) |
3381 | - ci.fetch() |
3382 | - ci.consume_data() |
3383 | - self.assertIn( |
3384 | - "Unhandled non-multipart (text/x-not-multipart) userdata:", |
3385 | - log_file.getvalue()) |
3386 | + with mock.patch('cloudinit.util.write_file') as mockobj: |
3387 | + log_file = self.capture_log(logging.WARNING) |
3388 | + ci.fetch() |
3389 | + ci.consume_data() |
3390 | + self.assertIn( |
3391 | + "Unhandled non-multipart (text/x-not-multipart) userdata:", |
3392 | + log_file.getvalue()) |
3393 | + |
3394 | + mockobj.assert_called_once_with( |
3395 | + ci.paths.get_ipath("cloud_config"), "", 0o600) |
3396 | + |
3397 | |
3398 | def test_mime_gzip_compressed(self): |
3399 | """Tests that individual message gzip encoding works.""" |
3400 | |
3401 | def gzip_part(text): |
3402 | - contents = StringIO.StringIO() |
3403 | - f = gzip.GzipFile(fileobj=contents, mode='w') |
3404 | - f.write(str(text)) |
3405 | + contents = BytesIO() |
3406 | + f = gzip.GzipFile(fileobj=contents, mode='wb') |
3407 | + f.write(util.encode_text(text)) |
3408 | f.flush() |
3409 | f.close() |
3410 | return MIMEApplication(contents.getvalue(), 'gzip') |
3411 | @@ -374,7 +387,8 @@ |
3412 | message.attach(gzip_part(base_content2)) |
3413 | ci = stages.Init() |
3414 | ci.datasource = FakeDataSource(str(message)) |
3415 | - new_root = self.makeDir() |
3416 | + new_root = tempfile.mkdtemp() |
3417 | + self.addCleanup(shutil.rmtree, new_root) |
3418 | self.patchUtils(new_root) |
3419 | self.patchOS(new_root) |
3420 | ci.fetch() |
3421 | @@ -394,17 +408,15 @@ |
3422 | message.set_payload("Just text") |
3423 | ci.datasource = FakeDataSource(message.as_string()) |
3424 | |
3425 | - mock_write = self.mocker.replace("cloudinit.util.write_file", |
3426 | - passthrough=False) |
3427 | - mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) |
3428 | - self.mocker.replay() |
3429 | - |
3430 | - log_file = self.capture_log(logging.WARNING) |
3431 | - ci.fetch() |
3432 | - ci.consume_data() |
3433 | - self.assertIn( |
3434 | - "Unhandled unknown content-type (text/plain)", |
3435 | - log_file.getvalue()) |
3436 | + with mock.patch('cloudinit.util.write_file') as mockobj: |
3437 | + log_file = self.capture_log(logging.WARNING) |
3438 | + ci.fetch() |
3439 | + ci.consume_data() |
3440 | + self.assertIn( |
3441 | + "Unhandled unknown content-type (text/plain)", |
3442 | + log_file.getvalue()) |
3443 | + mockobj.assert_called_once_with( |
3444 | + ci.paths.get_ipath("cloud_config"), "", 0o600) |
3445 | |
3446 | def test_shellscript(self): |
3447 | """Raw text starting #!/bin/sh is treated as script.""" |
3448 | @@ -413,16 +425,17 @@ |
3449 | ci.datasource = FakeDataSource(script) |
3450 | |
3451 | outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") |
3452 | - mock_write = self.mocker.replace("cloudinit.util.write_file", |
3453 | - passthrough=False) |
3454 | - mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) |
3455 | - mock_write(outpath, script, 0700) |
3456 | - self.mocker.replay() |
3457 | - |
3458 | - log_file = self.capture_log(logging.WARNING) |
3459 | - ci.fetch() |
3460 | - ci.consume_data() |
3461 | - self.assertEqual("", log_file.getvalue()) |
3462 | + |
3463 | + with mock.patch('cloudinit.util.write_file') as mockobj: |
3464 | + log_file = self.capture_log(logging.WARNING) |
3465 | + ci.fetch() |
3466 | + ci.consume_data() |
3467 | + self.assertEqual("", log_file.getvalue()) |
3468 | + |
3469 | + mockobj.assert_has_calls([ |
3470 | + mock.call(outpath, script, 0o700), |
3471 | + mock.call(ci.paths.get_ipath("cloud_config"), "", 0o600), |
3472 | + ]) |
3473 | |
3474 | def test_mime_text_x_shellscript(self): |
3475 | """Mime message of type text/x-shellscript is treated as script.""" |
3476 | @@ -433,16 +446,17 @@ |
3477 | ci.datasource = FakeDataSource(message.as_string()) |
3478 | |
3479 | outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") |
3480 | - mock_write = self.mocker.replace("cloudinit.util.write_file", |
3481 | - passthrough=False) |
3482 | - mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) |
3483 | - mock_write(outpath, script, 0700) |
3484 | - self.mocker.replay() |
3485 | - |
3486 | - log_file = self.capture_log(logging.WARNING) |
3487 | - ci.fetch() |
3488 | - ci.consume_data() |
3489 | - self.assertEqual("", log_file.getvalue()) |
3490 | + |
3491 | + with mock.patch('cloudinit.util.write_file') as mockobj: |
3492 | + log_file = self.capture_log(logging.WARNING) |
3493 | + ci.fetch() |
3494 | + ci.consume_data() |
3495 | + self.assertEqual("", log_file.getvalue()) |
3496 | + |
3497 | + mockobj.assert_has_calls([ |
3498 | + mock.call(outpath, script, 0o700), |
3499 | + mock.call(ci.paths.get_ipath("cloud_config"), "", 0o600), |
3500 | + ]) |
3501 | |
3502 | def test_mime_text_plain_shell(self): |
3503 | """Mime type text/plain starting #!/bin/sh is treated as script.""" |
3504 | @@ -453,13 +467,14 @@ |
3505 | ci.datasource = FakeDataSource(message.as_string()) |
3506 | |
3507 | outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001") |
3508 | - mock_write = self.mocker.replace("cloudinit.util.write_file", |
3509 | - passthrough=False) |
3510 | - mock_write(outpath, script, 0700) |
3511 | - mock_write(ci.paths.get_ipath("cloud_config"), "", 0600) |
3512 | - self.mocker.replay() |
3513 | - |
3514 | - log_file = self.capture_log(logging.WARNING) |
3515 | - ci.fetch() |
3516 | - ci.consume_data() |
3517 | - self.assertEqual("", log_file.getvalue()) |
3518 | + |
3519 | + with mock.patch('cloudinit.util.write_file') as mockobj: |
3520 | + log_file = self.capture_log(logging.WARNING) |
3521 | + ci.fetch() |
3522 | + ci.consume_data() |
3523 | + self.assertEqual("", log_file.getvalue()) |
3524 | + |
3525 | + mockobj.assert_has_calls([ |
3526 | + mock.call(outpath, script, 0o700), |
3527 | + mock.call(ci.paths.get_ipath("cloud_config"), "", 0o600), |
3528 | + ]) |
3529 | |
3530 | === modified file 'tests/unittests/test_datasource/test_altcloud.py' |
3531 | --- tests/unittests/test_datasource/test_altcloud.py 2015-01-14 19:24:09 +0000 |
3532 | +++ tests/unittests/test_datasource/test_altcloud.py 2015-01-27 20:16:43 +0000 |
3533 | @@ -46,7 +46,7 @@ |
3534 | cifile = open(cloudinit.sources.DataSourceAltCloud.CLOUD_INFO_FILE, 'w') |
3535 | cifile.write(value) |
3536 | cifile.close() |
3537 | - os.chmod(cloudinit.sources.DataSourceAltCloud.CLOUD_INFO_FILE, 0664) |
3538 | + os.chmod(cloudinit.sources.DataSourceAltCloud.CLOUD_INFO_FILE, 0o664) |
3539 | |
3540 | |
3541 | def _remove_cloud_info_file(): |
3542 | @@ -67,12 +67,12 @@ |
3543 | udfile = open(deltacloud_user_data_file, 'w') |
3544 | udfile.write(value) |
3545 | udfile.close() |
3546 | - os.chmod(deltacloud_user_data_file, 0664) |
3547 | + os.chmod(deltacloud_user_data_file, 0o664) |
3548 | |
3549 | udfile = open(user_data_file, 'w') |
3550 | udfile.write(value) |
3551 | udfile.close() |
3552 | - os.chmod(user_data_file, 0664) |
3553 | + os.chmod(user_data_file, 0o664) |
3554 | |
3555 | |
3556 | def _remove_user_data_files(mount_dir, |
3557 | |
3558 | === modified file 'tests/unittests/test_datasource/test_azure.py' |
3559 | --- tests/unittests/test_datasource/test_azure.py 2014-08-26 18:50:11 +0000 |
3560 | +++ tests/unittests/test_datasource/test_azure.py 2015-01-27 20:16:43 +0000 |
3561 | @@ -1,14 +1,24 @@ |
3562 | from cloudinit import helpers |
3563 | -from cloudinit.util import load_file |
3564 | +from cloudinit.util import b64e, load_file |
3565 | from cloudinit.sources import DataSourceAzure |
3566 | -from ..helpers import populate_dir |
3567 | - |
3568 | -import base64 |
3569 | +from ..helpers import TestCase, populate_dir |
3570 | + |
3571 | +try: |
3572 | + from unittest import mock |
3573 | +except ImportError: |
3574 | + import mock |
3575 | +try: |
3576 | + from contextlib import ExitStack |
3577 | +except ImportError: |
3578 | + from contextlib2 import ExitStack |
3579 | + |
3580 | import crypt |
3581 | -from mocker import MockerTestCase |
3582 | import os |
3583 | import stat |
3584 | import yaml |
3585 | +import shutil |
3586 | +import tempfile |
3587 | +import unittest |
3588 | |
3589 | |
3590 | def construct_valid_ovf_env(data=None, pubkeys=None, userdata=None): |
3591 | @@ -40,7 +50,7 @@ |
3592 | content += "<%s%s>%s</%s>\n" % (key, attrs, val, key) |
3593 | |
3594 | if userdata: |
3595 | - content += "<UserData>%s</UserData>\n" % (base64.b64encode(userdata)) |
3596 | + content += "<UserData>%s</UserData>\n" % (b64e(userdata)) |
3597 | |
3598 | if pubkeys: |
3599 | content += "<SSH><PublicKeys>\n" |
3600 | @@ -66,26 +76,25 @@ |
3601 | return content |
3602 | |
3603 | |
3604 | -class TestAzureDataSource(MockerTestCase): |
3605 | +class TestAzureDataSource(TestCase): |
3606 | |
3607 | def setUp(self): |
3608 | - # makeDir comes from MockerTestCase |
3609 | - self.tmp = self.makeDir() |
3610 | + super(TestAzureDataSource, self).setUp() |
3611 | + self.tmp = tempfile.mkdtemp() |
3612 | + self.addCleanup(shutil.rmtree, self.tmp) |
3613 | |
3614 | # patch cloud_dir, so our 'seed_dir' is guaranteed empty |
3615 | self.paths = helpers.Paths({'cloud_dir': self.tmp}) |
3616 | self.waagent_d = os.path.join(self.tmp, 'var', 'lib', 'waagent') |
3617 | |
3618 | - self.unapply = [] |
3619 | + self.patches = ExitStack() |
3620 | + self.addCleanup(self.patches.close) |
3621 | + |
3622 | super(TestAzureDataSource, self).setUp() |
3623 | |
3624 | - def tearDown(self): |
3625 | - apply_patches([i for i in reversed(self.unapply)]) |
3626 | - super(TestAzureDataSource, self).tearDown() |
3627 | - |
3628 | def apply_patches(self, patches): |
3629 | - ret = apply_patches(patches) |
3630 | - self.unapply += ret |
3631 | + for module, name, new in patches: |
3632 | + self.patches.enter_context(mock.patch.object(module, name, new)) |
3633 | |
3634 | def _get_ds(self, data): |
3635 | |
3636 | @@ -117,16 +126,14 @@ |
3637 | mod = DataSourceAzure |
3638 | mod.BUILTIN_DS_CONFIG['data_dir'] = self.waagent_d |
3639 | |
3640 | - self.apply_patches([(mod, 'list_possible_azure_ds_devs', dsdevs)]) |
3641 | - |
3642 | - self.apply_patches([(mod, 'invoke_agent', _invoke_agent), |
3643 | - (mod, 'wait_for_files', _wait_for_files), |
3644 | - (mod, 'pubkeys_from_crt_files', |
3645 | - _pubkeys_from_crt_files), |
3646 | - (mod, 'iid_from_shared_config', |
3647 | - _iid_from_shared_config), |
3648 | - (mod, 'apply_hostname_bounce', |
3649 | - _apply_hostname_bounce), ]) |
3650 | + self.apply_patches([ |
3651 | + (mod, 'list_possible_azure_ds_devs', dsdevs), |
3652 | + (mod, 'invoke_agent', _invoke_agent), |
3653 | + (mod, 'wait_for_files', _wait_for_files), |
3654 | + (mod, 'pubkeys_from_crt_files', _pubkeys_from_crt_files), |
3655 | + (mod, 'iid_from_shared_config', _iid_from_shared_config), |
3656 | + (mod, 'apply_hostname_bounce', _apply_hostname_bounce), |
3657 | + ]) |
3658 | |
3659 | dsrc = mod.DataSourceAzureNet( |
3660 | data.get('sys_cfg', {}), distro=None, paths=self.paths) |
3661 | @@ -153,7 +160,7 @@ |
3662 | ret = dsrc.get_data() |
3663 | self.assertTrue(ret) |
3664 | self.assertTrue(os.path.isdir(self.waagent_d)) |
3665 | - self.assertEqual(stat.S_IMODE(os.stat(self.waagent_d).st_mode), 0700) |
3666 | + self.assertEqual(stat.S_IMODE(os.stat(self.waagent_d).st_mode), 0o700) |
3667 | |
3668 | def test_user_cfg_set_agent_command_plain(self): |
3669 | # set dscfg in via plaintext |
3670 | @@ -174,7 +181,7 @@ |
3671 | # set dscfg in via base64 encoded yaml |
3672 | cfg = {'agent_command': "my_command"} |
3673 | odata = {'HostName': "myhost", 'UserName': "myuser", |
3674 | - 'dscfg': {'text': base64.b64encode(yaml.dump(cfg)), |
3675 | + 'dscfg': {'text': b64e(yaml.dump(cfg)), |
3676 | 'encoding': 'base64'}} |
3677 | data = {'ovfcontent': construct_valid_ovf_env(data=odata)} |
3678 | |
3679 | @@ -226,13 +233,13 @@ |
3680 | |
3681 | def test_userdata_found(self): |
3682 | mydata = "FOOBAR" |
3683 | - odata = {'UserData': base64.b64encode(mydata)} |
3684 | + odata = {'UserData': b64e(mydata)} |
3685 | data = {'ovfcontent': construct_valid_ovf_env(data=odata)} |
3686 | |
3687 | dsrc = self._get_ds(data) |
3688 | ret = dsrc.get_data() |
3689 | self.assertTrue(ret) |
3690 | - self.assertEqual(dsrc.userdata_raw, mydata) |
3691 | + self.assertEqual(dsrc.userdata_raw, mydata.encode('utf-8')) |
3692 | |
3693 | def test_no_datasource_expected(self): |
3694 | # no source should be found if no seed_dir and no devs |
3695 | @@ -274,7 +281,7 @@ |
3696 | 'command': 'my-bounce-command', |
3697 | 'hostname_command': 'my-hostname-command'}} |
3698 | odata = {'HostName': "xhost", |
3699 | - 'dscfg': {'text': base64.b64encode(yaml.dump(cfg)), |
3700 | + 'dscfg': {'text': b64e(yaml.dump(cfg)), |
3701 | 'encoding': 'base64'}} |
3702 | data = {'ovfcontent': construct_valid_ovf_env(data=odata)} |
3703 | self._get_ds(data).get_data() |
3704 | @@ -289,7 +296,7 @@ |
3705 | # config specifying set_hostname off should not bounce |
3706 | cfg = {'set_hostname': False} |
3707 | odata = {'HostName': "xhost", |
3708 | - 'dscfg': {'text': base64.b64encode(yaml.dump(cfg)), |
3709 | + 'dscfg': {'text': b64e(yaml.dump(cfg)), |
3710 | 'encoding': 'base64'}} |
3711 | data = {'ovfcontent': construct_valid_ovf_env(data=odata)} |
3712 | self._get_ds(data).get_data() |
3713 | @@ -318,7 +325,7 @@ |
3714 | # Make sure that user can affect disk aliases |
3715 | dscfg = {'disk_aliases': {'ephemeral0': '/dev/sdc'}} |
3716 | odata = {'HostName': "myhost", 'UserName': "myuser", |
3717 | - 'dscfg': {'text': base64.b64encode(yaml.dump(dscfg)), |
3718 | + 'dscfg': {'text': b64e(yaml.dump(dscfg)), |
3719 | 'encoding': 'base64'}} |
3720 | usercfg = {'disk_setup': {'/dev/sdc': {'something': '...'}, |
3721 | 'ephemeral0': False}} |
3722 | @@ -340,7 +347,7 @@ |
3723 | dsrc = self._get_ds(data) |
3724 | dsrc.get_data() |
3725 | |
3726 | - self.assertEqual(userdata, dsrc.userdata_raw) |
3727 | + self.assertEqual(userdata.encode('us-ascii'), dsrc.userdata_raw) |
3728 | |
3729 | def test_ovf_env_arrives_in_waagent_dir(self): |
3730 | xml = construct_valid_ovf_env(data={}, userdata="FOODATA") |
3731 | @@ -355,7 +362,7 @@ |
3732 | |
3733 | def test_existing_ovf_same(self): |
3734 | # waagent/SharedConfig left alone if found ovf-env.xml same as cached |
3735 | - odata = {'UserData': base64.b64encode("SOMEUSERDATA")} |
3736 | + odata = {'UserData': b64e("SOMEUSERDATA")} |
3737 | data = {'ovfcontent': construct_valid_ovf_env(data=odata)} |
3738 | |
3739 | populate_dir(self.waagent_d, |
3740 | @@ -379,9 +386,9 @@ |
3741 | # 'get_data' should remove SharedConfig.xml in /var/lib/waagent |
3742 | # if ovf-env.xml differs. |
3743 | cached_ovfenv = construct_valid_ovf_env( |
3744 | - {'userdata': base64.b64encode("FOO_USERDATA")}) |
3745 | + {'userdata': b64e("FOO_USERDATA")}) |
3746 | new_ovfenv = construct_valid_ovf_env( |
3747 | - {'userdata': base64.b64encode("NEW_USERDATA")}) |
3748 | + {'userdata': b64e("NEW_USERDATA")}) |
3749 | |
3750 | populate_dir(self.waagent_d, |
3751 | {'ovf-env.xml': cached_ovfenv, |
3752 | @@ -391,7 +398,7 @@ |
3753 | dsrc = self._get_ds({'ovfcontent': new_ovfenv}) |
3754 | ret = dsrc.get_data() |
3755 | self.assertTrue(ret) |
3756 | - self.assertEqual(dsrc.userdata_raw, "NEW_USERDATA") |
3757 | + self.assertEqual(dsrc.userdata_raw, b"NEW_USERDATA") |
3758 | self.assertTrue(os.path.exists( |
3759 | os.path.join(self.waagent_d, 'otherfile'))) |
3760 | self.assertFalse( |
3761 | @@ -402,7 +409,7 @@ |
3762 | load_file(os.path.join(self.waagent_d, 'ovf-env.xml'))) |
3763 | |
3764 | |
3765 | -class TestReadAzureOvf(MockerTestCase): |
3766 | +class TestReadAzureOvf(TestCase): |
3767 | def test_invalid_xml_raises_non_azure_ds(self): |
3768 | invalid_xml = "<foo>" + construct_valid_ovf_env(data={}) |
3769 | self.assertRaises(DataSourceAzure.BrokenAzureDataSource, |
3770 | @@ -417,7 +424,7 @@ |
3771 | self.assertIn(mypk, cfg['_pubkeys']) |
3772 | |
3773 | |
3774 | -class TestReadAzureSharedConfig(MockerTestCase): |
3775 | +class TestReadAzureSharedConfig(unittest.TestCase): |
3776 | def test_valid_content(self): |
3777 | xml = """<?xml version="1.0" encoding="utf-8"?> |
3778 | <SharedConfig> |
3779 | @@ -429,14 +436,3 @@ |
3780 | </SharedConfig>""" |
3781 | ret = DataSourceAzure.iid_from_shared_config_content(xml) |
3782 | self.assertEqual("MY_INSTANCE_ID", ret) |
3783 | - |
3784 | - |
3785 | -def apply_patches(patches): |
3786 | - ret = [] |
3787 | - for (ref, name, replace) in patches: |
3788 | - if replace is None: |
3789 | - continue |
3790 | - orig = getattr(ref, name) |
3791 | - setattr(ref, name, replace) |
3792 | - ret.append((ref, name, orig)) |
3793 | - return ret |
3794 | |
3795 | === modified file 'tests/unittests/test_datasource/test_cloudsigma.py' |
3796 | --- tests/unittests/test_datasource/test_cloudsigma.py 2014-07-23 16:18:16 +0000 |
3797 | +++ tests/unittests/test_datasource/test_cloudsigma.py 2015-01-27 20:16:43 +0000 |
3798 | @@ -39,6 +39,7 @@ |
3799 | |
3800 | class DataSourceCloudSigmaTest(test_helpers.TestCase): |
3801 | def setUp(self): |
3802 | + super(DataSourceCloudSigmaTest, self).setUp() |
3803 | self.datasource = DataSourceCloudSigma.DataSourceCloudSigma("", "", "") |
3804 | self.datasource.is_running_in_cloudsigma = lambda: True |
3805 | self.datasource.cepko = CepkoMock(SERVER_CONTEXT) |
3806 | |
3807 | === modified file 'tests/unittests/test_datasource/test_configdrive.py' |
3808 | --- tests/unittests/test_datasource/test_configdrive.py 2014-07-23 16:18:16 +0000 |
3809 | +++ tests/unittests/test_datasource/test_configdrive.py 2015-01-27 20:16:43 +0000 |
3810 | @@ -1,10 +1,17 @@ |
3811 | from copy import copy |
3812 | import json |
3813 | import os |
3814 | -import os.path |
3815 | +import shutil |
3816 | +import tempfile |
3817 | |
3818 | -import mocker |
3819 | -from mocker import MockerTestCase |
3820 | +try: |
3821 | + from unittest import mock |
3822 | +except ImportError: |
3823 | + import mock |
3824 | +try: |
3825 | + from contextlib import ExitStack |
3826 | +except ImportError: |
3827 | + from contextlib2 import ExitStack |
3828 | |
3829 | from cloudinit import helpers |
3830 | from cloudinit import settings |
3831 | @@ -12,7 +19,8 @@ |
3832 | from cloudinit.sources.helpers import openstack |
3833 | from cloudinit import util |
3834 | |
3835 | -from .. import helpers as unit_helpers |
3836 | +from ..helpers import TestCase |
3837 | + |
3838 | |
3839 | PUBKEY = u'ssh-rsa AAAAB3NzaC1....sIkJhq8wdX+4I3A4cYbYP ubuntu@server-460\n' |
3840 | EC2_META = { |
3841 | @@ -64,11 +72,12 @@ |
3842 | 'openstack/latest/user_data': USER_DATA} |
3843 | |
3844 | |
3845 | -class TestConfigDriveDataSource(MockerTestCase): |
3846 | +class TestConfigDriveDataSource(TestCase): |
3847 | |
3848 | def setUp(self): |
3849 | super(TestConfigDriveDataSource, self).setUp() |
3850 | - self.tmp = self.makeDir() |
3851 | + self.tmp = tempfile.mkdtemp() |
3852 | + self.addCleanup(shutil.rmtree, self.tmp) |
3853 | |
3854 | def test_ec2_metadata(self): |
3855 | populate_dir(self.tmp, CFG_DRIVE_FILES_V2) |
3856 | @@ -91,23 +100,28 @@ |
3857 | 'swap': '/dev/vda3', |
3858 | } |
3859 | for name, dev_name in name_tests.items(): |
3860 | - with unit_helpers.mocker() as my_mock: |
3861 | - find_mock = my_mock.replace(util.find_devs_with, |
3862 | - spec=False, passthrough=False) |
3863 | + with ExitStack() as mocks: |
3864 | provided_name = dev_name[len('/dev/'):] |
3865 | provided_name = "s" + provided_name[1:] |
3866 | - find_mock(mocker.ARGS) |
3867 | - my_mock.result([provided_name]) |
3868 | - exists_mock = my_mock.replace(os.path.exists, |
3869 | - spec=False, passthrough=False) |
3870 | - exists_mock(mocker.ARGS) |
3871 | - my_mock.result(False) |
3872 | - exists_mock(mocker.ARGS) |
3873 | - my_mock.result(True) |
3874 | - my_mock.replay() |
3875 | + find_mock = mocks.enter_context( |
3876 | + mock.patch.object(util, 'find_devs_with', |
3877 | + return_value=[provided_name])) |
3878 | + # We want os.path.exists() to return False on its first call, |
3879 | + # and True on its second call. We use a handy generator as |
3880 | + # the mock side effect for this. The mocked function returns |
3881 | + # what the side effect returns. |
3882 | + def exists_side_effect(): |
3883 | + yield False |
3884 | + yield True |
3885 | + exists_mock = mocks.enter_context( |
3886 | + mock.patch.object(os.path, 'exists', |
3887 | + side_effect=exists_side_effect())) |
3888 | device = cfg_ds.device_name_to_device(name) |
3889 | self.assertEquals(dev_name, device) |
3890 | |
3891 | + find_mock.assert_called_once_with(mock.ANY) |
3892 | + self.assertEqual(exists_mock.call_count, 2) |
3893 | + |
3894 | def test_dev_os_map(self): |
3895 | populate_dir(self.tmp, CFG_DRIVE_FILES_V2) |
3896 | cfg_ds = ds.DataSourceConfigDrive(settings.CFG_BUILTIN, |
3897 | @@ -123,19 +137,19 @@ |
3898 | 'swap': '/dev/vda3', |
3899 | } |
3900 | for name, dev_name in name_tests.items(): |
3901 | - with unit_helpers.mocker() as my_mock: |
3902 | - find_mock = my_mock.replace(util.find_devs_with, |
3903 | - spec=False, passthrough=False) |
3904 | - find_mock(mocker.ARGS) |
3905 | - my_mock.result([dev_name]) |
3906 | - exists_mock = my_mock.replace(os.path.exists, |
3907 | - spec=False, passthrough=False) |
3908 | - exists_mock(mocker.ARGS) |
3909 | - my_mock.result(True) |
3910 | - my_mock.replay() |
3911 | + with ExitStack() as mocks: |
3912 | + find_mock = mocks.enter_context( |
3913 | + mock.patch.object(util, 'find_devs_with', |
3914 | + return_value=[dev_name])) |
3915 | + exists_mock = mocks.enter_context( |
3916 | + mock.patch.object(os.path, 'exists', |
3917 | + return_value=True)) |
3918 | device = cfg_ds.device_name_to_device(name) |
3919 | self.assertEquals(dev_name, device) |
3920 | |
3921 | + find_mock.assert_called_once_with(mock.ANY) |
3922 | + exists_mock.assert_called_once_with(mock.ANY) |
3923 | + |
3924 | def test_dev_ec2_remap(self): |
3925 | populate_dir(self.tmp, CFG_DRIVE_FILES_V2) |
3926 | cfg_ds = ds.DataSourceConfigDrive(settings.CFG_BUILTIN, |
3927 | @@ -156,16 +170,21 @@ |
3928 | 'root2k': None, |
3929 | } |
3930 | for name, dev_name in name_tests.items(): |
3931 | - with unit_helpers.mocker(verify_calls=False) as my_mock: |
3932 | - exists_mock = my_mock.replace(os.path.exists, |
3933 | - spec=False, passthrough=False) |
3934 | - exists_mock(mocker.ARGS) |
3935 | - my_mock.result(False) |
3936 | - exists_mock(mocker.ARGS) |
3937 | - my_mock.result(True) |
3938 | - my_mock.replay() |
3939 | + # We want os.path.exists() to return False on its first call, |
3940 | + # and True on its second call. We use a handy generator as |
3941 | + # the mock side effect for this. The mocked function returns |
3942 | + # what the side effect returns. |
3943 | + def exists_side_effect(): |
3944 | + yield False |
3945 | + yield True |
3946 | + with mock.patch.object(os.path, 'exists', |
3947 | + side_effect=exists_side_effect()): |
3948 | device = cfg_ds.device_name_to_device(name) |
3949 | self.assertEquals(dev_name, device) |
3950 | + # We don't assert the call count for os.path.exists() because |
3951 | + not all of the entries in name_tests result in two calls to |
3952 | + # that function. Specifically, 'root2k' doesn't seem to call |
3953 | + # it at all. |
3954 | |
3955 | def test_dev_ec2_map(self): |
3956 | populate_dir(self.tmp, CFG_DRIVE_FILES_V2) |
3957 | @@ -173,12 +192,6 @@ |
3958 | None, |
3959 | helpers.Paths({})) |
3960 | found = ds.read_config_drive(self.tmp) |
3961 | - exists_mock = self.mocker.replace(os.path.exists, |
3962 | - spec=False, passthrough=False) |
3963 | - exists_mock(mocker.ARGS) |
3964 | - self.mocker.count(0, None) |
3965 | - self.mocker.result(True) |
3966 | - self.mocker.replay() |
3967 | ec2_md = found['ec2-metadata'] |
3968 | os_md = found['metadata'] |
3969 | cfg_ds.ec2_metadata = ec2_md |
3970 | @@ -193,8 +206,9 @@ |
3971 | 'root2k': None, |
3972 | } |
3973 | for name, dev_name in name_tests.items(): |
3974 | - device = cfg_ds.device_name_to_device(name) |
3975 | - self.assertEquals(dev_name, device) |
3976 | + with mock.patch.object(os.path, 'exists', return_value=True): |
3977 | + device = cfg_ds.device_name_to_device(name) |
3978 | + self.assertEquals(dev_name, device) |
3979 | |
3980 | def test_dir_valid(self): |
3981 | """Verify a dir is read as such.""" |
3982 | @@ -326,7 +340,7 @@ |
3983 | |
3984 | |
3985 | def populate_dir(seed_dir, files): |
3986 | - for (name, content) in files.iteritems(): |
3987 | + for (name, content) in files.items(): |
3988 | path = os.path.join(seed_dir, name) |
3989 | dirname = os.path.dirname(path) |
3990 | if not os.path.isdir(dirname): |
3991 | |
3992 | === modified file 'tests/unittests/test_datasource/test_digitalocean.py' |
3993 | --- tests/unittests/test_datasource/test_digitalocean.py 2015-01-06 17:02:38 +0000 |
3994 | +++ tests/unittests/test_datasource/test_digitalocean.py 2015-01-27 20:16:43 +0000 |
3995 | @@ -18,8 +18,7 @@ |
3996 | import httpretty |
3997 | import re |
3998 | |
3999 | -from types import ListType |
4000 | -from urlparse import urlparse |
4001 | +from six.moves.urllib_parse import urlparse |
4002 | |
4003 | from cloudinit import settings |
4004 | from cloudinit import helpers |
4005 | @@ -110,7 +109,7 @@ |
4006 | self.assertEqual([DO_META.get('public-keys')], |
4007 | self.ds.get_public_ssh_keys()) |
4008 | |
4009 | - self.assertIs(type(self.ds.get_public_ssh_keys()), ListType) |
4010 | + self.assertIsInstance(self.ds.get_public_ssh_keys(), list) |
4011 | |
4012 | @httpretty.activate |
4013 | def test_multiple_ssh_keys(self): |
4014 | @@ -124,4 +123,4 @@ |
4015 | self.assertEqual(DO_META.get('public-keys').splitlines(), |
4016 | self.ds.get_public_ssh_keys()) |
4017 | |
4018 | - self.assertIs(type(self.ds.get_public_ssh_keys()), ListType) |
4019 | + self.assertIsInstance(self.ds.get_public_ssh_keys(), list) |
4020 | |
4021 | === modified file 'tests/unittests/test_datasource/test_gce.py' |
4022 | --- tests/unittests/test_datasource/test_gce.py 2015-01-14 14:29:57 +0000 |
4023 | +++ tests/unittests/test_datasource/test_gce.py 2015-01-27 20:16:43 +0000 |
4024 | @@ -19,7 +19,7 @@ |
4025 | import re |
4026 | |
4027 | from base64 import b64encode, b64decode |
4028 | -from urlparse import urlparse |
4029 | +from six.moves.urllib_parse import urlparse |
4030 | |
4031 | from cloudinit import settings |
4032 | from cloudinit import helpers |
4033 | @@ -45,7 +45,7 @@ |
4034 | 'instance/id': '12345', |
4035 | 'instance/hostname': 'server.project-baz.local', |
4036 | 'instance/zone': 'baz/bang', |
4037 | - 'instance/attributes/user-data': b64encode('/bin/echo baz\n'), |
4038 | + 'instance/attributes/user-data': b64encode(b'/bin/echo baz\n'), |
4039 | 'instance/attributes/user-data-encoding': 'base64', |
4040 | } |
4041 | |
4042 | |
4043 | === modified file 'tests/unittests/test_datasource/test_maas.py' |
4044 | --- tests/unittests/test_datasource/test_maas.py 2014-07-23 16:50:45 +0000 |
4045 | +++ tests/unittests/test_datasource/test_maas.py 2015-01-27 20:16:43 +0000 |
4046 | @@ -1,19 +1,25 @@ |
4047 | from copy import copy |
4048 | import os |
4049 | +import shutil |
4050 | +import tempfile |
4051 | |
4052 | from cloudinit.sources import DataSourceMAAS |
4053 | from cloudinit import url_helper |
4054 | -from ..helpers import populate_dir |
4055 | - |
4056 | -import mocker |
4057 | - |
4058 | - |
4059 | -class TestMAASDataSource(mocker.MockerTestCase): |
4060 | +from ..helpers import TestCase, populate_dir |
4061 | + |
4062 | +try: |
4063 | + from unittest import mock |
4064 | +except ImportError: |
4065 | + import mock |
4066 | + |
4067 | + |
4068 | +class TestMAASDataSource(TestCase): |
4069 | |
4070 | def setUp(self): |
4071 | super(TestMAASDataSource, self).setUp() |
4072 | # Make a temp directoy for tests to use. |
4073 | - self.tmp = self.makeDir() |
4074 | + self.tmp = tempfile.mkdtemp() |
4075 | + self.addCleanup(shutil.rmtree, self.tmp) |
4076 | |
4077 | def test_seed_dir_valid(self): |
4078 | """Verify a valid seeddir is read as such.""" |
4079 | @@ -93,16 +99,18 @@ |
4080 | |
4081 | def test_seed_url_valid(self): |
4082 | """Verify that valid seed_url is read as such.""" |
4083 | - valid = {'meta-data/instance-id': 'i-instanceid', |
4084 | + valid = { |
4085 | + 'meta-data/instance-id': 'i-instanceid', |
4086 | 'meta-data/local-hostname': 'test-hostname', |
4087 | 'meta-data/public-keys': 'test-hostname', |
4088 | - 'user-data': 'foodata'} |
4089 | + 'user-data': 'foodata', |
4090 | + } |
4091 | valid_order = [ |
4092 | 'meta-data/local-hostname', |
4093 | 'meta-data/instance-id', |
4094 | 'meta-data/public-keys', |
4095 | 'user-data', |
4096 | - ] |
4097 | + ] |
4098 | my_seed = "http://example.com/xmeta" |
4099 | my_ver = "1999-99-99" |
4100 | my_headers = {'header1': 'value1', 'header2': 'value2'} |
4101 | @@ -110,28 +118,38 @@ |
4102 | def my_headers_cb(url): |
4103 | return my_headers |
4104 | |
4105 | - mock_request = self.mocker.replace(url_helper.readurl, |
4106 | - passthrough=False) |
4107 | - |
4108 | - for key in valid_order: |
4109 | - url = "%s/%s/%s" % (my_seed, my_ver, key) |
4110 | - mock_request(url, headers=None, timeout=mocker.ANY, |
4111 | - data=mocker.ANY, sec_between=mocker.ANY, |
4112 | - ssl_details=mocker.ANY, retries=mocker.ANY, |
4113 | - headers_cb=my_headers_cb, |
4114 | - exception_cb=mocker.ANY) |
4115 | - resp = valid.get(key) |
4116 | - self.mocker.result(url_helper.StringResponse(resp)) |
4117 | - self.mocker.replay() |
4118 | - |
4119 | - (userdata, metadata) = DataSourceMAAS.read_maas_seed_url(my_seed, |
4120 | - header_cb=my_headers_cb, version=my_ver) |
4121 | - |
4122 | - self.assertEqual("foodata", userdata) |
4123 | - self.assertEqual(metadata['instance-id'], |
4124 | - valid['meta-data/instance-id']) |
4125 | - self.assertEqual(metadata['local-hostname'], |
4126 | - valid['meta-data/local-hostname']) |
4127 | + # Each time url_helper.readurl() is called, something different is |
4128 | + # returned based on the canned data above. We need to build up a list |
4129 | + # of side effect return values, which the mock will return. At the |
4130 | + # same time, we'll build up a list of expected call arguments for |
4131 | + # asserting after the code under test is run. |
4132 | + calls = [] |
4133 | + |
4134 | + def side_effect(): |
4135 | + for key in valid_order: |
4136 | + resp = valid.get(key) |
4137 | + url = "%s/%s/%s" % (my_seed, my_ver, key) |
4138 | + calls.append( |
4139 | + mock.call(url, headers=None, timeout=mock.ANY, |
4140 | + data=mock.ANY, sec_between=mock.ANY, |
4141 | + ssl_details=mock.ANY, retries=mock.ANY, |
4142 | + headers_cb=my_headers_cb, |
4143 | + exception_cb=mock.ANY)) |
4144 | + yield url_helper.StringResponse(resp) |
4145 | + |
4146 | + # Now do the actual call of the code under test. |
4147 | + with mock.patch.object(url_helper, 'readurl', |
4148 | + side_effect=side_effect()) as mockobj: |
4149 | + userdata, metadata = DataSourceMAAS.read_maas_seed_url( |
4150 | + my_seed, header_cb=my_headers_cb, version=my_ver) |
4151 | + |
4152 | + self.assertEqual("foodata", userdata) |
4153 | + self.assertEqual(metadata['instance-id'], |
4154 | + valid['meta-data/instance-id']) |
4155 | + self.assertEqual(metadata['local-hostname'], |
4156 | + valid['meta-data/local-hostname']) |
4157 | + |
4158 | + mockobj.assert_has_calls(calls) |
4159 | |
4160 | def test_seed_url_invalid(self): |
4161 | """Verify that invalid seed_url raises MAASSeedDirMalformed.""" |
4162 | |
4163 | === modified file 'tests/unittests/test_datasource/test_nocloud.py' |
4164 | --- tests/unittests/test_datasource/test_nocloud.py 2014-09-10 18:32:37 +0000 |
4165 | +++ tests/unittests/test_datasource/test_nocloud.py 2015-01-27 20:16:43 +0000 |
4166 | @@ -1,35 +1,39 @@ |
4167 | from cloudinit import helpers |
4168 | from cloudinit.sources import DataSourceNoCloud |
4169 | from cloudinit import util |
4170 | -from ..helpers import populate_dir |
4171 | +from ..helpers import TestCase, populate_dir |
4172 | |
4173 | -from mocker import MockerTestCase |
4174 | import os |
4175 | import yaml |
4176 | - |
4177 | - |
4178 | -class TestNoCloudDataSource(MockerTestCase): |
4179 | +import shutil |
4180 | +import tempfile |
4181 | +import unittest |
4182 | + |
4183 | +try: |
4184 | + from unittest import mock |
4185 | +except ImportError: |
4186 | + import mock |
4187 | +try: |
4188 | + from contextlib import ExitStack |
4189 | +except ImportError: |
4190 | + from contextlib2 import ExitStack |
4191 | + |
4192 | + |
4193 | +class TestNoCloudDataSource(TestCase): |
4194 | |
4195 | def setUp(self): |
4196 | - self.tmp = self.makeDir() |
4197 | + super(TestNoCloudDataSource, self).setUp() |
4198 | + self.tmp = tempfile.mkdtemp() |
4199 | + self.addCleanup(shutil.rmtree, self.tmp) |
4200 | self.paths = helpers.Paths({'cloud_dir': self.tmp}) |
4201 | |
4202 | self.cmdline = "root=TESTCMDLINE" |
4203 | |
4204 | - self.unapply = [] |
4205 | - self.apply_patches([(util, 'get_cmdline', self._getcmdline)]) |
4206 | - super(TestNoCloudDataSource, self).setUp() |
4207 | - |
4208 | - def tearDown(self): |
4209 | - apply_patches([i for i in reversed(self.unapply)]) |
4210 | - super(TestNoCloudDataSource, self).tearDown() |
4211 | - |
4212 | - def apply_patches(self, patches): |
4213 | - ret = apply_patches(patches) |
4214 | - self.unapply += ret |
4215 | - |
4216 | - def _getcmdline(self): |
4217 | - return self.cmdline |
4218 | + self.mocks = ExitStack() |
4219 | + self.addCleanup(self.mocks.close) |
4220 | + |
4221 | + self.mocks.enter_context( |
4222 | + mock.patch.object(util, 'get_cmdline', return_value=self.cmdline)) |
4223 | |
4224 | def test_nocloud_seed_dir(self): |
4225 | md = {'instance-id': 'IID', 'dsmode': 'local'} |
4226 | @@ -59,7 +63,9 @@ |
4227 | def my_find_devs_with(*args, **kwargs): |
4228 | raise PsuedoException |
4229 | |
4230 | - self.apply_patches([(util, 'find_devs_with', my_find_devs_with)]) |
4231 | + self.mocks.enter_context( |
4232 | + mock.patch.object(util, 'find_devs_with', |
4233 | + side_effect=PsuedoException)) |
4234 | |
4235 | # by default, NoCloud should search for filesystems by label |
4236 | sys_cfg = {'datasource': {'NoCloud': {}}} |
4237 | @@ -85,7 +91,7 @@ |
4238 | |
4239 | data = { |
4240 | 'fs_label': None, |
4241 | - 'meta-data': {'instance-id': 'IID'}, |
4242 | + 'meta-data': yaml.safe_dump({'instance-id': 'IID'}), |
4243 | 'user-data': "USER_DATA_RAW", |
4244 | } |
4245 | |
4246 | @@ -133,7 +139,7 @@ |
4247 | self.assertTrue(ret) |
4248 | |
4249 | |
4250 | -class TestParseCommandLineData(MockerTestCase): |
4251 | +class TestParseCommandLineData(unittest.TestCase): |
4252 | |
4253 | def test_parse_cmdline_data_valid(self): |
4254 | ds_id = "ds=nocloud" |
4255 | @@ -178,15 +184,4 @@ |
4256 | self.assertFalse(ret) |
4257 | |
4258 | |
4259 | -def apply_patches(patches): |
4260 | - ret = [] |
4261 | - for (ref, name, replace) in patches: |
4262 | - if replace is None: |
4263 | - continue |
4264 | - orig = getattr(ref, name) |
4265 | - setattr(ref, name, replace) |
4266 | - ret.append((ref, name, orig)) |
4267 | - return ret |
4268 | - |
4269 | - |
4270 | # vi: ts=4 expandtab |
4271 | |
4272 | === modified file 'tests/unittests/test_datasource/test_opennebula.py' |
4273 | --- tests/unittests/test_datasource/test_opennebula.py 2014-07-23 16:50:45 +0000 |
4274 | +++ tests/unittests/test_datasource/test_opennebula.py 2015-01-27 20:16:43 +0000 |
4275 | @@ -1,12 +1,14 @@ |
4276 | from cloudinit import helpers |
4277 | from cloudinit.sources import DataSourceOpenNebula as ds |
4278 | from cloudinit import util |
4279 | -from mocker import MockerTestCase |
4280 | -from ..helpers import populate_dir |
4281 | +from ..helpers import TestCase, populate_dir |
4282 | |
4283 | -from base64 import b64encode |
4284 | import os |
4285 | import pwd |
4286 | +import shutil |
4287 | +import tempfile |
4288 | +import unittest |
4289 | + |
4290 | |
4291 | TEST_VARS = { |
4292 | 'VAR1': 'single', |
4293 | @@ -37,12 +39,13 @@ |
4294 | ''' |
4295 | |
4296 | |
4297 | -class TestOpenNebulaDataSource(MockerTestCase): |
4298 | +class TestOpenNebulaDataSource(TestCase): |
4299 | parsed_user = None |
4300 | |
4301 | def setUp(self): |
4302 | super(TestOpenNebulaDataSource, self).setUp() |
4303 | - self.tmp = self.makeDir() |
4304 | + self.tmp = tempfile.mkdtemp() |
4305 | + self.addCleanup(shutil.rmtree, self.tmp) |
4306 | self.paths = helpers.Paths({'cloud_dir': self.tmp}) |
4307 | |
4308 | # defaults for few tests |
4309 | @@ -176,7 +179,7 @@ |
4310 | self.assertEqual(USER_DATA, results['userdata']) |
4311 | |
4312 | def test_user_data_encoding_required_for_decode(self): |
4313 | - b64userdata = b64encode(USER_DATA) |
4314 | + b64userdata = util.b64e(USER_DATA) |
4315 | for k in ('USER_DATA', 'USERDATA'): |
4316 | my_d = os.path.join(self.tmp, k) |
4317 | populate_context_dir(my_d, {k: b64userdata}) |
4318 | @@ -188,7 +191,7 @@ |
4319 | def test_user_data_base64_encoding(self): |
4320 | for k in ('USER_DATA', 'USERDATA'): |
4321 | my_d = os.path.join(self.tmp, k) |
4322 | - populate_context_dir(my_d, {k: b64encode(USER_DATA), |
4323 | + populate_context_dir(my_d, {k: util.b64e(USER_DATA), |
4324 | 'USERDATA_ENCODING': 'base64'}) |
4325 | results = ds.read_context_disk_dir(my_d) |
4326 | |
4327 | @@ -228,7 +231,7 @@ |
4328 | util.find_devs_with = orig_find_devs_with |
4329 | |
4330 | |
4331 | -class TestOpenNebulaNetwork(MockerTestCase): |
4332 | +class TestOpenNebulaNetwork(unittest.TestCase): |
4333 | |
4334 | def setUp(self): |
4335 | super(TestOpenNebulaNetwork, self).setUp() |
4336 | @@ -280,7 +283,7 @@ |
4337 | ''') |
4338 | |
4339 | |
4340 | -class TestParseShellConfig(MockerTestCase): |
4341 | +class TestParseShellConfig(unittest.TestCase): |
4342 | def test_no_seconds(self): |
4343 | cfg = '\n'.join(["foo=bar", "SECONDS=2", "xx=foo"]) |
4344 | # we could test 'sleep 2', but that would make the test run slower. |
4345 | @@ -290,7 +293,7 @@ |
4346 | |
4347 | def populate_context_dir(path, variables): |
4348 | data = "# Context variables generated by OpenNebula\n" |
4349 | - for (k, v) in variables.iteritems(): |
4350 | + for k, v in variables.items(): |
4351 | data += ("%s='%s'\n" % (k.upper(), v.replace(r"'", r"'\''"))) |
4352 | populate_dir(path, {'context.sh': data}) |
4353 | |
4354 | |
4355 | === modified file 'tests/unittests/test_datasource/test_openstack.py' |
4356 | --- tests/unittests/test_datasource/test_openstack.py 2014-10-20 18:29:54 +0000 |
4357 | +++ tests/unittests/test_datasource/test_openstack.py 2015-01-27 20:16:43 +0000 |
4358 | @@ -20,12 +20,11 @@ |
4359 | import json |
4360 | import re |
4361 | |
4362 | -from StringIO import StringIO |
4363 | - |
4364 | -from urlparse import urlparse |
4365 | - |
4366 | from .. import helpers as test_helpers |
4367 | |
4368 | +from six import StringIO |
4369 | +from six.moves.urllib.parse import urlparse |
4370 | + |
4371 | from cloudinit import helpers |
4372 | from cloudinit import settings |
4373 | from cloudinit.sources import DataSourceOpenStack as ds |
4374 | |
4375 | === modified file 'tests/unittests/test_datasource/test_smartos.py' |
4376 | --- tests/unittests/test_datasource/test_smartos.py 2015-01-06 17:02:38 +0000 |
4377 | +++ tests/unittests/test_datasource/test_smartos.py 2015-01-27 20:16:43 +0000 |
4378 | @@ -22,16 +22,21 @@ |
4379 | # return responses. |
4380 | # |
4381 | |
4382 | -import base64 |
4383 | +from __future__ import print_function |
4384 | + |
4385 | from cloudinit import helpers as c_helpers |
4386 | from cloudinit.sources import DataSourceSmartOS |
4387 | +from cloudinit.util import b64e |
4388 | from .. import helpers |
4389 | import os |
4390 | import os.path |
4391 | import re |
4392 | +import shutil |
4393 | +import tempfile |
4394 | import stat |
4395 | import uuid |
4396 | |
4397 | + |
4398 | MOCK_RETURNS = { |
4399 | 'hostname': 'test-host', |
4400 | 'root_authorized_keys': 'ssh-rsa AAAAB3Nz...aC1yc2E= keyname', |
4401 | @@ -107,11 +112,12 @@ |
4402 | |
4403 | class TestSmartOSDataSource(helpers.FilesystemMockingTestCase): |
4404 | def setUp(self): |
4405 | - helpers.FilesystemMockingTestCase.setUp(self) |
4406 | + super(TestSmartOSDataSource, self).setUp() |
4407 | |
4408 | - # makeDir comes from MockerTestCase |
4409 | - self.tmp = self.makeDir() |
4410 | - self.legacy_user_d = self.makeDir() |
4411 | + self.tmp = tempfile.mkdtemp() |
4412 | + self.addCleanup(shutil.rmtree, self.tmp) |
4413 | + self.legacy_user_d = tempfile.mkdtemp() |
4414 | + self.addCleanup(shutil.rmtree, self.legacy_user_d) |
4415 | |
4416 | # If you should want to watch the logs... |
4417 | self._log = None |
4418 | @@ -227,7 +233,7 @@ |
4419 | my_returns = MOCK_RETURNS.copy() |
4420 | my_returns['base64_all'] = "true" |
4421 | for k in ('hostname', 'cloud-init:user-data'): |
4422 | - my_returns[k] = base64.b64encode(my_returns[k]) |
4423 | + my_returns[k] = b64e(my_returns[k]) |
4424 | |
4425 | dsrc = self._get_ds(mockdata=my_returns) |
4426 | ret = dsrc.get_data() |
4427 | @@ -248,7 +254,7 @@ |
4428 | my_returns['b64-cloud-init:user-data'] = "true" |
4429 | my_returns['b64-hostname'] = "true" |
4430 | for k in ('hostname', 'cloud-init:user-data'): |
4431 | - my_returns[k] = base64.b64encode(my_returns[k]) |
4432 | + my_returns[k] = b64e(my_returns[k]) |
4433 | |
4434 | dsrc = self._get_ds(mockdata=my_returns) |
4435 | ret = dsrc.get_data() |
4436 | @@ -264,7 +270,7 @@ |
4437 | my_returns = MOCK_RETURNS.copy() |
4438 | my_returns['base64_keys'] = 'hostname,ignored' |
4439 | for k in ('hostname',): |
4440 | - my_returns[k] = base64.b64encode(my_returns[k]) |
4441 | + my_returns[k] = b64e(my_returns[k]) |
4442 | |
4443 | dsrc = self._get_ds(mockdata=my_returns) |
4444 | ret = dsrc.get_data() |
4445 | @@ -365,7 +371,7 @@ |
4446 | permissions = oct(os.stat(name_f)[stat.ST_MODE])[-3:] |
4447 | if re.match(r'.*\/mdata-user-data$', name_f): |
4448 | found_new = True |
4449 | - print name_f |
4450 | + print(name_f) |
4451 | self.assertEquals(permissions, '400') |
4452 | |
4453 | self.assertFalse(found_new) |
4454 | |
4455 | === modified file 'tests/unittests/test_distros/test_generic.py' |
4456 | --- tests/unittests/test_distros/test_generic.py 2014-08-26 19:53:41 +0000 |
4457 | +++ tests/unittests/test_distros/test_generic.py 2015-01-27 20:16:43 +0000 |
4458 | @@ -4,6 +4,8 @@ |
4459 | from .. import helpers |
4460 | |
4461 | import os |
4462 | +import shutil |
4463 | +import tempfile |
4464 | |
4465 | unknown_arch_info = { |
4466 | 'arches': ['default'], |
4467 | @@ -53,7 +55,8 @@ |
4468 | def setUp(self): |
4469 | super(TestGenericDistro, self).setUp() |
4470 | # Make a temp directoy for tests to use. |
4471 | - self.tmp = self.makeDir() |
4472 | + self.tmp = tempfile.mkdtemp() |
4473 | + self.addCleanup(shutil.rmtree, self.tmp) |
4474 | |
4475 | def _write_load_sudoers(self, _user, rules): |
4476 | cls = distros.fetch("ubuntu") |
4477 | @@ -64,7 +67,6 @@ |
4478 | self.patchUtils(self.tmp) |
4479 | d.write_sudo_rules("harlowja", rules) |
4480 | contents = util.load_file(d.ci_sudoers_fn) |
4481 | - self.restore() |
4482 | return contents |
4483 | |
4484 | def _count_in(self, lines_look_for, text_content): |
4485 | |
4486 | === modified file 'tests/unittests/test_distros/test_hostname.py' |
4487 | --- tests/unittests/test_distros/test_hostname.py 2012-10-11 19:49:45 +0000 |
4488 | +++ tests/unittests/test_distros/test_hostname.py 2015-01-27 20:16:43 +0000 |
4489 | @@ -1,4 +1,4 @@ |
4490 | -from mocker import MockerTestCase |
4491 | +import unittest |
4492 | |
4493 | from cloudinit.distros.parsers import hostname |
4494 | |
4495 | @@ -12,7 +12,7 @@ |
4496 | BASE_HOSTNAME = BASE_HOSTNAME.strip() |
4497 | |
4498 | |
4499 | -class TestHostnameHelper(MockerTestCase): |
4500 | +class TestHostnameHelper(unittest.TestCase): |
4501 | def test_parse_same(self): |
4502 | hn = hostname.HostnameConf(BASE_HOSTNAME) |
4503 | self.assertEquals(str(hn).strip(), BASE_HOSTNAME) |
4504 | |
4505 | === modified file 'tests/unittests/test_distros/test_hosts.py' |
4506 | --- tests/unittests/test_distros/test_hosts.py 2012-10-10 23:21:22 +0000 |
4507 | +++ tests/unittests/test_distros/test_hosts.py 2015-01-27 20:16:43 +0000 |
4508 | @@ -1,4 +1,4 @@ |
4509 | -from mocker import MockerTestCase |
4510 | +import unittest |
4511 | |
4512 | from cloudinit.distros.parsers import hosts |
4513 | |
4514 | @@ -14,7 +14,7 @@ |
4515 | BASE_ETC = BASE_ETC.strip() |
4516 | |
4517 | |
4518 | -class TestHostsHelper(MockerTestCase): |
4519 | +class TestHostsHelper(unittest.TestCase): |
4520 | def test_parse(self): |
4521 | eh = hosts.HostsConf(BASE_ETC) |
4522 | self.assertEquals(eh.get_entry('127.0.0.1'), [['localhost']]) |
4523 | |
4524 | === modified file 'tests/unittests/test_distros/test_netconfig.py' |
4525 | --- tests/unittests/test_distros/test_netconfig.py 2015-01-06 17:02:38 +0000 |
4526 | +++ tests/unittests/test_distros/test_netconfig.py 2015-01-27 20:16:43 +0000 |
4527 | @@ -1,9 +1,17 @@ |
4528 | -from mocker import MockerTestCase |
4529 | - |
4530 | -import mocker |
4531 | - |
4532 | import os |
4533 | |
4534 | +try: |
4535 | + from unittest import mock |
4536 | +except ImportError: |
4537 | + import mock |
4538 | +try: |
4539 | + from contextlib import ExitStack |
4540 | +except ImportError: |
4541 | + from contextlib2 import ExitStack |
4542 | + |
4543 | +from six import StringIO |
4544 | +from ..helpers import TestCase |
4545 | + |
4546 | from cloudinit import distros |
4547 | from cloudinit import helpers |
4548 | from cloudinit import settings |
4549 | @@ -11,8 +19,6 @@ |
4550 | |
4551 | from cloudinit.distros.parsers.sys_conf import SysConf |
4552 | |
4553 | -from StringIO import StringIO |
4554 | - |
4555 | |
4556 | BASE_NET_CFG = ''' |
4557 | auto lo |
4558 | @@ -74,7 +80,7 @@ |
4559 | return self.buffer.getvalue() |
4560 | |
4561 | |
4562 | -class TestNetCfgDistro(MockerTestCase): |
4563 | +class TestNetCfgDistro(TestCase): |
4564 | |
4565 | def _get_distro(self, dname): |
4566 | cls = distros.fetch(dname) |
4567 | @@ -85,34 +91,28 @@ |
4568 | |
4569 | def test_simple_write_ub(self): |
4570 | ub_distro = self._get_distro('ubuntu') |
4571 | - util_mock = self.mocker.replace(util.write_file, |
4572 | - spec=False, passthrough=False) |
4573 | - exists_mock = self.mocker.replace(os.path.isfile, |
4574 | - spec=False, passthrough=False) |
4575 | - |
4576 | - exists_mock(mocker.ARGS) |
4577 | - self.mocker.count(0, None) |
4578 | - self.mocker.result(False) |
4579 | - |
4580 | - write_bufs = {} |
4581 | - |
4582 | - def replace_write(filename, content, mode=0644, omode="wb"): |
4583 | - buf = WriteBuffer() |
4584 | - buf.mode = mode |
4585 | - buf.omode = omode |
4586 | - buf.write(content) |
4587 | - write_bufs[filename] = buf |
4588 | - |
4589 | - util_mock(mocker.ARGS) |
4590 | - self.mocker.call(replace_write) |
4591 | - self.mocker.replay() |
4592 | - ub_distro.apply_network(BASE_NET_CFG, False) |
4593 | - |
4594 | - self.assertEquals(len(write_bufs), 1) |
4595 | - self.assertIn('/etc/network/interfaces', write_bufs) |
4596 | - write_buf = write_bufs['/etc/network/interfaces'] |
4597 | - self.assertEquals(str(write_buf).strip(), BASE_NET_CFG.strip()) |
4598 | - self.assertEquals(write_buf.mode, 0644) |
4599 | + with ExitStack() as mocks: |
4600 | + write_bufs = {} |
4601 | + |
4602 | + def replace_write(filename, content, mode=0o644, omode="wb"): |
4603 | + buf = WriteBuffer() |
4604 | + buf.mode = mode |
4605 | + buf.omode = omode |
4606 | + buf.write(content) |
4607 | + write_bufs[filename] = buf |
4608 | + |
4609 | + mocks.enter_context( |
4610 | + mock.patch.object(util, 'write_file', replace_write)) |
4611 | + mocks.enter_context( |
4612 | + mock.patch.object(os.path, 'isfile', return_value=False)) |
4613 | + |
4614 | + ub_distro.apply_network(BASE_NET_CFG, False) |
4615 | + |
4616 | + self.assertEquals(len(write_bufs), 1) |
4617 | + self.assertIn('/etc/network/interfaces', write_bufs) |
4618 | + write_buf = write_bufs['/etc/network/interfaces'] |
4619 | + self.assertEquals(str(write_buf).strip(), BASE_NET_CFG.strip()) |
4620 | + self.assertEquals(write_buf.mode, 0o644) |
4621 | |
4622 | def assertCfgEquals(self, blob1, blob2): |
4623 | b1 = dict(SysConf(blob1.strip().splitlines())) |
4624 | @@ -127,53 +127,41 @@ |
4625 | |
4626 | def test_simple_write_rh(self): |
4627 | rh_distro = self._get_distro('rhel') |
4628 | - write_mock = self.mocker.replace(util.write_file, |
4629 | - spec=False, passthrough=False) |
4630 | - load_mock = self.mocker.replace(util.load_file, |
4631 | - spec=False, passthrough=False) |
4632 | - exists_mock = self.mocker.replace(os.path.isfile, |
4633 | - spec=False, passthrough=False) |
4634 | |
4635 | write_bufs = {} |
4636 | |
4637 | - def replace_write(filename, content, mode=0644, omode="wb"): |
4638 | + def replace_write(filename, content, mode=0o644, omode="wb"): |
4639 | buf = WriteBuffer() |
4640 | buf.mode = mode |
4641 | buf.omode = omode |
4642 | buf.write(content) |
4643 | write_bufs[filename] = buf |
4644 | |
4645 | - exists_mock(mocker.ARGS) |
4646 | - self.mocker.count(0, None) |
4647 | - self.mocker.result(False) |
4648 | - |
4649 | - load_mock(mocker.ARGS) |
4650 | - self.mocker.count(0, None) |
4651 | - self.mocker.result('') |
4652 | - |
4653 | - for _i in range(0, 3): |
4654 | - write_mock(mocker.ARGS) |
4655 | - self.mocker.call(replace_write) |
4656 | - |
4657 | - write_mock(mocker.ARGS) |
4658 | - self.mocker.call(replace_write) |
4659 | - |
4660 | - self.mocker.replay() |
4661 | - rh_distro.apply_network(BASE_NET_CFG, False) |
4662 | - |
4663 | - self.assertEquals(len(write_bufs), 4) |
4664 | - self.assertIn('/etc/sysconfig/network-scripts/ifcfg-lo', write_bufs) |
4665 | - write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-lo'] |
4666 | - expected_buf = ''' |
4667 | + with ExitStack() as mocks: |
4668 | + mocks.enter_context( |
4669 | + mock.patch.object(util, 'write_file', replace_write)) |
4670 | + mocks.enter_context( |
4671 | + mock.patch.object(util, 'load_file', return_value='')) |
4672 | + mocks.enter_context( |
4673 | + mock.patch.object(os.path, 'isfile', return_value=False)) |
4674 | + |
4675 | + rh_distro.apply_network(BASE_NET_CFG, False) |
4676 | + |
4677 | + self.assertEquals(len(write_bufs), 4) |
4678 | + self.assertIn('/etc/sysconfig/network-scripts/ifcfg-lo', |
4679 | + write_bufs) |
4680 | + write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-lo'] |
4681 | + expected_buf = ''' |
4682 | DEVICE="lo" |
4683 | ONBOOT=yes |
4684 | ''' |
4685 | - self.assertCfgEquals(expected_buf, str(write_buf)) |
4686 | - self.assertEquals(write_buf.mode, 0644) |
4687 | + self.assertCfgEquals(expected_buf, str(write_buf)) |
4688 | + self.assertEquals(write_buf.mode, 0o644) |
4689 | |
4690 | - self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth0', write_bufs) |
4691 | - write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth0'] |
4692 | - expected_buf = ''' |
4693 | + self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth0', |
4694 | + write_bufs) |
4695 | + write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth0'] |
4696 | + expected_buf = ''' |
4697 | DEVICE="eth0" |
4698 | BOOTPROTO="static" |
4699 | NETMASK="255.255.255.0" |
4700 | @@ -182,77 +170,66 @@ |
4701 | GATEWAY="192.168.1.254" |
4702 | BROADCAST="192.168.1.0" |
4703 | ''' |
4704 | - self.assertCfgEquals(expected_buf, str(write_buf)) |
4705 | - self.assertEquals(write_buf.mode, 0644) |
4706 | + self.assertCfgEquals(expected_buf, str(write_buf)) |
4707 | + self.assertEquals(write_buf.mode, 0o644) |
4708 | |
4709 | - self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth1', write_bufs) |
4710 | - write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth1'] |
4711 | - expected_buf = ''' |
4712 | + self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth1', |
4713 | + write_bufs) |
4714 | + write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth1'] |
4715 | + expected_buf = ''' |
4716 | DEVICE="eth1" |
4717 | BOOTPROTO="dhcp" |
4718 | ONBOOT=yes |
4719 | ''' |
4720 | - self.assertCfgEquals(expected_buf, str(write_buf)) |
4721 | - self.assertEquals(write_buf.mode, 0644) |
4722 | + self.assertCfgEquals(expected_buf, str(write_buf)) |
4723 | + self.assertEquals(write_buf.mode, 0o644) |
4724 | |
4725 | - self.assertIn('/etc/sysconfig/network', write_bufs) |
4726 | - write_buf = write_bufs['/etc/sysconfig/network'] |
4727 | - expected_buf = ''' |
4728 | + self.assertIn('/etc/sysconfig/network', write_bufs) |
4729 | + write_buf = write_bufs['/etc/sysconfig/network'] |
4730 | + expected_buf = ''' |
4731 | # Created by cloud-init v. 0.7 |
4732 | NETWORKING=yes |
4733 | ''' |
4734 | - self.assertCfgEquals(expected_buf, str(write_buf)) |
4735 | - self.assertEquals(write_buf.mode, 0644) |
4736 | + self.assertCfgEquals(expected_buf, str(write_buf)) |
4737 | + self.assertEquals(write_buf.mode, 0o644) |
4738 | |
4739 | def test_write_ipv6_rhel(self): |
4740 | rh_distro = self._get_distro('rhel') |
4741 | - write_mock = self.mocker.replace(util.write_file, |
4742 | - spec=False, passthrough=False) |
4743 | - load_mock = self.mocker.replace(util.load_file, |
4744 | - spec=False, passthrough=False) |
4745 | - exists_mock = self.mocker.replace(os.path.isfile, |
4746 | - spec=False, passthrough=False) |
4747 | |
4748 | write_bufs = {} |
4749 | |
4750 | - def replace_write(filename, content, mode=0644, omode="wb"): |
4751 | + def replace_write(filename, content, mode=0o644, omode="wb"): |
4752 | buf = WriteBuffer() |
4753 | buf.mode = mode |
4754 | buf.omode = omode |
4755 | buf.write(content) |
4756 | write_bufs[filename] = buf |
4757 | |
4758 | - exists_mock(mocker.ARGS) |
4759 | - self.mocker.count(0, None) |
4760 | - self.mocker.result(False) |
4761 | - |
4762 | - load_mock(mocker.ARGS) |
4763 | - self.mocker.count(0, None) |
4764 | - self.mocker.result('') |
4765 | - |
4766 | - for _i in range(0, 3): |
4767 | - write_mock(mocker.ARGS) |
4768 | - self.mocker.call(replace_write) |
4769 | - |
4770 | - write_mock(mocker.ARGS) |
4771 | - self.mocker.call(replace_write) |
4772 | - |
4773 | - self.mocker.replay() |
4774 | - rh_distro.apply_network(BASE_NET_CFG_IPV6, False) |
4775 | - |
4776 | - self.assertEquals(len(write_bufs), 4) |
4777 | - self.assertIn('/etc/sysconfig/network-scripts/ifcfg-lo', write_bufs) |
4778 | - write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-lo'] |
4779 | - expected_buf = ''' |
4780 | + with ExitStack() as mocks: |
4781 | + mocks.enter_context( |
4782 | + mock.patch.object(util, 'write_file', replace_write)) |
4783 | + mocks.enter_context( |
4784 | + mock.patch.object(util, 'load_file', return_value='')) |
4785 | + mocks.enter_context( |
4786 | + mock.patch.object(os.path, 'isfile', return_value=False)) |
4787 | + |
4788 | + rh_distro.apply_network(BASE_NET_CFG_IPV6, False) |
4789 | + |
4790 | + self.assertEquals(len(write_bufs), 4) |
4791 | + self.assertIn('/etc/sysconfig/network-scripts/ifcfg-lo', |
4792 | + write_bufs) |
4793 | + write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-lo'] |
4794 | + expected_buf = ''' |
4795 | DEVICE="lo" |
4796 | ONBOOT=yes |
4797 | ''' |
4798 | - self.assertCfgEquals(expected_buf, str(write_buf)) |
4799 | - self.assertEquals(write_buf.mode, 0644) |
4800 | + self.assertCfgEquals(expected_buf, str(write_buf)) |
4801 | + self.assertEquals(write_buf.mode, 0o644) |
4802 | |
4803 | - self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth0', write_bufs) |
4804 | - write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth0'] |
4805 | - expected_buf = ''' |
4806 | + self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth0', |
4807 | + write_bufs) |
4808 | + write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth0'] |
4809 | + expected_buf = ''' |
4810 | DEVICE="eth0" |
4811 | BOOTPROTO="static" |
4812 | NETMASK="255.255.255.0" |
4813 | @@ -264,11 +241,12 @@ |
4814 | IPV6ADDR="2607:f0d0:1002:0011::2" |
4815 | IPV6_DEFAULTGW="2607:f0d0:1002:0011::1" |
4816 | ''' |
4817 | - self.assertCfgEquals(expected_buf, str(write_buf)) |
4818 | - self.assertEquals(write_buf.mode, 0644) |
4819 | - self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth1', write_bufs) |
4820 | - write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth1'] |
4821 | - expected_buf = ''' |
4822 | + self.assertCfgEquals(expected_buf, str(write_buf)) |
4823 | + self.assertEquals(write_buf.mode, 0o644) |
4824 | + self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth1', |
4825 | + write_bufs) |
4826 | + write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth1'] |
4827 | + expected_buf = ''' |
4828 | DEVICE="eth1" |
4829 | BOOTPROTO="static" |
4830 | NETMASK="255.255.255.0" |
4831 | @@ -280,38 +258,22 @@ |
4832 | IPV6ADDR="2607:f0d0:1002:0011::3" |
4833 | IPV6_DEFAULTGW="2607:f0d0:1002:0011::1" |
4834 | ''' |
4835 | - self.assertCfgEquals(expected_buf, str(write_buf)) |
4836 | - self.assertEquals(write_buf.mode, 0644) |
4837 | + self.assertCfgEquals(expected_buf, str(write_buf)) |
4838 | + self.assertEquals(write_buf.mode, 0o644) |
4839 | |
4840 | - self.assertIn('/etc/sysconfig/network', write_bufs) |
4841 | - write_buf = write_bufs['/etc/sysconfig/network'] |
4842 | - expected_buf = ''' |
4843 | + self.assertIn('/etc/sysconfig/network', write_bufs) |
4844 | + write_buf = write_bufs['/etc/sysconfig/network'] |
4845 | + expected_buf = ''' |
4846 | # Created by cloud-init v. 0.7 |
4847 | NETWORKING=yes |
4848 | NETWORKING_IPV6=yes |
4849 | IPV6_AUTOCONF=no |
4850 | ''' |
4851 | - self.assertCfgEquals(expected_buf, str(write_buf)) |
4852 | - self.assertEquals(write_buf.mode, 0644) |
4853 | + self.assertCfgEquals(expected_buf, str(write_buf)) |
4854 | + self.assertEquals(write_buf.mode, 0o644) |
4855 | |
4856 | def test_simple_write_freebsd(self): |
4857 | fbsd_distro = self._get_distro('freebsd') |
4858 | - util_mock = self.mocker.replace(util.write_file, |
4859 | - spec=False, passthrough=False) |
4860 | - exists_mock = self.mocker.replace(os.path.isfile, |
4861 | - spec=False, passthrough=False) |
4862 | - load_mock = self.mocker.replace(util.load_file, |
4863 | - spec=False, passthrough=False) |
4864 | - subp_mock = self.mocker.replace(util.subp, |
4865 | - spec=False, passthrough=False) |
4866 | - |
4867 | - subp_mock(['ifconfig', '-a']) |
4868 | - self.mocker.count(0, None) |
4869 | - self.mocker.result(('vtnet0', '')) |
4870 | - |
4871 | - exists_mock(mocker.ARGS) |
4872 | - self.mocker.count(0, None) |
4873 | - self.mocker.result(False) |
4874 | |
4875 | write_bufs = {} |
4876 | read_bufs = { |
4877 | @@ -319,7 +281,7 @@ |
4878 | '/etc/resolv.conf': '', |
4879 | } |
4880 | |
4881 | - def replace_write(filename, content, mode=0644, omode="wb"): |
4882 | + def replace_write(filename, content, mode=0o644, omode="wb"): |
4883 | buf = WriteBuffer() |
4884 | buf.mode = mode |
4885 | buf.omode = omode |
4886 | @@ -336,23 +298,24 @@ |
4887 | return str(write_bufs[fname]) |
4888 | return read_bufs[fname] |
4889 | |
4890 | - util_mock(mocker.ARGS) |
4891 | - self.mocker.call(replace_write) |
4892 | - self.mocker.count(0, None) |
4893 | - |
4894 | - load_mock(mocker.ARGS) |
4895 | - self.mocker.call(replace_read) |
4896 | - self.mocker.count(0, None) |
4897 | - |
4898 | - self.mocker.replay() |
4899 | - fbsd_distro.apply_network(BASE_NET_CFG, False) |
4900 | - |
4901 | - self.assertIn('/etc/rc.conf', write_bufs) |
4902 | - write_buf = write_bufs['/etc/rc.conf'] |
4903 | - expected_buf = ''' |
4904 | + with ExitStack() as mocks: |
4905 | + mocks.enter_context( |
4906 | + mock.patch.object(util, 'subp', return_value=('vtnet0', ''))) |
4907 | + mocks.enter_context( |
4908 | + mock.patch.object(os.path, 'exists', return_value=False)) |
4909 | + mocks.enter_context( |
4910 | + mock.patch.object(util, 'write_file', replace_write)) |
4911 | + mocks.enter_context( |
4912 | + mock.patch.object(util, 'load_file', replace_read)) |
4913 | + |
4914 | + fbsd_distro.apply_network(BASE_NET_CFG, False) |
4915 | + |
4916 | + self.assertIn('/etc/rc.conf', write_bufs) |
4917 | + write_buf = write_bufs['/etc/rc.conf'] |
4918 | + expected_buf = ''' |
4919 | ifconfig_vtnet0="192.168.1.5 netmask 255.255.255.0" |
4920 | ifconfig_vtnet1="DHCP" |
4921 | defaultrouter="192.168.1.254" |
4922 | ''' |
4923 | - self.assertCfgEquals(expected_buf, str(write_buf)) |
4924 | - self.assertEquals(write_buf.mode, 0644) |
4925 | + self.assertCfgEquals(expected_buf, str(write_buf)) |
4926 | + self.assertEquals(write_buf.mode, 0o644) |
4927 | |
4928 | === modified file 'tests/unittests/test_distros/test_resolv.py' |
4929 | --- tests/unittests/test_distros/test_resolv.py 2012-11-13 13:48:19 +0000 |
4930 | +++ tests/unittests/test_distros/test_resolv.py 2015-01-27 20:16:43 +0000 |
4931 | @@ -1,8 +1,7 @@ |
4932 | -from mocker import MockerTestCase |
4933 | - |
4934 | from cloudinit.distros.parsers import resolv_conf |
4935 | |
4936 | import re |
4937 | +from ..helpers import TestCase |
4938 | |
4939 | |
4940 | BASE_RESOLVE = ''' |
4941 | @@ -14,7 +13,7 @@ |
4942 | BASE_RESOLVE = BASE_RESOLVE.strip() |
4943 | |
4944 | |
4945 | -class TestResolvHelper(MockerTestCase): |
4946 | +class TestResolvHelper(TestCase): |
4947 | def test_parse_same(self): |
4948 | rp = resolv_conf.ResolvConf(BASE_RESOLVE) |
4949 | rp_r = str(rp).strip() |
4950 | |
4951 | === modified file 'tests/unittests/test_distros/test_sysconfig.py' |
4952 | --- tests/unittests/test_distros/test_sysconfig.py 2012-11-13 06:14:31 +0000 |
4953 | +++ tests/unittests/test_distros/test_sysconfig.py 2015-01-27 20:16:43 +0000 |
4954 | @@ -1,14 +1,13 @@ |
4955 | -from mocker import MockerTestCase |
4956 | - |
4957 | import re |
4958 | |
4959 | from cloudinit.distros.parsers.sys_conf import SysConf |
4960 | +from ..helpers import TestCase |
4961 | |
4962 | |
4963 | # Lots of good examples @ |
4964 | # http://content.hccfl.edu/pollock/AUnix1/SysconfigFilesDesc.txt |
4965 | |
4966 | -class TestSysConfHelper(MockerTestCase): |
4967 | +class TestSysConfHelper(TestCase): |
4968 | # This function was added in 2.7, make it work for 2.6 |
4969 | def assertRegMatches(self, text, regexp): |
4970 | regexp = re.compile(regexp) |
4971 | |
4972 | === modified file 'tests/unittests/test_distros/test_user_data_normalize.py' |
4973 | --- tests/unittests/test_distros/test_user_data_normalize.py 2013-01-18 18:57:20 +0000 |
4974 | +++ tests/unittests/test_distros/test_user_data_normalize.py 2015-01-27 20:16:43 +0000 |
4975 | @@ -1,9 +1,10 @@ |
4976 | -from mocker import MockerTestCase |
4977 | - |
4978 | from cloudinit import distros |
4979 | from cloudinit import helpers |
4980 | from cloudinit import settings |
4981 | |
4982 | +from ..helpers import TestCase |
4983 | + |
4984 | + |
4985 | bcfg = { |
4986 | 'name': 'bob', |
4987 | 'plain_text_passwd': 'ubuntu', |
4988 | @@ -15,7 +16,7 @@ |
4989 | } |
4990 | |
4991 | |
4992 | -class TestUGNormalize(MockerTestCase): |
4993 | +class TestUGNormalize(TestCase): |
4994 | |
4995 | def _make_distro(self, dtype, def_user=None): |
4996 | cfg = dict(settings.CFG_BUILTIN) |
4997 | |
4998 | === modified file 'tests/unittests/test_filters/test_launch_index.py' |
4999 | --- tests/unittests/test_filters/test_launch_index.py 2014-07-23 16:25:35 +0000 |
5000 | +++ tests/unittests/test_filters/test_launch_index.py 2015-01-27 20:16:43 +0000 |
Thanks for chugging away on this :)