Merge ~powersj/cloud-init:fix-log-warn into cloud-init:master

Proposed by Joshua Powers
Status: Merged
Merged at revision: 5afe4cd0797a12d07ea19b9715b720d47bdea401
Proposed branch: ~powersj/cloud-init:fix-log-warn
Merge into: cloud-init:master
Diff against target: 1610 lines (+239/-202)
51 files modified
.pylintrc (+24/-3)
cloudinit/cloud.py (+2/-1)
cloudinit/cmd/main.py (+9/-8)
cloudinit/config/__init__.py (+1/-1)
cloudinit/config/cc_apt_configure.py (+6/-5)
cloudinit/config/cc_disk_setup.py (+15/-15)
cloudinit/config/cc_fan.py (+2/-2)
cloudinit/config/cc_mounts.py (+7/-5)
cloudinit/config/cc_resolv_conf.py (+1/-1)
cloudinit/config/cc_rsyslog.py (+4/-3)
cloudinit/config/cc_snappy.py (+3/-3)
cloudinit/distros/__init__.py (+16/-15)
cloudinit/distros/arch.py (+4/-2)
cloudinit/distros/debian.py (+1/-1)
cloudinit/distros/freebsd.py (+4/-4)
cloudinit/distros/gentoo.py (+6/-5)
cloudinit/distros/parsers/resolv_conf.py (+3/-3)
cloudinit/distros/ug_util.py (+8/-8)
cloudinit/ec2_utils.py (+4/-3)
cloudinit/gpg.py (+1/-1)
cloudinit/handlers/__init__.py (+1/-1)
cloudinit/helpers.py (+7/-7)
cloudinit/net/network_state.py (+4/-4)
cloudinit/reporting/handlers.py (+2/-2)
cloudinit/sources/DataSourceAltCloud.py (+2/-2)
cloudinit/sources/DataSourceAzure.py (+11/-11)
cloudinit/sources/DataSourceCloudSigma.py (+1/-1)
cloudinit/sources/DataSourceConfigDrive.py (+2/-2)
cloudinit/sources/DataSourceDigitalOcean.py (+1/-1)
cloudinit/sources/DataSourceEc2.py (+5/-5)
cloudinit/sources/DataSourceGCE.py (+3/-2)
cloudinit/sources/DataSourceMAAS.py (+5/-5)
cloudinit/sources/DataSourceNoCloud.py (+2/-2)
cloudinit/sources/DataSourceOVF.py (+5/-5)
cloudinit/sources/DataSourceOpenNebula.py (+2/-2)
cloudinit/sources/DataSourceOpenStack.py (+2/-2)
cloudinit/sources/DataSourceSmartOS.py (+1/-1)
cloudinit/sources/__init__.py (+2/-2)
cloudinit/sources/helpers/azure.py (+1/-1)
cloudinit/sources/helpers/vmware/imc/config_file.py (+4/-4)
cloudinit/stages.py (+17/-16)
cloudinit/templater.py (+4/-4)
cloudinit/url_helper.py (+6/-5)
cloudinit/user_data.py (+6/-5)
cloudinit/util.py (+9/-9)
cloudinit/warnings.py (+2/-2)
tests/cloud_tests/__main__.py (+1/-1)
tests/cloud_tests/args.py (+1/-1)
tests/cloud_tests/collect.py (+1/-1)
tests/cloud_tests/verify.py (+5/-4)
tools/mock-meta.py (+3/-3)
Reviewer Review Type Date Requested Status
Scott Moser Approve
Server Team CI bot continuous-integration Approve
Review via email: mp+322137@code.launchpad.net

Commit message

pylint: fix all logging warnings

This will change all instances of LOG.warn to LOG.warning as warn
is now a deprecated method. It will also make sure any logging
uses lazy logging by passing string format arguments as function
parameters.

To post a comment you must log in.
Revision history for this message
Server Team CI bot (server-team-bot) wrote :
review: Needs Fixing (continuous-integration)
Revision history for this message
Server Team CI bot (server-team-bot) wrote :
review: Approve (continuous-integration)
Revision history for this message
Scott Moser (smoser) wrote :

so i think i'm ok with this.

but how do we get to having pylint error now on these warnings?

Revision history for this message
Joshua Powers (powersj) wrote :

ok so what I can do is propose a new pylintrc that excludes all the other warning numbers, except these two. And as I fix more warnings remove them from the excludes list. Something like:

[MESSAGES CONTROL]

# Errors & warnings only
# W0223 - abstract-method
# W0511 - fixme
# W0612 - unused-variable
# W0613 - unused-argument
# W0703 - broad-except
disable=C, F, I, R, W0223, W0511, W0612, W0613, W0703

Revision history for this message
Server Team CI bot (server-team-bot) wrote :
review: Needs Fixing (continuous-integration)
Revision history for this message
Server Team CI bot (server-team-bot) wrote :
review: Approve (continuous-integration)
Revision history for this message
Scott Moser (smoser) wrote :

i'll grab this shortly.
thank you josh.

review: Approve

There was an error fetching revisions from git servers. Please try again in a few minutes. If the problem persists, contact Launchpad support.

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1diff --git a/.pylintrc b/.pylintrc
2index b8cda03..0f5e41b 100644
3--- a/.pylintrc
4+++ b/.pylintrc
5@@ -6,8 +6,29 @@ jobs=4
6
7 [MESSAGES CONTROL]
8
9-# Errors only
10-disable=C, F, I, R, W
11+# Errors and warnings with some filtered:
12+# W0105(pointless-string-statement)
13+# W0107(unnecessary-pass)
14+# W0201(attribute-defined-outside-init)
15+# W0212(protected-access)
16+# W0221(arguments-differ)
17+# W0222(signature-differs)
18+# W0223(abstract-method)
19+# W0231(super-init-not-called)
20+# W0311(bad-indentation)
21+# W0511(fixme)
22+# W0602(global-variable-not-assigned)
23+# W0603(global-statement)
24+# W0611(unused-import)
25+# W0612(unused-variable)
26+# W0613(unused-argument)
27+# W0621(redefined-outer-name)
28+# W0622(redefined-builtin)
29+# W0631(undefined-loop-variable)
30+# W0703(broad-except)
31+# W1401(anomalous-backslash-in-string)
32+
33+disable=C, F, I, R, W0105, W0107, W0201, W0212, W0221, W0222, W0223, W0231, W0311, W0511, W0602, W0603, W0611, W0612, W0613, W0621, W0622, W0631, W0703, W1401
34
35
36 [REPORTS]
37@@ -25,7 +46,7 @@ reports=no
38 # (useful for modules/projects where namespaces are manipulated during runtime
39 # and thus existing member attributes cannot be deduced by static analysis. It
40 # supports qualified module names, as well as Unix pattern matching.
41-ignored-modules=six.moves,pkg_resources
42+ignored-modules=six.moves,pkg_resources,httplib,http.client
43
44 # List of class names for which member attributes should not be checked (useful
45 # for classes with dynamically set attributes). This supports the use of
46diff --git a/cloudinit/cloud.py b/cloudinit/cloud.py
47index b93a42e..d8a9fc8 100644
48--- a/cloudinit/cloud.py
49+++ b/cloudinit/cloud.py
50@@ -56,7 +56,8 @@ class Cloud(object):
51 def get_template_filename(self, name):
52 fn = self.paths.template_tpl % (name)
53 if not os.path.isfile(fn):
54- LOG.warn("No template found at %s for template named %s", fn, name)
55+ LOG.warning("No template found at %s for template named %s",
56+ fn, name)
57 return None
58 return fn
59
60diff --git a/cloudinit/cmd/main.py b/cloudinit/cmd/main.py
61index fd22132..26cc265 100644
62--- a/cloudinit/cmd/main.py
63+++ b/cloudinit/cmd/main.py
64@@ -405,7 +405,8 @@ def main_init(name, args):
65 errfmt_orig = errfmt
66 (outfmt, errfmt) = util.get_output_cfg(mods.cfg, name)
67 if outfmt_orig != outfmt or errfmt_orig != errfmt:
68- LOG.warn("Stdout, stderr changing to (%s, %s)", outfmt, errfmt)
69+ LOG.warning("Stdout, stderr changing to (%s, %s)",
70+ outfmt, errfmt)
71 (outfmt, errfmt) = util.fixup_output(mods.cfg, name)
72 except Exception:
73 util.logexc(LOG, "Failed to re-adjust output redirection!")
74@@ -427,15 +428,15 @@ def di_report_warn(datasource, cfg):
75
76 dicfg = cfg.get('di_report', {})
77 if not isinstance(dicfg, dict):
78- LOG.warn("di_report config not a dictionary: %s", dicfg)
79+ LOG.warning("di_report config not a dictionary: %s", dicfg)
80 return
81
82 dslist = dicfg.get('datasource_list')
83 if dslist is None:
84- LOG.warn("no 'datasource_list' found in di_report.")
85+ LOG.warning("no 'datasource_list' found in di_report.")
86 return
87 elif not isinstance(dslist, list):
88- LOG.warn("di_report/datasource_list not a list: %s", dslist)
89+ LOG.warning("di_report/datasource_list not a list: %s", dslist)
90 return
91
92 # ds.__module__ is like cloudinit.sources.DataSourceName
93@@ -444,8 +445,8 @@ def di_report_warn(datasource, cfg):
94 if modname.startswith(sources.DS_PREFIX):
95 modname = modname[len(sources.DS_PREFIX):]
96 else:
97- LOG.warn("Datasource '%s' came from unexpected module '%s'.",
98- datasource, modname)
99+ LOG.warning("Datasource '%s' came from unexpected module '%s'.",
100+ datasource, modname)
101
102 if modname in dslist:
103 LOG.debug("used datasource '%s' from '%s' was in di_report's list: %s",
104@@ -571,10 +572,10 @@ def main_single(name, args):
105 mod_args,
106 mod_freq)
107 if failures:
108- LOG.warn("Ran %s but it failed!", mod_name)
109+ LOG.warning("Ran %s but it failed!", mod_name)
110 return 1
111 elif not which_ran:
112- LOG.warn("Did not run %s, does it exist?", mod_name)
113+ LOG.warning("Did not run %s, does it exist?", mod_name)
114 return 1
115 else:
116 # Guess it worked
117diff --git a/cloudinit/config/__init__.py b/cloudinit/config/__init__.py
118index 57e2a44..0ef9a74 100644
119--- a/cloudinit/config/__init__.py
120+++ b/cloudinit/config/__init__.py
121@@ -37,7 +37,7 @@ def fixup_module(mod, def_freq=PER_INSTANCE):
122 else:
123 freq = mod.frequency
124 if freq and freq not in FREQUENCIES:
125- LOG.warn("Module %s has an unknown frequency %s", mod, freq)
126+ LOG.warning("Module %s has an unknown frequency %s", mod, freq)
127 if not hasattr(mod, 'distros'):
128 setattr(mod, 'distros', [])
129 if not hasattr(mod, 'osfamilies'):
130diff --git a/cloudinit/config/cc_apt_configure.py b/cloudinit/config/cc_apt_configure.py
131index 06804e8..7e75177 100644
132--- a/cloudinit/config/cc_apt_configure.py
133+++ b/cloudinit/config/cc_apt_configure.py
134@@ -347,8 +347,8 @@ def dpkg_reconfigure(packages, target=None):
135 unhandled.append(pkg)
136
137 if len(unhandled):
138- LOG.warn("The following packages were installed and preseeded, "
139- "but cannot be unconfigured: %s", unhandled)
140+ LOG.warning("The following packages were installed and preseeded, "
141+ "but cannot be unconfigured: %s", unhandled)
142
143 if len(to_config):
144 util.subp(['dpkg-reconfigure', '--frontend=noninteractive'] +
145@@ -441,7 +441,7 @@ def rename_apt_lists(new_mirrors, target=None):
146 os.rename(filename, newname)
147 except OSError:
148 # since this is a best effort task, warn with but don't fail
149- LOG.warn("Failed to rename apt list:", exc_info=True)
150+ LOG.warning("Failed to rename apt list:", exc_info=True)
151
152
153 def mirror_to_placeholder(tmpl, mirror, placeholder):
154@@ -449,7 +449,7 @@ def mirror_to_placeholder(tmpl, mirror, placeholder):
155 replace the specified mirror in a template with a placeholder string
156 Checks for existance of the expected mirror and warns if not found"""
157 if mirror not in tmpl:
158- LOG.warn("Expected mirror '%s' not found in: %s", mirror, tmpl)
159+ LOG.warning("Expected mirror '%s' not found in: %s", mirror, tmpl)
160 return tmpl.replace(mirror, placeholder)
161
162
163@@ -525,7 +525,8 @@ def generate_sources_list(cfg, release, mirrors, cloud):
164 if not template_fn:
165 template_fn = cloud.get_template_filename('sources.list')
166 if not template_fn:
167- LOG.warn("No template found, not rendering /etc/apt/sources.list")
168+ LOG.warning("No template found, "
169+ "not rendering /etc/apt/sources.list")
170 return
171 tmpl = util.load_file(template_fn)
172
173diff --git a/cloudinit/config/cc_disk_setup.py b/cloudinit/config/cc_disk_setup.py
174index f39f081..f49386e 100644
175--- a/cloudinit/config/cc_disk_setup.py
176+++ b/cloudinit/config/cc_disk_setup.py
177@@ -181,7 +181,7 @@ def update_fs_setup_devices(disk_setup, tformer):
178 # update it with the response from 'tformer'
179 for definition in disk_setup:
180 if not isinstance(definition, dict):
181- LOG.warn("entry in disk_setup not a dict: %s", definition)
182+ LOG.warning("entry in disk_setup not a dict: %s", definition)
183 continue
184
185 origname = definition.get('device')
186@@ -279,7 +279,7 @@ def is_device_valid(name, partition=False):
187 try:
188 d_type = device_type(name)
189 except Exception:
190- LOG.warn("Query against device %s failed" % name)
191+ LOG.warning("Query against device %s failed", name)
192 return False
193
194 if partition and d_type == 'part':
195@@ -372,7 +372,7 @@ def find_device_node(device, fs_type=None, label=None, valid_targets=None,
196 if not raw_device_used:
197 return (device, False)
198
199- LOG.warn("Failed to find device during available device search.")
200+ LOG.warning("Failed to find device during available device search.")
201 return (None, False)
202
203
204@@ -638,7 +638,7 @@ def purge_disk(device):
205 if d['type'] not in ["disk", "crypt"]:
206 wipefs_cmd = [WIPEFS_CMD, "--all", "/dev/%s" % d['name']]
207 try:
208- LOG.info("Purging filesystem on /dev/%s" % d['name'])
209+ LOG.info("Purging filesystem on /dev/%s", d['name'])
210 util.subp(wipefs_cmd)
211 except Exception:
212 raise Exception("Failed FS purge of /dev/%s" % d['name'])
213@@ -700,7 +700,7 @@ def exec_mkpart_gpt(device, layout):
214 [SGDISK_CMD,
215 '-t', '{}:{}'.format(index, partition_type), device])
216 except Exception:
217- LOG.warn("Failed to partition device %s" % device)
218+ LOG.warning("Failed to partition device %s", device)
219 raise
220
221 read_parttbl(device)
222@@ -736,7 +736,7 @@ def mkpart(device, definition):
223 # ensure that we get a real device rather than a symbolic link
224 device = os.path.realpath(device)
225
226- LOG.debug("Checking values for %s definition" % device)
227+ LOG.debug("Checking values for %s definition", device)
228 overwrite = definition.get('overwrite', False)
229 layout = definition.get('layout', False)
230 table_type = definition.get('table_type', 'mbr')
231@@ -766,7 +766,7 @@ def mkpart(device, definition):
232
233 LOG.debug("Checking if device is safe to partition")
234 if not overwrite and (is_disk_used(device) or is_filesystem(device)):
235- LOG.debug("Skipping partitioning on configured device %s" % device)
236+ LOG.debug("Skipping partitioning on configured device %s", device)
237 return
238
239 LOG.debug("Checking for device size")
240@@ -774,7 +774,7 @@ def mkpart(device, definition):
241
242 LOG.debug("Calculating partition layout")
243 part_definition = get_partition_layout(table_type, device_size, layout)
244- LOG.debug(" Layout is: %s" % part_definition)
245+ LOG.debug(" Layout is: %s", part_definition)
246
247 LOG.debug("Creating partition table on %s", device)
248 exec_mkpart(table_type, device, part_definition)
249@@ -799,7 +799,7 @@ def lookup_force_flag(fs):
250 if fs.lower() in flags:
251 return flags[fs]
252
253- LOG.warn("Force flag for %s is unknown." % fs)
254+ LOG.warning("Force flag for %s is unknown.", fs)
255 return ''
256
257
258@@ -858,7 +858,7 @@ def mkfs(fs_cfg):
259 LOG.debug("Device %s has required file system", device)
260 return
261 else:
262- LOG.warn("Destroying filesystem on %s", device)
263+ LOG.warning("Destroying filesystem on %s", device)
264
265 else:
266 LOG.debug("Device %s is cleared for formating", device)
267@@ -883,14 +883,14 @@ def mkfs(fs_cfg):
268 return
269
270 if not reuse and fs_replace and device:
271- LOG.debug("Replacing file system on %s as instructed." % device)
272+ LOG.debug("Replacing file system on %s as instructed.", device)
273
274 if not device:
275 LOG.debug("No device aviable that matches request. "
276 "Skipping fs creation for %s", fs_cfg)
277 return
278 elif not partition or str(partition).lower() == 'none':
279- LOG.debug("Using the raw device to place filesystem %s on" % label)
280+ LOG.debug("Using the raw device to place filesystem %s on", label)
281
282 else:
283 LOG.debug("Error in device identification handling.")
284@@ -901,7 +901,7 @@ def mkfs(fs_cfg):
285
286 # Make sure the device is defined
287 if not device:
288- LOG.warn("Device is not known: %s", device)
289+ LOG.warning("Device is not known: %s", device)
290 return
291
292 # Check that we can create the FS
293@@ -923,8 +923,8 @@ def mkfs(fs_cfg):
294 mkfs_cmd = util.which("mk%s" % fs_type)
295
296 if not mkfs_cmd:
297- LOG.warn("Cannot create fstype '%s'. No mkfs.%s command", fs_type,
298- fs_type)
299+ LOG.warning("Cannot create fstype '%s'. No mkfs.%s command",
300+ fs_type, fs_type)
301 return
302
303 fs_cmd = [mkfs_cmd, device]
304diff --git a/cloudinit/config/cc_fan.py b/cloudinit/config/cc_fan.py
305index f0cda3d..0a135bb 100644
306--- a/cloudinit/config/cc_fan.py
307+++ b/cloudinit/config/cc_fan.py
308@@ -64,7 +64,7 @@ def stop_update_start(service, config_file, content, systemd=False):
309 try:
310 return util.subp(cmd, capture=True)
311 except util.ProcessExecutionError as e:
312- LOG.warn("failed: %s (%s): %s", service, cmd, e)
313+ LOG.warning("failed: %s (%s): %s", service, cmd, e)
314 return False
315
316 stop_failed = not run(cmds['stop'], msg='stop %s' % service)
317@@ -74,7 +74,7 @@ def stop_update_start(service, config_file, content, systemd=False):
318
319 ret = run(cmds['start'], msg='start %s' % service)
320 if ret and stop_failed:
321- LOG.warn("success: %s started", service)
322+ LOG.warning("success: %s started", service)
323
324 if 'enable' in cmds:
325 ret = run(cmds['enable'], msg='enable %s' % service)
326diff --git a/cloudinit/config/cc_mounts.py b/cloudinit/config/cc_mounts.py
327index 5b630f8..f14a4fc 100644
328--- a/cloudinit/config/cc_mounts.py
329+++ b/cloudinit/config/cc_mounts.py
330@@ -216,8 +216,9 @@ def suggested_swapsize(memsize=None, maxsize=None, fsys=None):
331 else:
332 pinfo[k] = v
333
334- LOG.debug("suggest %(size)s swap for %(mem)s memory with '%(avail)s'"
335- " disk given max=%(max_in)s [max=%(max)s]'" % pinfo)
336+ LOG.debug("suggest %s swap for %s memory with '%s'"
337+ " disk given max=%s [max=%s]'", pinfo['size'], pinfo['mem'],
338+ pinfo['avail'], pinfo['max_in'], pinfo['max'])
339 return size
340
341
342@@ -266,7 +267,7 @@ def handle_swapcfg(swapcfg):
343 return None or (filename, size)
344 """
345 if not isinstance(swapcfg, dict):
346- LOG.warn("input for swap config was not a dict.")
347+ LOG.warning("input for swap config was not a dict.")
348 return None
349
350 fname = swapcfg.get('filename', '/swap.img')
351@@ -289,7 +290,8 @@ def handle_swapcfg(swapcfg):
352 return fname
353 LOG.debug("swap file %s existed, but not in /proc/swaps", fname)
354 except Exception:
355- LOG.warn("swap file %s existed. Error reading /proc/swaps", fname)
356+ LOG.warning("swap file %s existed. Error reading /proc/swaps",
357+ fname)
358 return fname
359
360 try:
361@@ -300,7 +302,7 @@ def handle_swapcfg(swapcfg):
362 return setup_swapfile(fname=fname, size=size, maxsize=maxsize)
363
364 except Exception as e:
365- LOG.warn("failed to setup swap: %s", e)
366+ LOG.warning("failed to setup swap: %s", e)
367
368 return None
369
370diff --git a/cloudinit/config/cc_resolv_conf.py b/cloudinit/config/cc_resolv_conf.py
371index 9c5cd1f..2548d1f 100644
372--- a/cloudinit/config/cc_resolv_conf.py
373+++ b/cloudinit/config/cc_resolv_conf.py
374@@ -77,7 +77,7 @@ def generate_resolv_conf(template_fn, params, target_fname="/etc/resolv.conf"):
375 params['options'] = {}
376
377 params['flags'] = flags
378- LOG.debug("Writing resolv.conf from template %s" % template_fn)
379+ LOG.debug("Writing resolv.conf from template %s", template_fn)
380 templater.render_to_file(template_fn, target_fname, params)
381
382
383diff --git a/cloudinit/config/cc_rsyslog.py b/cloudinit/config/cc_rsyslog.py
384index 5031621..50ff9e3 100644
385--- a/cloudinit/config/cc_rsyslog.py
386+++ b/cloudinit/config/cc_rsyslog.py
387@@ -252,7 +252,8 @@ def apply_rsyslog_changes(configs, def_fname, cfg_dir):
388 for cur_pos, ent in enumerate(configs):
389 if isinstance(ent, dict):
390 if "content" not in ent:
391- LOG.warn("No 'content' entry in config entry %s", cur_pos + 1)
392+ LOG.warning("No 'content' entry in config entry %s",
393+ cur_pos + 1)
394 continue
395 content = ent['content']
396 filename = ent.get("filename", def_fname)
397@@ -262,7 +263,7 @@ def apply_rsyslog_changes(configs, def_fname, cfg_dir):
398
399 filename = filename.strip()
400 if not filename:
401- LOG.warn("Entry %s has an empty filename", cur_pos + 1)
402+ LOG.warning("Entry %s has an empty filename", cur_pos + 1)
403 continue
404
405 filename = os.path.join(cfg_dir, filename)
406@@ -389,7 +390,7 @@ def remotes_to_rsyslog_cfg(remotes, header=None, footer=None):
407 try:
408 lines.append(str(parse_remotes_line(line, name=name)))
409 except ValueError as e:
410- LOG.warn("failed loading remote %s: %s [%s]", name, line, e)
411+ LOG.warning("failed loading remote %s: %s [%s]", name, line, e)
412 if footer is not None:
413 lines.append(footer)
414 return '\n'.join(lines) + "\n"
415diff --git a/cloudinit/config/cc_snappy.py b/cloudinit/config/cc_snappy.py
416index 6ea81b8..a9682f1 100644
417--- a/cloudinit/config/cc_snappy.py
418+++ b/cloudinit/config/cc_snappy.py
419@@ -283,8 +283,8 @@ def handle(name, cfg, cloud, log, args):
420 render_snap_op(**pkg_op)
421 except Exception as e:
422 fails.append((pkg_op, e,))
423- LOG.warn("'%s' failed for '%s': %s",
424- pkg_op['op'], pkg_op['name'], e)
425+ LOG.warning("'%s' failed for '%s': %s",
426+ pkg_op['op'], pkg_op['name'], e)
427
428 # Default to disabling SSH
429 ssh_enabled = mycfg.get('ssh_enabled', "auto")
430@@ -303,7 +303,7 @@ def handle(name, cfg, cloud, log, args):
431 LOG.debug("Enabling SSH, password authentication requested")
432 ssh_enabled = True
433 elif ssh_enabled not in (True, False):
434- LOG.warn("Unknown value '%s' in ssh_enabled", ssh_enabled)
435+ LOG.warning("Unknown value '%s' in ssh_enabled", ssh_enabled)
436
437 disable_enable_ssh(ssh_enabled)
438
439diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py
440index 803ac74..28650b8 100755
441--- a/cloudinit/distros/__init__.py
442+++ b/cloudinit/distros/__init__.py
443@@ -143,9 +143,9 @@ class Distro(object):
444
445 def _apply_network_from_network_config(self, netconfig, bring_up=True):
446 distro = self.__class__
447- LOG.warn("apply_network_config is not currently implemented "
448- "for distribution '%s'. Attempting to use apply_network",
449- distro)
450+ LOG.warning("apply_network_config is not currently implemented "
451+ "for distribution '%s'. Attempting to use apply_network",
452+ distro)
453 header = '\n'.join([
454 "# Converted from network_config for distro %s" % distro,
455 "# Implmentation of _write_network_config is needed."
456@@ -335,7 +335,8 @@ class Distro(object):
457 try:
458 (_out, err) = util.subp(cmd)
459 if len(err):
460- LOG.warn("Running %s resulted in stderr output: %s", cmd, err)
461+ LOG.warning("Running %s resulted in stderr output: %s",
462+ cmd, err)
463 return True
464 except util.ProcessExecutionError:
465 util.logexc(LOG, "Running interface command %s failed", cmd)
466@@ -358,7 +359,7 @@ class Distro(object):
467 Add a user to the system using standard GNU tools
468 """
469 if util.is_user(name):
470- LOG.info("User %s already exists, skipping." % name)
471+ LOG.info("User %s already exists, skipping.", name)
472 return
473
474 if 'create_groups' in kwargs:
475@@ -520,9 +521,9 @@ class Distro(object):
476 keys = list(keys.values())
477 if keys is not None:
478 if not isinstance(keys, (tuple, list, set)):
479- LOG.warn("Invalid type '%s' detected for"
480- " 'ssh_authorized_keys', expected list,"
481- " string, dict, or set.", type(keys))
482+ LOG.warning("Invalid type '%s' detected for"
483+ " 'ssh_authorized_keys', expected list,"
484+ " string, dict, or set.", type(keys))
485 else:
486 keys = set(keys) or []
487 ssh_util.setup_user_keys(keys, name, options=None)
488@@ -595,7 +596,7 @@ class Distro(object):
489 "#includedir %s" % (path), '']
490 sudoers_contents = "\n".join(lines)
491 util.append_file(sudo_base, sudoers_contents)
492- LOG.debug("Added '#includedir %s' to %s" % (path, sudo_base))
493+ LOG.debug("Added '#includedir %s' to %s", path, sudo_base)
494 except IOError as e:
495 util.logexc(LOG, "Failed to write %s", sudo_base)
496 raise e
497@@ -647,11 +648,11 @@ class Distro(object):
498
499 # Check if group exists, and then add it doesn't
500 if util.is_group(name):
501- LOG.warn("Skipping creation of existing group '%s'" % name)
502+ LOG.warning("Skipping creation of existing group '%s'", name)
503 else:
504 try:
505 util.subp(group_add_cmd)
506- LOG.info("Created new group %s" % name)
507+ LOG.info("Created new group %s", name)
508 except Exception:
509 util.logexc(LOG, "Failed to create group %s", name)
510
511@@ -659,12 +660,12 @@ class Distro(object):
512 if len(members) > 0:
513 for member in members:
514 if not util.is_user(member):
515- LOG.warn("Unable to add group member '%s' to group '%s'"
516- "; user does not exist.", member, name)
517+ LOG.warning("Unable to add group member '%s' to group '%s'"
518+ "; user does not exist.", member, name)
519 continue
520
521 util.subp(['usermod', '-a', '-G', name, member])
522- LOG.info("Added user '%s' to group '%s'" % (member, name))
523+ LOG.info("Added user '%s' to group '%s'", member, name)
524
525
526 def _get_package_mirror_info(mirror_info, data_source=None,
527@@ -708,7 +709,7 @@ def _get_package_mirror_info(mirror_info, data_source=None,
528 if found:
529 results[name] = found
530
531- LOG.debug("filtered distro mirror info: %s" % results)
532+ LOG.debug("filtered distro mirror info: %s", results)
533
534 return results
535
536diff --git a/cloudinit/distros/arch.py b/cloudinit/distros/arch.py
537index 64b8c1f..75d4620 100644
538--- a/cloudinit/distros/arch.py
539+++ b/cloudinit/distros/arch.py
540@@ -83,7 +83,8 @@ class Distro(distros.Distro):
541 try:
542 (_out, err) = util.subp(cmd)
543 if len(err):
544- LOG.warn("Running %s resulted in stderr output: %s", cmd, err)
545+ LOG.warning("Running %s resulted in stderr output: %s",
546+ cmd, err)
547 except util.ProcessExecutionError:
548 util.logexc(LOG, "Running interface command %s failed", cmd)
549
550@@ -94,7 +95,8 @@ class Distro(distros.Distro):
551 try:
552 (_out, err) = util.subp(cmd)
553 if len(err):
554- LOG.warn("Running %s resulted in stderr output: %s", cmd, err)
555+ LOG.warning("Running %s resulted in stderr output: %s",
556+ cmd, err)
557 return True
558 except util.ProcessExecutionError:
559 util.logexc(LOG, "Running interface command %s failed", cmd)
560diff --git a/cloudinit/distros/debian.py b/cloudinit/distros/debian.py
561index 16f8d95..d06d46a 100644
562--- a/cloudinit/distros/debian.py
563+++ b/cloudinit/distros/debian.py
564@@ -223,6 +223,6 @@ def _maybe_remove_legacy_eth0(path="/etc/network/interfaces.d/eth0.cfg"):
565 except Exception:
566 msg = bmsg + " %s exists, but could not be read." % path
567
568- LOG.warn(msg)
569+ LOG.warning(msg)
570
571 # vi: ts=4 expandtab
572diff --git a/cloudinit/distros/freebsd.py b/cloudinit/distros/freebsd.py
573index a70ee45..183e445 100644
574--- a/cloudinit/distros/freebsd.py
575+++ b/cloudinit/distros/freebsd.py
576@@ -148,7 +148,7 @@ class Distro(distros.Distro):
577 def create_group(self, name, members):
578 group_add_cmd = ['pw', '-n', name]
579 if util.is_group(name):
580- LOG.warn("Skipping creation of existing group '%s'", name)
581+ LOG.warning("Skipping creation of existing group '%s'", name)
582 else:
583 try:
584 util.subp(group_add_cmd)
585@@ -160,8 +160,8 @@ class Distro(distros.Distro):
586 if len(members) > 0:
587 for member in members:
588 if not util.is_user(member):
589- LOG.warn("Unable to add group member '%s' to group '%s'"
590- "; user does not exist.", member, name)
591+ LOG.warning("Unable to add group member '%s' to group '%s'"
592+ "; user does not exist.", member, name)
593 continue
594 try:
595 util.subp(['pw', 'usermod', '-n', name, '-G', member])
596@@ -369,7 +369,7 @@ class Distro(distros.Distro):
597 # OS. This is just fine.
598 (_out, err) = util.subp(cmd, rcs=[0, 1])
599 if len(err):
600- LOG.warn("Error running %s: %s", cmd, err)
601+ LOG.warning("Error running %s: %s", cmd, err)
602
603 def install_packages(self, pkglist):
604 self.update_package_sources()
605diff --git a/cloudinit/distros/gentoo.py b/cloudinit/distros/gentoo.py
606index 83fb56f..0ad2f03 100644
607--- a/cloudinit/distros/gentoo.py
608+++ b/cloudinit/distros/gentoo.py
609@@ -96,8 +96,8 @@ class Distro(distros.Distro):
610 try:
611 (_out, err) = util.subp(cmd)
612 if len(err):
613- LOG.warn("Running %s resulted in stderr output: %s",
614- cmd, err)
615+ LOG.warning("Running %s resulted in stderr output: %s",
616+ cmd, err)
617 except util.ProcessExecutionError:
618 util.logexc(LOG, "Running interface command %s failed",
619 cmd)
620@@ -121,7 +121,8 @@ class Distro(distros.Distro):
621 try:
622 (_out, err) = util.subp(cmd)
623 if len(err):
624- LOG.warn("Running %s resulted in stderr output: %s", cmd, err)
625+ LOG.warning("Running %s resulted in stderr output: %s",
626+ cmd, err)
627 return True
628 except util.ProcessExecutionError:
629 util.logexc(LOG, "Running interface command %s failed", cmd)
630@@ -138,8 +139,8 @@ class Distro(distros.Distro):
631 try:
632 (_out, err) = util.subp(cmd)
633 if len(err):
634- LOG.warn("Running %s resulted in stderr output: %s", cmd,
635- err)
636+ LOG.warning("Running %s resulted in stderr output: %s",
637+ cmd, err)
638 except util.ProcessExecutionError:
639 util.logexc(LOG, "Running interface command %s failed", cmd)
640 return False
641diff --git a/cloudinit/distros/parsers/resolv_conf.py b/cloudinit/distros/parsers/resolv_conf.py
642index d1f8a04..a62055a 100644
643--- a/cloudinit/distros/parsers/resolv_conf.py
644+++ b/cloudinit/distros/parsers/resolv_conf.py
645@@ -81,9 +81,9 @@ class ResolvConf(object):
646 if len(new_ns) == len(current_ns):
647 return current_ns
648 if len(current_ns) >= 3:
649- LOG.warn("ignoring nameserver %r: adding would "
650- "exceed the maximum of "
651- "'3' name servers (see resolv.conf(5))" % (ns))
652+ LOG.warning("ignoring nameserver %r: adding would "
653+ "exceed the maximum of "
654+ "'3' name servers (see resolv.conf(5))", ns)
655 return current_ns[:3]
656 self._remove_option('nameserver')
657 for n in new_ns:
658diff --git a/cloudinit/distros/ug_util.py b/cloudinit/distros/ug_util.py
659index 53a0eaf..9378dd7 100755
660--- a/cloudinit/distros/ug_util.py
661+++ b/cloudinit/distros/ug_util.py
662@@ -214,8 +214,8 @@ def normalize_users_groups(cfg, distro):
663 'name': old_user,
664 }
665 if not isinstance(old_user, dict):
666- LOG.warn(("Format for 'user' key must be a string or "
667- "dictionary and not %s"), type_utils.obj_name(old_user))
668+ LOG.warning(("Format for 'user' key must be a string or dictionary"
669+ " and not %s"), type_utils.obj_name(old_user))
670 old_user = {}
671
672 # If no old user format, then assume the distro
673@@ -227,9 +227,9 @@ def normalize_users_groups(cfg, distro):
674 try:
675 distro_user_config = distro.get_default_user()
676 except NotImplementedError:
677- LOG.warn(("Distro has not implemented default user "
678- "access. No distribution provided default user"
679- " will be normalized."))
680+ LOG.warning(("Distro has not implemented default user "
681+ "access. No distribution provided default user"
682+ " will be normalized."))
683
684 # Merge the old user (which may just be an empty dict when not
685 # present with the distro provided default user configuration so
686@@ -239,9 +239,9 @@ def normalize_users_groups(cfg, distro):
687
688 base_users = cfg.get('users', [])
689 if not isinstance(base_users, (list, dict) + six.string_types):
690- LOG.warn(("Format for 'users' key must be a comma separated string"
691- " or a dictionary or a list and not %s"),
692- type_utils.obj_name(base_users))
693+ LOG.warning(("Format for 'users' key must be a comma separated string"
694+ " or a dictionary or a list and not %s"),
695+ type_utils.obj_name(base_users))
696 base_users = []
697
698 if old_user:
699diff --git a/cloudinit/ec2_utils.py b/cloudinit/ec2_utils.py
700index 1369154..723d6bd 100644
701--- a/cloudinit/ec2_utils.py
702+++ b/cloudinit/ec2_utils.py
703@@ -38,8 +38,8 @@ class MetadataLeafDecoder(object):
704 # Assume it's json, unless it fails parsing...
705 return json.loads(blob)
706 except (ValueError, TypeError) as e:
707- LOG.warn("Field %s looked like a json object, but it was"
708- " not: %s", field, e)
709+ LOG.warning("Field %s looked like a json object, but it"
710+ " was not: %s", field, e)
711 if blob.find("\n") != -1:
712 return blob.splitlines()
713 return blob
714@@ -125,7 +125,8 @@ class MetadataMaterializer(object):
715 joined.update(child_contents)
716 for field in leaf_contents.keys():
717 if field in joined:
718- LOG.warn("Duplicate key found in results from %s", base_url)
719+ LOG.warning("Duplicate key found in results from %s",
720+ base_url)
721 else:
722 joined[field] = leaf_contents[field]
723 return joined
724diff --git a/cloudinit/gpg.py b/cloudinit/gpg.py
725index 70c620d..d58d73e 100644
726--- a/cloudinit/gpg.py
727+++ b/cloudinit/gpg.py
728@@ -43,7 +43,7 @@ def delete_key(key):
729 util.subp(["gpg", "--batch", "--yes", "--delete-keys", key],
730 capture=True)
731 except util.ProcessExecutionError as error:
732- LOG.warn('Failed delete key "%s": %s', key, error)
733+ LOG.warning('Failed delete key "%s": %s', key, error)
734
735
736 def getkeybyid(keyid, keyserver='keyserver.ubuntu.com'):
737diff --git a/cloudinit/handlers/__init__.py b/cloudinit/handlers/__init__.py
738index 1362db6..c3576c0 100644
739--- a/cloudinit/handlers/__init__.py
740+++ b/cloudinit/handlers/__init__.py
741@@ -246,7 +246,7 @@ def fixup_handler(mod, def_freq=PER_INSTANCE):
742 else:
743 freq = mod.frequency
744 if freq and freq not in FREQUENCIES:
745- LOG.warn("Handler %s has an unknown frequency %s", mod, freq)
746+ LOG.warning("Handler %s has an unknown frequency %s", mod, freq)
747 return mod
748
749
750diff --git a/cloudinit/helpers.py b/cloudinit/helpers.py
751index 7435d58..f01021a 100644
752--- a/cloudinit/helpers.py
753+++ b/cloudinit/helpers.py
754@@ -126,11 +126,11 @@ class FileSemaphores(object):
755 # this case could happen if the migrator module hadn't run yet
756 # but the item had run before we did canon_sem_name.
757 if cname != name and os.path.exists(self._get_path(name, freq)):
758- LOG.warn("%s has run without canonicalized name [%s].\n"
759- "likely the migrator has not yet run. "
760- "It will run next boot.\n"
761- "run manually with: cloud-init single --name=migrator"
762- % (name, cname))
763+ LOG.warning("%s has run without canonicalized name [%s].\n"
764+ "likely the migrator has not yet run. "
765+ "It will run next boot.\n"
766+ "run manually with: cloud-init single --name=migrator",
767+ name, cname)
768 return True
769
770 return False
771@@ -375,8 +375,8 @@ class Paths(object):
772 def get_ipath(self, name=None):
773 ipath = self._get_ipath(name)
774 if not ipath:
775- LOG.warn(("No per instance data available, "
776- "is there an datasource/iid set?"))
777+ LOG.warning(("No per instance data available, "
778+ "is there an datasource/iid set?"))
779 return None
780 else:
781 return ipath
782diff --git a/cloudinit/net/network_state.py b/cloudinit/net/network_state.py
783index 692b600..db3c357 100644
784--- a/cloudinit/net/network_state.py
785+++ b/cloudinit/net/network_state.py
786@@ -242,8 +242,8 @@ class NetworkStateInterpreter(object):
787 if not skip_broken:
788 raise
789 else:
790- LOG.warn("Skipping invalid command: %s", command,
791- exc_info=True)
792+ LOG.warning("Skipping invalid command: %s", command,
793+ exc_info=True)
794 LOG.debug(self.dump_network_state())
795
796 def parse_config_v2(self, skip_broken=True):
797@@ -262,8 +262,8 @@ class NetworkStateInterpreter(object):
798 if not skip_broken:
799 raise
800 else:
801- LOG.warn("Skipping invalid command: %s", command,
802- exc_info=True)
803+ LOG.warning("Skipping invalid command: %s", command,
804+ exc_info=True)
805 LOG.debug(self.dump_network_state())
806
807 @ensure_command_keys(['name'])
808diff --git a/cloudinit/reporting/handlers.py b/cloudinit/reporting/handlers.py
809index b90bc19..4066076 100644
810--- a/cloudinit/reporting/handlers.py
811+++ b/cloudinit/reporting/handlers.py
812@@ -37,7 +37,7 @@ class LogHandler(ReportingHandler):
813 try:
814 level = getattr(logging, level.upper())
815 except Exception:
816- LOG.warn("invalid level '%s', using WARN", input_level)
817+ LOG.warning("invalid level '%s', using WARN", input_level)
818 level = logging.WARN
819 self.level = level
820
821@@ -82,7 +82,7 @@ class WebHookHandler(ReportingHandler):
822 timeout=self.timeout,
823 retries=self.retries, ssl_details=self.ssl_details)
824 except Exception:
825- LOG.warn("failed posting event: %s" % event.as_string())
826+ LOG.warning("failed posting event: %s", event.as_string())
827
828
829 available_handlers = DictRegistry()
830diff --git a/cloudinit/sources/DataSourceAltCloud.py b/cloudinit/sources/DataSourceAltCloud.py
831index 8528fa1..ed1d691 100644
832--- a/cloudinit/sources/DataSourceAltCloud.py
833+++ b/cloudinit/sources/DataSourceAltCloud.py
834@@ -181,7 +181,7 @@ class DataSourceAltCloud(sources.DataSource):
835 try:
836 cmd = CMD_PROBE_FLOPPY
837 (cmd_out, _err) = util.subp(cmd)
838- LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out))
839+ LOG.debug('Command: %s\nOutput%s', ' '.join(cmd), cmd_out)
840 except ProcessExecutionError as _err:
841 util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), _err)
842 return False
843@@ -196,7 +196,7 @@ class DataSourceAltCloud(sources.DataSource):
844 cmd = CMD_UDEVADM_SETTLE
845 cmd.append('--exit-if-exists=' + floppy_dev)
846 (cmd_out, _err) = util.subp(cmd)
847- LOG.debug(('Command: %s\nOutput%s') % (' '.join(cmd), cmd_out))
848+ LOG.debug('Command: %s\nOutput%s', ' '.join(cmd), cmd_out)
849 except ProcessExecutionError as _err:
850 util.logexc(LOG, 'Failed command: %s\n%s', ' '.join(cmd), _err)
851 return False
852diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
853index 48a3e1d..04358b7 100644
854--- a/cloudinit/sources/DataSourceAzure.py
855+++ b/cloudinit/sources/DataSourceAzure.py
856@@ -116,7 +116,7 @@ class DataSourceAzureNet(sources.DataSource):
857 # the metadata and "bounce" the network to force DDNS to update via
858 # dhclient
859 azure_hostname = self.metadata.get('local-hostname')
860- LOG.debug("Hostname in metadata is {}".format(azure_hostname))
861+ LOG.debug("Hostname in metadata is %s", azure_hostname)
862 hostname_command = self.ds_cfg['hostname_bounce']['hostname_command']
863
864 with temporary_hostname(azure_hostname, self.ds_cfg,
865@@ -132,7 +132,7 @@ class DataSourceAzureNet(sources.DataSource):
866 cfg=cfg,
867 prev_hostname=previous_hostname)
868 except Exception as e:
869- LOG.warn("Failed publishing hostname: %s", e)
870+ LOG.warning("Failed publishing hostname: %s", e)
871 util.logexc(LOG, "handling set_hostname failed")
872
873 def get_metadata_from_agent(self):
874@@ -168,7 +168,7 @@ class DataSourceAzureNet(sources.DataSource):
875 func=wait_for_files,
876 args=(fp_files,))
877 if len(missing):
878- LOG.warn("Did not find files, but going on: %s", missing)
879+ LOG.warning("Did not find files, but going on: %s", missing)
880
881 metadata = {}
882 metadata['public-keys'] = key_value or pubkeys_from_crt_files(fp_files)
883@@ -199,7 +199,7 @@ class DataSourceAzureNet(sources.DataSource):
884 except BrokenAzureDataSource as exc:
885 raise exc
886 except util.MountFailedError:
887- LOG.warn("%s was not mountable", cdev)
888+ LOG.warning("%s was not mountable", cdev)
889 continue
890
891 (md, self.userdata_raw, cfg, files) = ret
892@@ -331,8 +331,8 @@ def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,
893 log_pre="Azure ephemeral disk: ")
894
895 if missing:
896- LOG.warn("ephemeral device '%s' did not appear after %d seconds.",
897- devpath, maxwait)
898+ LOG.warning("ephemeral device '%s' did not appear after %d seconds.",
899+ devpath, maxwait)
900 return
901
902 result = False
903@@ -342,7 +342,7 @@ def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,
904 else:
905 result, msg = can_dev_be_reformatted(devpath)
906
907- LOG.debug("reformattable=%s: %s" % (result, msg))
908+ LOG.debug("reformattable=%s: %s", result, msg)
909 if not result:
910 return
911
912@@ -355,7 +355,7 @@ def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,
913 LOG.debug(bmsg + " removed.")
914 except Exception as e:
915 # python3 throws FileNotFoundError, python2 throws OSError
916- LOG.warn(bmsg + ": remove failed! (%s)" % e)
917+ LOG.warning(bmsg + ": remove failed! (%s)", e)
918 else:
919 LOG.debug(bmsg + " did not exist.")
920 return
921@@ -405,7 +405,7 @@ def pubkeys_from_crt_files(flist):
922 errors.append(fname)
923
924 if errors:
925- LOG.warn("failed to convert the crt files to pubkey: %s", errors)
926+ LOG.warning("failed to convert the crt files to pubkey: %s", errors)
927
928 return pubkeys
929
930@@ -427,8 +427,8 @@ def wait_for_files(flist, maxwait=60, naplen=.5, log_pre=""):
931 time.sleep(naplen)
932 waited += naplen
933
934- LOG.warn("%sStill missing files after %s seconds: %s",
935- log_pre, maxwait, need)
936+ LOG.warning("%sStill missing files after %s seconds: %s",
937+ log_pre, maxwait, need)
938 return need
939
940
941diff --git a/cloudinit/sources/DataSourceCloudSigma.py b/cloudinit/sources/DataSourceCloudSigma.py
942index ffc23e3..19df16b 100644
943--- a/cloudinit/sources/DataSourceCloudSigma.py
944+++ b/cloudinit/sources/DataSourceCloudSigma.py
945@@ -43,7 +43,7 @@ class DataSourceCloudSigma(sources.DataSource):
946 LOG.debug("detected hypervisor as %s", sys_product_name)
947 return 'cloudsigma' in sys_product_name.lower()
948
949- LOG.warn("failed to query dmi data for system product name")
950+ LOG.warning("failed to query dmi data for system product name")
951 return False
952
953 def get_data(self):
954diff --git a/cloudinit/sources/DataSourceConfigDrive.py b/cloudinit/sources/DataSourceConfigDrive.py
955index 46dd89e..ef374f3 100644
956--- a/cloudinit/sources/DataSourceConfigDrive.py
957+++ b/cloudinit/sources/DataSourceConfigDrive.py
958@@ -127,7 +127,7 @@ class DataSourceConfigDrive(openstack.SourceMixin, sources.DataSource):
959 try:
960 self.vendordata_raw = sources.convert_vendordata(vd)
961 except ValueError as e:
962- LOG.warn("Invalid content in vendor-data: %s", e)
963+ LOG.warning("Invalid content in vendor-data: %s", e)
964 self.vendordata_raw = None
965
966 # network_config is an /etc/network/interfaces formated file and is
967@@ -190,7 +190,7 @@ def on_first_boot(data, distro=None, network=True):
968 if network:
969 net_conf = data.get("network_config", '')
970 if net_conf and distro:
971- LOG.warn("Updating network interfaces from config drive")
972+ LOG.warning("Updating network interfaces from config drive")
973 distro.apply_network(net_conf)
974 write_injected_files(data.get('files'))
975
976diff --git a/cloudinit/sources/DataSourceDigitalOcean.py b/cloudinit/sources/DataSourceDigitalOcean.py
977index d052c4c..5e7e66b 100644
978--- a/cloudinit/sources/DataSourceDigitalOcean.py
979+++ b/cloudinit/sources/DataSourceDigitalOcean.py
980@@ -51,7 +51,7 @@ class DataSourceDigitalOcean(sources.DataSource):
981 if not is_do:
982 return False
983
984- LOG.info("Running on digital ocean. droplet_id=%s" % droplet_id)
985+ LOG.info("Running on digital ocean. droplet_id=%s", droplet_id)
986
987 ipv4LL_nic = None
988 if self.use_ip4LL:
989diff --git a/cloudinit/sources/DataSourceEc2.py b/cloudinit/sources/DataSourceEc2.py
990index 6f01a13..2f9c7ed 100644
991--- a/cloudinit/sources/DataSourceEc2.py
992+++ b/cloudinit/sources/DataSourceEc2.py
993@@ -125,7 +125,7 @@ class DataSourceEc2(sources.DataSource):
994 if len(filtered):
995 mdurls = filtered
996 else:
997- LOG.warn("Empty metadata url list! using default list")
998+ LOG.warning("Empty metadata url list! using default list")
999 mdurls = self.metadata_urls
1000
1001 urls = []
1002@@ -232,7 +232,7 @@ def read_strict_mode(cfgval, default):
1003 try:
1004 return parse_strict_mode(cfgval)
1005 except ValueError as e:
1006- LOG.warn(e)
1007+ LOG.warning(e)
1008 return default
1009
1010
1011@@ -270,7 +270,7 @@ def warn_if_necessary(cfgval, cfg):
1012 try:
1013 mode, sleep = parse_strict_mode(cfgval)
1014 except ValueError as e:
1015- LOG.warn(e)
1016+ LOG.warning(e)
1017 return
1018
1019 if mode == "false":
1020@@ -304,8 +304,8 @@ def identify_platform():
1021 if result:
1022 return result
1023 except Exception as e:
1024- LOG.warn("calling %s with %s raised exception: %s",
1025- checker, data, e)
1026+ LOG.warning("calling %s with %s raised exception: %s",
1027+ checker, data, e)
1028
1029
1030 def _collect_platform_data():
1031diff --git a/cloudinit/sources/DataSourceGCE.py b/cloudinit/sources/DataSourceGCE.py
1032index 637c950..e9afda9 100644
1033--- a/cloudinit/sources/DataSourceGCE.py
1034+++ b/cloudinit/sources/DataSourceGCE.py
1035@@ -98,7 +98,7 @@ class DataSourceGCE(sources.DataSource):
1036 if not running_on_gce:
1037 LOG.debug(msg, mkey)
1038 else:
1039- LOG.warn(msg, mkey)
1040+ LOG.warning(msg, mkey)
1041 return False
1042 self.metadata[mkey] = value
1043
1044@@ -116,7 +116,8 @@ class DataSourceGCE(sources.DataSource):
1045 self.metadata['user-data'] = b64decode(
1046 self.metadata['user-data'])
1047 else:
1048- LOG.warn('unknown user-data-encoding: %s, ignoring', encoding)
1049+ LOG.warning('unknown user-data-encoding: %s, ignoring',
1050+ encoding)
1051
1052 return running_on_gce
1053
1054diff --git a/cloudinit/sources/DataSourceMAAS.py b/cloudinit/sources/DataSourceMAAS.py
1055index 41179b0..77df5a5 100644
1056--- a/cloudinit/sources/DataSourceMAAS.py
1057+++ b/cloudinit/sources/DataSourceMAAS.py
1058@@ -71,7 +71,7 @@ class DataSourceMAAS(sources.DataSource):
1059 except MAASSeedDirNone:
1060 pass
1061 except MAASSeedDirMalformed as exc:
1062- LOG.warn("%s was malformed: %s" % (self.seed_dir, exc))
1063+ LOG.warning("%s was malformed: %s", self.seed_dir, exc)
1064 raise
1065
1066 # If there is no metadata_url, then we're not configured
1067@@ -107,7 +107,7 @@ class DataSourceMAAS(sources.DataSource):
1068 try:
1069 self.vendordata_raw = sources.convert_vendordata(vd)
1070 except ValueError as e:
1071- LOG.warn("Invalid content in vendor-data: %s", e)
1072+ LOG.warning("Invalid content in vendor-data: %s", e)
1073 self.vendordata_raw = None
1074
1075 def wait_for_metadata_service(self, url):
1076@@ -126,7 +126,7 @@ class DataSourceMAAS(sources.DataSource):
1077 if timeout in mcfg:
1078 timeout = int(mcfg.get("timeout", timeout))
1079 except Exception:
1080- LOG.warn("Failed to get timeout, using %s" % timeout)
1081+ LOG.warning("Failed to get timeout, using %s", timeout)
1082
1083 starttime = time.time()
1084 if url.endswith("/"):
1085@@ -190,8 +190,8 @@ def read_maas_seed_url(seed_url, read_file_or_url=None, timeout=None,
1086 else:
1087 md[path] = util.decode_binary(resp.contents)
1088 else:
1089- LOG.warn(("Fetching from %s resulted in"
1090- " an invalid http code %s"), url, resp.code)
1091+ LOG.warning(("Fetching from %s resulted in"
1092+ " an invalid http code %s"), url, resp.code)
1093 except url_helper.UrlError as e:
1094 if e.code == 404 and not optional:
1095 raise MAASSeedDirMalformed(
1096diff --git a/cloudinit/sources/DataSourceNoCloud.py b/cloudinit/sources/DataSourceNoCloud.py
1097index 5924b82..c68f6b8 100644
1098--- a/cloudinit/sources/DataSourceNoCloud.py
1099+++ b/cloudinit/sources/DataSourceNoCloud.py
1100@@ -104,8 +104,8 @@ class DataSourceNoCloud(sources.DataSource):
1101 pp2d_kwargs)
1102 except ValueError as e:
1103 if dev in label_list:
1104- LOG.warn("device %s with label=%s not a"
1105- "valid seed.", dev, label)
1106+ LOG.warning("device %s with label=%s not a"
1107+ "valid seed.", dev, label)
1108 continue
1109
1110 mydata = _merge_new_seed(mydata, seeded)
1111diff --git a/cloudinit/sources/DataSourceOVF.py b/cloudinit/sources/DataSourceOVF.py
1112index d70784a..f20c9a6 100644
1113--- a/cloudinit/sources/DataSourceOVF.py
1114+++ b/cloudinit/sources/DataSourceOVF.py
1115@@ -225,12 +225,12 @@ def get_max_wait_from_cfg(cfg):
1116 try:
1117 max_wait = int(cfg.get(max_wait_cfg_option, default_max_wait))
1118 except ValueError:
1119- LOG.warn("Failed to get '%s', using %s",
1120- max_wait_cfg_option, default_max_wait)
1121+ LOG.warning("Failed to get '%s', using %s",
1122+ max_wait_cfg_option, default_max_wait)
1123
1124 if max_wait <= 0:
1125- LOG.warn("Invalid value '%s' for '%s', using '%s' instead",
1126- max_wait, max_wait_cfg_option, default_max_wait)
1127+ LOG.warning("Invalid value '%s' for '%s', using '%s' instead",
1128+ max_wait, max_wait_cfg_option, default_max_wait)
1129 max_wait = default_max_wait
1130
1131 return max_wait
1132@@ -355,7 +355,7 @@ def transport_iso9660(require_iso=True):
1133 try:
1134 (fname, contents) = util.mount_cb(fullp, get_ovf_env, mtype=mtype)
1135 except util.MountFailedError:
1136- LOG.debug("%s not mountable as iso9660" % fullp)
1137+ LOG.debug("%s not mountable as iso9660", fullp)
1138 continue
1139
1140 if contents is not False:
1141diff --git a/cloudinit/sources/DataSourceOpenNebula.py b/cloudinit/sources/DataSourceOpenNebula.py
1142index cd75e6e..5fdac19 100644
1143--- a/cloudinit/sources/DataSourceOpenNebula.py
1144+++ b/cloudinit/sources/DataSourceOpenNebula.py
1145@@ -64,7 +64,7 @@ class DataSourceOpenNebula(sources.DataSource):
1146 except BrokenContextDiskDir as exc:
1147 raise exc
1148 except util.MountFailedError:
1149- LOG.warn("%s was not mountable" % cdev)
1150+ LOG.warning("%s was not mountable", cdev)
1151
1152 if results:
1153 seed = cdev
1154@@ -381,7 +381,7 @@ def read_context_disk_dir(source_dir, asuser=None):
1155 try:
1156 results['userdata'] = util.b64d(results['userdata'])
1157 except TypeError:
1158- LOG.warn("Failed base64 decoding of userdata")
1159+ LOG.warning("Failed base64 decoding of userdata")
1160
1161 # generate static /etc/network/interfaces
1162 # only if there are any required context variables
1163diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py
1164index e1ea21f..f0a6bfc 100644
1165--- a/cloudinit/sources/DataSourceOpenStack.py
1166+++ b/cloudinit/sources/DataSourceOpenStack.py
1167@@ -73,7 +73,7 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource):
1168 if len(filtered):
1169 urls = filtered
1170 else:
1171- LOG.warn("Empty metadata url list! using default list")
1172+ LOG.warning("Empty metadata url list! using default list")
1173 urls = [DEF_MD_URL]
1174
1175 md_urls = []
1176@@ -137,7 +137,7 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource):
1177 try:
1178 self.vendordata_raw = sources.convert_vendordata(vd)
1179 except ValueError as e:
1180- LOG.warn("Invalid content in vendor-data: %s", e)
1181+ LOG.warning("Invalid content in vendor-data: %s", e)
1182 self.vendordata_raw = None
1183
1184 return True
1185diff --git a/cloudinit/sources/DataSourceSmartOS.py b/cloudinit/sources/DataSourceSmartOS.py
1186index 5e66894..6c6902f 100644
1187--- a/cloudinit/sources/DataSourceSmartOS.py
1188+++ b/cloudinit/sources/DataSourceSmartOS.py
1189@@ -555,7 +555,7 @@ class JoyentMetadataLegacySerialClient(JoyentMetadataSerialClient):
1190 val = base64.b64decode(val.encode()).decode()
1191 # Bogus input produces different errors in Python 2 and 3
1192 except (TypeError, binascii.Error):
1193- LOG.warn("Failed base64 decoding key '%s': %s", key, val)
1194+ LOG.warning("Failed base64 decoding key '%s': %s", key, val)
1195
1196 if strip:
1197 val = val.strip()
1198diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
1199index 5c99437..c3ce36d 100644
1200--- a/cloudinit/sources/__init__.py
1201+++ b/cloudinit/sources/__init__.py
1202@@ -237,8 +237,8 @@ class DataSource(object):
1203 if candidate in valid:
1204 return candidate
1205 else:
1206- LOG.warn("invalid dsmode '%s', using default=%s",
1207- candidate, default)
1208+ LOG.warning("invalid dsmode '%s', using default=%s",
1209+ candidate, default)
1210 return default
1211
1212 return default
1213diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py
1214index f32dac9..6e01aa4 100644
1215--- a/cloudinit/sources/helpers/azure.py
1216+++ b/cloudinit/sources/helpers/azure.py
1217@@ -289,7 +289,7 @@ class WALinuxAgentShim(object):
1218 LOG.debug("Unable to find endpoint in dhclient logs. "
1219 " Falling back to check lease files")
1220 if fallback_lease_file is None:
1221- LOG.warn("No fallback lease file was specified.")
1222+ LOG.warning("No fallback lease file was specified.")
1223 value = None
1224 else:
1225 LOG.debug("Looking for endpoint in lease file %s",
1226diff --git a/cloudinit/sources/helpers/vmware/imc/config_file.py b/cloudinit/sources/helpers/vmware/imc/config_file.py
1227index 14293f3..602af07 100644
1228--- a/cloudinit/sources/helpers/vmware/imc/config_file.py
1229+++ b/cloudinit/sources/helpers/vmware/imc/config_file.py
1230@@ -43,9 +43,9 @@ class ConfigFile(ConfigSource, dict):
1231
1232 # "sensitive" settings shall not be logged
1233 if canLog:
1234- logger.debug("ADDED KEY-VAL :: '%s' = '%s'" % (key, val))
1235+ logger.debug("ADDED KEY-VAL :: '%s' = '%s'", key, val)
1236 else:
1237- logger.debug("ADDED KEY-VAL :: '%s' = '*****************'" % key)
1238+ logger.debug("ADDED KEY-VAL :: '%s' = '*****************'", key)
1239
1240 self[key] = val
1241
1242@@ -60,7 +60,7 @@ class ConfigFile(ConfigSource, dict):
1243 Keyword arguments:
1244 filename - The full path to the config file.
1245 """
1246- logger.info('Parsing the config file %s.' % filename)
1247+ logger.info('Parsing the config file %s.', filename)
1248
1249 config = configparser.ConfigParser()
1250 config.optionxform = str
1251@@ -69,7 +69,7 @@ class ConfigFile(ConfigSource, dict):
1252 self.clear()
1253
1254 for category in config.sections():
1255- logger.debug("FOUND CATEGORY = '%s'" % category)
1256+ logger.debug("FOUND CATEGORY = '%s'", category)
1257
1258 for (key, value) in config.items(category):
1259 self._insertKey(category + '|' + key, value)
1260diff --git a/cloudinit/stages.py b/cloudinit/stages.py
1261index 1216543..f7191b0 100644
1262--- a/cloudinit/stages.py
1263+++ b/cloudinit/stages.py
1264@@ -163,8 +163,8 @@ class Init(object):
1265 except OSError as e:
1266 error = e
1267
1268- LOG.warn("Failed changing perms on '%s'. tried: %s. %s",
1269- log_file, ','.join(perms), error)
1270+ LOG.warning("Failed changing perms on '%s'. tried: %s. %s",
1271+ log_file, ','.join(perms), error)
1272
1273 def read_cfg(self, extra_fns=None):
1274 # None check so that we don't keep on re-loading if empty
1275@@ -447,9 +447,9 @@ class Init(object):
1276 mod_locs, looked_locs = importer.find_module(
1277 mod_name, [''], ['list_types', 'handle_part'])
1278 if not mod_locs:
1279- LOG.warn("Could not find a valid user-data handler"
1280- " named %s in file %s (searched %s)",
1281- mod_name, fname, looked_locs)
1282+ LOG.warning("Could not find a valid user-data handler"
1283+ " named %s in file %s (searched %s)",
1284+ mod_name, fname, looked_locs)
1285 continue
1286 mod = importer.import_module(mod_locs[0])
1287 mod = handlers.fixup_handler(mod)
1288@@ -568,7 +568,8 @@ class Init(object):
1289
1290 if not isinstance(vdcfg, dict):
1291 vdcfg = {'enabled': False}
1292- LOG.warn("invalid 'vendor_data' setting. resetting to: %s", vdcfg)
1293+ LOG.warning("invalid 'vendor_data' setting. resetting to: %s",
1294+ vdcfg)
1295
1296 enabled = vdcfg.get('enabled')
1297 no_handlers = vdcfg.get('disabled_handlers', None)
1298@@ -632,10 +633,10 @@ class Init(object):
1299 return
1300
1301 try:
1302- LOG.debug("applying net config names for %s" % netcfg)
1303+ LOG.debug("applying net config names for %s", netcfg)
1304 self.distro.apply_network_config_names(netcfg)
1305 except Exception as e:
1306- LOG.warn("Failed to rename devices: %s", e)
1307+ LOG.warning("Failed to rename devices: %s", e)
1308
1309 if (self.datasource is not NULL_DATA_SOURCE and
1310 not self.is_new_instance()):
1311@@ -651,9 +652,9 @@ class Init(object):
1312 "likely broken: %s", e)
1313 return
1314 except NotImplementedError:
1315- LOG.warn("distro '%s' does not implement apply_network_config. "
1316- "networking may not be configured properly.",
1317- self.distro)
1318+ LOG.warning("distro '%s' does not implement apply_network_config. "
1319+ "networking may not be configured properly.",
1320+ self.distro)
1321 return
1322
1323
1324@@ -737,15 +738,15 @@ class Modules(object):
1325 if not mod_name:
1326 continue
1327 if freq and freq not in FREQUENCIES:
1328- LOG.warn(("Config specified module %s"
1329- " has an unknown frequency %s"), raw_name, freq)
1330+ LOG.warning(("Config specified module %s"
1331+ " has an unknown frequency %s"), raw_name, freq)
1332 # Reset it so when ran it will get set to a known value
1333 freq = None
1334 mod_locs, looked_locs = importer.find_module(
1335 mod_name, ['', type_utils.obj_name(config)], ['handle'])
1336 if not mod_locs:
1337- LOG.warn("Could not find module named %s (searched %s)",
1338- mod_name, looked_locs)
1339+ LOG.warning("Could not find module named %s (searched %s)",
1340+ mod_name, looked_locs)
1341 continue
1342 mod = config.fixup_module(importer.import_module(mod_locs[0]))
1343 mostly_mods.append([mod, raw_name, freq, run_args])
1344@@ -877,7 +878,7 @@ def _pkl_load(fname):
1345 pickle_contents = util.load_file(fname, decode=False)
1346 except Exception as e:
1347 if os.path.isfile(fname):
1348- LOG.warn("failed loading pickle in %s: %s" % (fname, e))
1349+ LOG.warning("failed loading pickle in %s: %s", fname, e)
1350 pass
1351
1352 # This is allowed so just return nothing successfully loaded...
1353diff --git a/cloudinit/templater.py b/cloudinit/templater.py
1354index 648cd21..b3ea64e 100644
1355--- a/cloudinit/templater.py
1356+++ b/cloudinit/templater.py
1357@@ -103,14 +103,14 @@ def detect_template(text):
1358 raise ValueError("Unknown template rendering type '%s' requested"
1359 % template_type)
1360 if template_type == 'jinja' and not JINJA_AVAILABLE:
1361- LOG.warn("Jinja not available as the selected renderer for"
1362- " desired template, reverting to the basic renderer.")
1363+ LOG.warning("Jinja not available as the selected renderer for"
1364+ " desired template, reverting to the basic renderer.")
1365 return ('basic', basic_render, rest)
1366 elif template_type == 'jinja' and JINJA_AVAILABLE:
1367 return ('jinja', jinja_render, rest)
1368 if template_type == 'cheetah' and not CHEETAH_AVAILABLE:
1369- LOG.warn("Cheetah not available as the selected renderer for"
1370- " desired template, reverting to the basic renderer.")
1371+ LOG.warning("Cheetah not available as the selected renderer for"
1372+ " desired template, reverting to the basic renderer.")
1373 return ('basic', basic_render, rest)
1374 elif template_type == 'cheetah' and CHEETAH_AVAILABLE:
1375 return ('cheetah', cheetah_render, rest)
1376diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py
1377index 2f6a158..d2b92e6 100644
1378--- a/cloudinit/url_helper.py
1379+++ b/cloudinit/url_helper.py
1380@@ -155,8 +155,8 @@ def _get_ssl_args(url, ssl_details):
1381 scheme = urlparse(url).scheme
1382 if scheme == 'https' and ssl_details:
1383 if not SSL_ENABLED:
1384- LOG.warn("SSL is not supported in requests v%s, "
1385- "cert. verification can not occur!", _REQ_VER)
1386+ LOG.warning("SSL is not supported in requests v%s, "
1387+ "cert. verification can not occur!", _REQ_VER)
1388 else:
1389 if 'ca_certs' in ssl_details and ssl_details['ca_certs']:
1390 ssl_args['verify'] = ssl_details['ca_certs']
1391@@ -415,14 +415,15 @@ class OauthUrlHelper(object):
1392 return
1393
1394 if 'date' not in exception.headers:
1395- LOG.warn("Missing header 'date' in %s response", exception.code)
1396+ LOG.warning("Missing header 'date' in %s response",
1397+ exception.code)
1398 return
1399
1400 date = exception.headers['date']
1401 try:
1402 remote_time = time.mktime(parsedate(date))
1403 except Exception as e:
1404- LOG.warn("Failed to convert datetime '%s': %s", date, e)
1405+ LOG.warning("Failed to convert datetime '%s': %s", date, e)
1406 return
1407
1408 skew = int(remote_time - time.time())
1409@@ -430,7 +431,7 @@ class OauthUrlHelper(object):
1410 old_skew = self.skew_data.get(host, 0)
1411 if abs(old_skew - skew) > self.skew_change_limit:
1412 self.update_skew_file(host, skew)
1413- LOG.warn("Setting oauth clockskew for %s to %d", host, skew)
1414+ LOG.warning("Setting oauth clockskew for %s to %d", host, skew)
1415 self.skew_data[host] = skew
1416
1417 return
1418diff --git a/cloudinit/user_data.py b/cloudinit/user_data.py
1419index cfe5aa2..88cb7f8 100644
1420--- a/cloudinit/user_data.py
1421+++ b/cloudinit/user_data.py
1422@@ -109,8 +109,9 @@ class UserDataProcessor(object):
1423 ctype_orig = None
1424 was_compressed = True
1425 except util.DecompressionError as e:
1426- LOG.warn("Failed decompressing payload from %s of length"
1427- " %s due to: %s", ctype_orig, len(payload), e)
1428+ LOG.warning("Failed decompressing payload from %s of"
1429+ " length %s due to: %s",
1430+ ctype_orig, len(payload), e)
1431 continue
1432
1433 # Attempt to figure out the payloads content-type
1434@@ -228,9 +229,9 @@ class UserDataProcessor(object):
1435 if resp.ok():
1436 content = resp.contents
1437 else:
1438- LOG.warn(("Fetching from %s resulted in"
1439- " a invalid http code of %s"),
1440- include_url, resp.code)
1441+ LOG.warning(("Fetching from %s resulted in"
1442+ " a invalid http code of %s"),
1443+ include_url, resp.code)
1444
1445 if content is not None:
1446 new_msg = convert_string(content)
1447diff --git a/cloudinit/util.py b/cloudinit/util.py
1448index 6940850..bfddca6 100644
1449--- a/cloudinit/util.py
1450+++ b/cloudinit/util.py
1451@@ -96,11 +96,11 @@ def _lsb_release(target=None):
1452 data[fmap[fname]] = val.strip()
1453 missing = [k for k in fmap.values() if k not in data]
1454 if len(missing):
1455- LOG.warn("Missing fields in lsb_release --all output: %s",
1456- ','.join(missing))
1457+ LOG.warning("Missing fields in lsb_release --all output: %s",
1458+ ','.join(missing))
1459
1460 except ProcessExecutionError as err:
1461- LOG.warn("Unable to get lsb_release --all: %s", err)
1462+ LOG.warning("Unable to get lsb_release --all: %s", err)
1463 data = dict((v, "UNAVAILABLE") for v in fmap.values())
1464
1465 return data
1466@@ -590,7 +590,7 @@ def system_info():
1467 'release': platform.release(),
1468 'python': platform.python_version(),
1469 'uname': platform.uname(),
1470- 'dist': platform.linux_distribution(),
1471+ 'dist': platform.linux_distribution(), # pylint: disable=W1505
1472 }
1473
1474
1475@@ -865,7 +865,7 @@ def read_file_or_url(url, timeout=5, retries=10,
1476 url = "file://%s" % url
1477 if url.lower().startswith("file://"):
1478 if data:
1479- LOG.warn("Unable to post data to file resource %s", url)
1480+ LOG.warning("Unable to post data to file resource %s", url)
1481 file_path = url[len("file://"):]
1482 try:
1483 contents = load_file(file_path, decode=False)
1484@@ -1279,7 +1279,7 @@ def get_cmdline():
1485 # replace nulls with space and drop trailing null
1486 cmdline = contents.replace("\x00", " ")[:-1]
1487 except Exception as e:
1488- LOG.warn("failed reading /proc/1/cmdline: %s", e)
1489+ LOG.warning("failed reading /proc/1/cmdline: %s", e)
1490 cmdline = ""
1491 else:
1492 try:
1493@@ -1400,7 +1400,7 @@ def logexc(log, msg, *args):
1494 # or even desirable to have that much junk
1495 # coming out to a non-debug stream
1496 if msg:
1497- log.warn(msg, *args)
1498+ log.warning(msg, *args)
1499 # Debug gets the full trace. However, nose has a bug whereby its
1500 # logcapture plugin doesn't properly handle the case where there is no
1501 # actual exception. To avoid tracebacks during the test suite then, we'll
1502@@ -2344,8 +2344,8 @@ def read_dmi_data(key):
1503 if dmidecode_path:
1504 return _call_dmidecode(key, dmidecode_path)
1505
1506- LOG.warn("did not find either path %s or dmidecode command",
1507- DMI_SYS_PATH)
1508+ LOG.warning("did not find either path %s or dmidecode command",
1509+ DMI_SYS_PATH)
1510 return None
1511
1512
1513diff --git a/cloudinit/warnings.py b/cloudinit/warnings.py
1514index 3206d4e..f9f7a63 100644
1515--- a/cloudinit/warnings.py
1516+++ b/cloudinit/warnings.py
1517@@ -130,10 +130,10 @@ def show_warning(name, cfg=None, sleep=None, mode=True, **kwargs):
1518 os.path.join(_get_warn_dir(cfg), name),
1519 topline + "\n".join(fmtlines) + "\n" + topline)
1520
1521- LOG.warn(topline + "\n".join(fmtlines) + "\n" + closeline)
1522+ LOG.warning(topline + "\n".join(fmtlines) + "\n" + closeline)
1523
1524 if sleep:
1525- LOG.debug("sleeping %d seconds for warning '%s'" % (sleep, name))
1526+ LOG.debug("sleeping %d seconds for warning '%s'", sleep, name)
1527 time.sleep(sleep)
1528
1529 # vi: ts=4 expandtab
1530diff --git a/tests/cloud_tests/__main__.py b/tests/cloud_tests/__main__.py
1531index ef7d187..ed654ad 100644
1532--- a/tests/cloud_tests/__main__.py
1533+++ b/tests/cloud_tests/__main__.py
1534@@ -38,7 +38,7 @@ def run(args):
1535 finally:
1536 # TODO: make this configurable via environ or cmdline
1537 if failed:
1538- LOG.warn('some tests failed, leaving data in %s', args.data_dir)
1539+ LOG.warning('some tests failed, leaving data in %s', args.data_dir)
1540 else:
1541 shutil.rmtree(args.data_dir)
1542 return failed
1543diff --git a/tests/cloud_tests/args.py b/tests/cloud_tests/args.py
1544index b68cc98..371b044 100644
1545--- a/tests/cloud_tests/args.py
1546+++ b/tests/cloud_tests/args.py
1547@@ -94,7 +94,7 @@ def normalize_create_args(args):
1548 if os.path.exists(config.name_to_path(args.name)):
1549 msg = 'test: {} already exists'.format(args.name)
1550 if args.force:
1551- LOG.warn('%s but ignoring due to --force', msg)
1552+ LOG.warning('%s but ignoring due to --force', msg)
1553 else:
1554 LOG.error(msg)
1555 return None
1556diff --git a/tests/cloud_tests/collect.py b/tests/cloud_tests/collect.py
1557index 68b47d7..02fc0e5 100644
1558--- a/tests/cloud_tests/collect.py
1559+++ b/tests/cloud_tests/collect.py
1560@@ -45,7 +45,7 @@ def collect_test_data(args, snapshot, os_name, test_name):
1561
1562 # if test is not enabled, skip and return 0 failures
1563 if not test_config.get('enabled', False):
1564- LOG.warn('test config %s is not enabled, skipping', test_name)
1565+ LOG.warning('test config %s is not enabled, skipping', test_name)
1566 return ({}, 0)
1567
1568 # create test instance
1569diff --git a/tests/cloud_tests/verify.py b/tests/cloud_tests/verify.py
1570index ef7d4e2..2a63550 100644
1571--- a/tests/cloud_tests/verify.py
1572+++ b/tests/cloud_tests/verify.py
1573@@ -45,9 +45,9 @@ def verify_data(base_dir, tests):
1574 }
1575
1576 for failure in res[test_name]['failures']:
1577- LOG.warn('test case: %s failed %s.%s with: %s',
1578- test_name, failure['class'], failure['function'],
1579- failure['error'])
1580+ LOG.warning('test case: %s failed %s.%s with: %s',
1581+ test_name, failure['class'], failure['function'],
1582+ failure['error'])
1583
1584 return res
1585
1586@@ -80,7 +80,8 @@ def verify(args):
1587 if len(fail_list) == 0:
1588 LOG.info('test: %s passed all tests', test_name)
1589 else:
1590- LOG.warn('test: %s failed %s tests', test_name, len(fail_list))
1591+ LOG.warning('test: %s failed %s tests', test_name,
1592+ len(fail_list))
1593 failed += len(fail_list)
1594
1595 # dump results
1596diff --git a/tools/mock-meta.py b/tools/mock-meta.py
1597index 95fc465..82816e8 100755
1598--- a/tools/mock-meta.py
1599+++ b/tools/mock-meta.py
1600@@ -293,9 +293,9 @@ class MetaDataHandler(object):
1601 else:
1602 return "%s" % (PLACEMENT_CAPABILITIES.get(pentry, ''))
1603 else:
1604- log.warn(("Did not implement action %s, "
1605- "returning empty response: %r"),
1606- action, NOT_IMPL_RESPONSE)
1607+ log.warning(("Did not implement action %s, "
1608+ "returning empty response: %r"),
1609+ action, NOT_IMPL_RESPONSE)
1610 return NOT_IMPL_RESPONSE
1611
1612

Subscribers

People subscribed via source and target branches