Merge ~vtqanh/cloud-init:addKVPtelemetry into cloud-init:master

Proposed by Anh Vo (MSFT)
Status: Merged
Approved by: Ryan Harper
Approved revision: d0778d381caad5b9edbdca013eb443652bb8c81b
Merge reported by: Server Team CI bot
Merged at revision: not available
Proposed branch: ~vtqanh/cloud-init:addKVPtelemetry
Merge into: cloud-init:master
Diff against target: 606 lines (+179/-83)
2 files modified
cloudinit/sources/DataSourceAzure.py (+148/-83)
cloudinit/sources/helpers/azure.py (+31/-0)
Reviewer Review Type Date Requested Status
Server Team CI bot continuous-integration Approve
Ryan Harper Approve
Review via email: mp+365374@code.launchpad.net

Commit message

DatasourceAzure: add additional logging for azure datasource

Create an Azure logging decorator and use additional ReportEventStack
context managers to provide additional logging details.

To post a comment you must log in.
Revision history for this message
Jason Zions (jasonzio) wrote :

LGTM

Revision history for this message
Ryan Harper (raharper) wrote :

I've pointed CI at this branch.

Revision history for this message
Server Team CI bot (server-team-bot) wrote :

FAILED: Continuous integration, rev:d0778d381caad5b9edbdca013eb443652bb8c81b
https://jenkins.ubuntu.com/server/job/cloud-init-ci/663/
Executed test runs:
    FAILED: Checkout

Click here to trigger a rebuild:
https://jenkins.ubuntu.com/server/job/cloud-init-ci/663/rebuild

review: Needs Fixing (continuous-integration)
Revision history for this message
Anh Vo (MSFT) (vtqanh) wrote :

Looks like CI failed to check out the branch due to an authentication issue:

Are there any settings necessary on my branch to allow this?

Cloning the remote Git repository
Cloning repository https://git.launchpad.net/~vtqanh/cloud-init:addKVPtelemetry
 > git init /var/lib/jenkins/slaves/torkoal/workspace/cloud-init-ci # timeout=10
Fetching upstream changes from https://git.launchpad.net/~vtqanh/cloud-init:addKVPtelemetry
 > git --version # timeout=10
 > git fetch --tags --progress https://git.launchpad.net/~vtqanh/cloud-init:addKVPtelemetry +refs/heads/*:refs/remotes/origin/*
ERROR: Error cloning remote repo 'origin'
hudson.plugins.git.GitException: Command "git fetch --tags --progress https://git.launchpad.net/~vtqanh/cloud-init:addKVPtelemetry +refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout:
stderr: remote: Authorisation required.

Revision history for this message
Ryan Harper (raharper) wrote :

Likely my fault, I'll submit again.

Revision history for this message
Server Team CI bot (server-team-bot) wrote :

PASSED: Continuous integration, rev:d0778d381caad5b9edbdca013eb443652bb8c81b
https://jenkins.ubuntu.com/server/job/cloud-init-ci/664/
Executed test runs:
    SUCCESS: Checkout
    SUCCESS: Unit & Style Tests
    SUCCESS: Ubuntu LTS: Build
    SUCCESS: Ubuntu LTS: Integration
    IN_PROGRESS: Declarative: Post Actions

Click here to trigger a rebuild:
https://jenkins.ubuntu.com/server/job/cloud-init-ci/664/rebuild

review: Approve (continuous-integration)
Revision history for this message
Ryan Harper (raharper) :
review: Approve
Revision history for this message
Server Team CI bot (server-team-bot) :
review: Approve (continuous-integration)

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
2old mode 100644
3new mode 100755
4index b4e3f06..d4230b3
5--- a/cloudinit/sources/DataSourceAzure.py
6+++ b/cloudinit/sources/DataSourceAzure.py
7@@ -21,10 +21,14 @@ from cloudinit import net
8 from cloudinit.event import EventType
9 from cloudinit.net.dhcp import EphemeralDHCPv4
10 from cloudinit import sources
11-from cloudinit.sources.helpers.azure import get_metadata_from_fabric
12 from cloudinit.sources.helpers import netlink
13 from cloudinit.url_helper import UrlError, readurl, retry_on_url_exc
14 from cloudinit import util
15+from cloudinit.reporting import events
16+
17+from cloudinit.sources.helpers.azure import (azure_ds_reporter,
18+ azure_ds_telemetry_reporter,
19+ get_metadata_from_fabric)
20
21 LOG = logging.getLogger(__name__)
22
23@@ -244,6 +248,7 @@ def set_hostname(hostname, hostname_command='hostname'):
24 util.subp([hostname_command, hostname])
25
26
27+@azure_ds_telemetry_reporter
28 @contextlib.contextmanager
29 def temporary_hostname(temp_hostname, cfg, hostname_command='hostname'):
30 """
31@@ -290,6 +295,7 @@ class DataSourceAzure(sources.DataSource):
32 root = sources.DataSource.__str__(self)
33 return "%s [seed=%s]" % (root, self.seed)
34
35+ @azure_ds_telemetry_reporter
36 def bounce_network_with_azure_hostname(self):
37 # When using cloud-init to provision, we have to set the hostname from
38 # the metadata and "bounce" the network to force DDNS to update via
39@@ -315,6 +321,7 @@ class DataSourceAzure(sources.DataSource):
40 util.logexc(LOG, "handling set_hostname failed")
41 return False
42
43+ @azure_ds_telemetry_reporter
44 def get_metadata_from_agent(self):
45 temp_hostname = self.metadata.get('local-hostname')
46 agent_cmd = self.ds_cfg['agent_command']
47@@ -344,15 +351,18 @@ class DataSourceAzure(sources.DataSource):
48 LOG.debug("ssh authentication: "
49 "using fingerprint from fabirc")
50
51- # wait very long for public SSH keys to arrive
52- # https://bugs.launchpad.net/cloud-init/+bug/1717611
53- missing = util.log_time(logfunc=LOG.debug,
54- msg="waiting for SSH public key files",
55- func=util.wait_for_files,
56- args=(fp_files, 900))
57-
58- if len(missing):
59- LOG.warning("Did not find files, but going on: %s", missing)
60+ with events.ReportEventStack(
61+ name="waiting-for-ssh-public-key",
62+ description="wait for agents to retrieve ssh keys",
63+ parent=azure_ds_reporter):
64+ # wait very long for public SSH keys to arrive
65+ # https://bugs.launchpad.net/cloud-init/+bug/1717611
66+ missing = util.log_time(logfunc=LOG.debug,
67+ msg="waiting for SSH public key files",
68+ func=util.wait_for_files,
69+ args=(fp_files, 900))
70+ if len(missing):
71+ LOG.warning("Did not find files, but going on: %s", missing)
72
73 metadata = {}
74 metadata['public-keys'] = key_value or pubkeys_from_crt_files(fp_files)
75@@ -366,6 +376,7 @@ class DataSourceAzure(sources.DataSource):
76 subplatform_type = 'seed-dir'
77 return '%s (%s)' % (subplatform_type, self.seed)
78
79+ @azure_ds_telemetry_reporter
80 def crawl_metadata(self):
81 """Walk all instance metadata sources returning a dict on success.
82
83@@ -467,6 +478,7 @@ class DataSourceAzure(sources.DataSource):
84 super(DataSourceAzure, self).clear_cached_attrs(attr_defaults)
85 self._metadata_imds = sources.UNSET
86
87+ @azure_ds_telemetry_reporter
88 def _get_data(self):
89 """Crawl and process datasource metadata caching metadata as attrs.
90
91@@ -513,6 +525,7 @@ class DataSourceAzure(sources.DataSource):
92 # quickly (local check only) if self.instance_id is still valid
93 return sources.instance_id_matches_system_uuid(self.get_instance_id())
94
95+ @azure_ds_telemetry_reporter
96 def setup(self, is_new_instance):
97 if self._negotiated is False:
98 LOG.debug("negotiating for %s (new_instance=%s)",
99@@ -580,6 +593,7 @@ class DataSourceAzure(sources.DataSource):
100 if nl_sock:
101 nl_sock.close()
102
103+ @azure_ds_telemetry_reporter
104 def _report_ready(self, lease):
105 """Tells the fabric provisioning has completed """
106 try:
107@@ -617,9 +631,14 @@ class DataSourceAzure(sources.DataSource):
108 def _reprovision(self):
109 """Initiate the reprovisioning workflow."""
110 contents = self._poll_imds()
111- md, ud, cfg = read_azure_ovf(contents)
112- return (md, ud, cfg, {'ovf-env.xml': contents})
113-
114+ with events.ReportEventStack(
115+ name="reprovisioning-read-azure-ovf",
116+ description="read azure ovf during reprovisioning",
117+ parent=azure_ds_reporter):
118+ md, ud, cfg = read_azure_ovf(contents)
119+ return (md, ud, cfg, {'ovf-env.xml': contents})
120+
121+ @azure_ds_telemetry_reporter
122 def _negotiate(self):
123 """Negotiate with fabric and return data from it.
124
125@@ -652,6 +671,7 @@ class DataSourceAzure(sources.DataSource):
126 util.del_file(REPROVISION_MARKER_FILE)
127 return fabric_data
128
129+ @azure_ds_telemetry_reporter
130 def activate(self, cfg, is_new_instance):
131 address_ephemeral_resize(is_new_instance=is_new_instance,
132 preserve_ntfs=self.ds_cfg.get(
133@@ -690,12 +710,14 @@ def _partitions_on_device(devpath, maxnum=16):
134 return []
135
136
137+@azure_ds_telemetry_reporter
138 def _has_ntfs_filesystem(devpath):
139 ntfs_devices = util.find_devs_with("TYPE=ntfs", no_cache=True)
140 LOG.debug('ntfs_devices found = %s', ntfs_devices)
141 return os.path.realpath(devpath) in ntfs_devices
142
143
144+@azure_ds_telemetry_reporter
145 def can_dev_be_reformatted(devpath, preserve_ntfs):
146 """Determine if the ephemeral drive at devpath should be reformatted.
147
148@@ -744,43 +766,59 @@ def can_dev_be_reformatted(devpath, preserve_ntfs):
149 (cand_part, cand_path, devpath))
150 return False, msg
151
152+ @azure_ds_telemetry_reporter
153 def count_files(mp):
154 ignored = set(['dataloss_warning_readme.txt'])
155 return len([f for f in os.listdir(mp) if f.lower() not in ignored])
156
157 bmsg = ('partition %s (%s) on device %s was ntfs formatted' %
158 (cand_part, cand_path, devpath))
159- try:
160- file_count = util.mount_cb(cand_path, count_files, mtype="ntfs",
161- update_env_for_mount={'LANG': 'C'})
162- except util.MountFailedError as e:
163- if "unknown filesystem type 'ntfs'" in str(e):
164- return True, (bmsg + ' but this system cannot mount NTFS,'
165- ' assuming there are no important files.'
166- ' Formatting allowed.')
167- return False, bmsg + ' but mount of %s failed: %s' % (cand_part, e)
168-
169- if file_count != 0:
170- LOG.warning("it looks like you're using NTFS on the ephemeral disk, "
171- 'to ensure that filesystem does not get wiped, set '
172- '%s.%s in config', '.'.join(DS_CFG_PATH),
173- DS_CFG_KEY_PRESERVE_NTFS)
174- return False, bmsg + ' but had %d files on it.' % file_count
175+
176+ with events.ReportEventStack(
177+ name="mount-ntfs-and-count",
178+ description="mount-ntfs-and-count",
179+ parent=azure_ds_reporter) as evt:
180+ try:
181+ file_count = util.mount_cb(cand_path, count_files, mtype="ntfs",
182+ update_env_for_mount={'LANG': 'C'})
183+ except util.MountFailedError as e:
184+ evt.description = "cannot mount ntfs"
185+ if "unknown filesystem type 'ntfs'" in str(e):
186+ return True, (bmsg + ' but this system cannot mount NTFS,'
187+ ' assuming there are no important files.'
188+ ' Formatting allowed.')
189+ return False, bmsg + ' but mount of %s failed: %s' % (cand_part, e)
190+
191+ if file_count != 0:
192+ evt.description = "mounted and counted %d files" % file_count
193+ LOG.warning("it looks like you're using NTFS on the ephemeral"
194+ " disk, to ensure that filesystem does not get wiped,"
195+ " set %s.%s in config", '.'.join(DS_CFG_PATH),
196+ DS_CFG_KEY_PRESERVE_NTFS)
197+ return False, bmsg + ' but had %d files on it.' % file_count
198
199 return True, bmsg + ' and had no important files. Safe for reformatting.'
200
201
202+@azure_ds_telemetry_reporter
203 def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,
204 is_new_instance=False, preserve_ntfs=False):
205 # wait for ephemeral disk to come up
206 naplen = .2
207- missing = util.wait_for_files([devpath], maxwait=maxwait, naplen=naplen,
208- log_pre="Azure ephemeral disk: ")
209-
210- if missing:
211- LOG.warning("ephemeral device '%s' did not appear after %d seconds.",
212- devpath, maxwait)
213- return
214+ with events.ReportEventStack(
215+ name="wait-for-ephemeral-disk",
216+ description="wait for ephemeral disk",
217+ parent=azure_ds_reporter):
218+ missing = util.wait_for_files([devpath],
219+ maxwait=maxwait,
220+ naplen=naplen,
221+ log_pre="Azure ephemeral disk: ")
222+
223+ if missing:
224+ LOG.warning("ephemeral device '%s' did"
225+ " not appear after %d seconds.",
226+ devpath, maxwait)
227+ return
228
229 result = False
230 msg = None
231@@ -808,6 +846,7 @@ def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,
232 return
233
234
235+@azure_ds_telemetry_reporter
236 def perform_hostname_bounce(hostname, cfg, prev_hostname):
237 # set the hostname to 'hostname' if it is not already set to that.
238 # then, if policy is not off, bounce the interface using command
239@@ -843,6 +882,7 @@ def perform_hostname_bounce(hostname, cfg, prev_hostname):
240 return True
241
242
243+@azure_ds_telemetry_reporter
244 def crtfile_to_pubkey(fname, data=None):
245 pipeline = ('openssl x509 -noout -pubkey < "$0" |'
246 'ssh-keygen -i -m PKCS8 -f /dev/stdin')
247@@ -851,6 +891,7 @@ def crtfile_to_pubkey(fname, data=None):
248 return out.rstrip()
249
250
251+@azure_ds_telemetry_reporter
252 def pubkeys_from_crt_files(flist):
253 pubkeys = []
254 errors = []
255@@ -866,6 +907,7 @@ def pubkeys_from_crt_files(flist):
256 return pubkeys
257
258
259+@azure_ds_telemetry_reporter
260 def write_files(datadir, files, dirmode=None):
261
262 def _redact_password(cnt, fname):
263@@ -893,6 +935,7 @@ def write_files(datadir, files, dirmode=None):
264 util.write_file(filename=fname, content=content, mode=0o600)
265
266
267+@azure_ds_telemetry_reporter
268 def invoke_agent(cmd):
269 # this is a function itself to simplify patching it for test
270 if cmd:
271@@ -912,6 +955,7 @@ def find_child(node, filter_func):
272 return ret
273
274
275+@azure_ds_telemetry_reporter
276 def load_azure_ovf_pubkeys(sshnode):
277 # This parses a 'SSH' node formatted like below, and returns
278 # an array of dicts.
279@@ -964,6 +1008,7 @@ def load_azure_ovf_pubkeys(sshnode):
280 return found
281
282
283+@azure_ds_telemetry_reporter
284 def read_azure_ovf(contents):
285 try:
286 dom = minidom.parseString(contents)
287@@ -1064,6 +1109,7 @@ def read_azure_ovf(contents):
288 return (md, ud, cfg)
289
290
291+@azure_ds_telemetry_reporter
292 def _extract_preprovisioned_vm_setting(dom):
293 """Read the preprovision flag from the ovf. It should not
294 exist unless true."""
295@@ -1092,6 +1138,7 @@ def encrypt_pass(password, salt_id="$6$"):
296 return crypt.crypt(password, salt_id + util.rand_str(strlen=16))
297
298
299+@azure_ds_telemetry_reporter
300 def _check_freebsd_cdrom(cdrom_dev):
301 """Return boolean indicating path to cdrom device has content."""
302 try:
303@@ -1103,6 +1150,7 @@ def _check_freebsd_cdrom(cdrom_dev):
304 return False
305
306
307+@azure_ds_telemetry_reporter
308 def _get_random_seed(source=PLATFORM_ENTROPY_SOURCE):
309 """Return content random seed file if available, otherwise,
310 return None."""
311@@ -1126,6 +1174,7 @@ def _get_random_seed(source=PLATFORM_ENTROPY_SOURCE):
312 return seed
313
314
315+@azure_ds_telemetry_reporter
316 def list_possible_azure_ds_devs():
317 devlist = []
318 if util.is_FreeBSD():
319@@ -1140,6 +1189,7 @@ def list_possible_azure_ds_devs():
320 return devlist
321
322
323+@azure_ds_telemetry_reporter
324 def load_azure_ds_dir(source_dir):
325 ovf_file = os.path.join(source_dir, "ovf-env.xml")
326
327@@ -1162,47 +1212,54 @@ def parse_network_config(imds_metadata):
328 @param: imds_metadata: Dict of content read from IMDS network service.
329 @return: Dictionary containing network version 2 standard configuration.
330 """
331- if imds_metadata != sources.UNSET and imds_metadata:
332- netconfig = {'version': 2, 'ethernets': {}}
333- LOG.debug('Azure: generating network configuration from IMDS')
334- network_metadata = imds_metadata['network']
335- for idx, intf in enumerate(network_metadata['interface']):
336- nicname = 'eth{idx}'.format(idx=idx)
337- dev_config = {}
338- for addr4 in intf['ipv4']['ipAddress']:
339- privateIpv4 = addr4['privateIpAddress']
340- if privateIpv4:
341- if dev_config.get('dhcp4', False):
342- # Append static address config for nic > 1
343- netPrefix = intf['ipv4']['subnet'][0].get(
344- 'prefix', '24')
345- if not dev_config.get('addresses'):
346- dev_config['addresses'] = []
347- dev_config['addresses'].append(
348- '{ip}/{prefix}'.format(
349- ip=privateIpv4, prefix=netPrefix))
350- else:
351- dev_config['dhcp4'] = True
352- for addr6 in intf['ipv6']['ipAddress']:
353- privateIpv6 = addr6['privateIpAddress']
354- if privateIpv6:
355- dev_config['dhcp6'] = True
356- break
357- if dev_config:
358- mac = ':'.join(re.findall(r'..', intf['macAddress']))
359- dev_config.update(
360- {'match': {'macaddress': mac.lower()},
361- 'set-name': nicname})
362- netconfig['ethernets'][nicname] = dev_config
363- else:
364- blacklist = ['mlx4_core']
365- LOG.debug('Azure: generating fallback configuration')
366- # generate a network config, blacklist picking mlx4_core devs
367- netconfig = net.generate_fallback_config(
368- blacklist_drivers=blacklist, config_driver=True)
369- return netconfig
370+ with events.ReportEventStack(
371+ name="parse_network_config",
372+ description="",
373+ parent=azure_ds_reporter) as evt:
374+ if imds_metadata != sources.UNSET and imds_metadata:
375+ netconfig = {'version': 2, 'ethernets': {}}
376+ LOG.debug('Azure: generating network configuration from IMDS')
377+ network_metadata = imds_metadata['network']
378+ for idx, intf in enumerate(network_metadata['interface']):
379+ nicname = 'eth{idx}'.format(idx=idx)
380+ dev_config = {}
381+ for addr4 in intf['ipv4']['ipAddress']:
382+ privateIpv4 = addr4['privateIpAddress']
383+ if privateIpv4:
384+ if dev_config.get('dhcp4', False):
385+ # Append static address config for nic > 1
386+ netPrefix = intf['ipv4']['subnet'][0].get(
387+ 'prefix', '24')
388+ if not dev_config.get('addresses'):
389+ dev_config['addresses'] = []
390+ dev_config['addresses'].append(
391+ '{ip}/{prefix}'.format(
392+ ip=privateIpv4, prefix=netPrefix))
393+ else:
394+ dev_config['dhcp4'] = True
395+ for addr6 in intf['ipv6']['ipAddress']:
396+ privateIpv6 = addr6['privateIpAddress']
397+ if privateIpv6:
398+ dev_config['dhcp6'] = True
399+ break
400+ if dev_config:
401+ mac = ':'.join(re.findall(r'..', intf['macAddress']))
402+ dev_config.update(
403+ {'match': {'macaddress': mac.lower()},
404+ 'set-name': nicname})
405+ netconfig['ethernets'][nicname] = dev_config
406+ evt.description = "network config from imds"
407+ else:
408+ blacklist = ['mlx4_core']
409+ LOG.debug('Azure: generating fallback configuration')
410+ # generate a network config, blacklist picking mlx4_core devs
411+ netconfig = net.generate_fallback_config(
412+ blacklist_drivers=blacklist, config_driver=True)
413+ evt.description = "network config from fallback"
414+ return netconfig
415
416
417+@azure_ds_telemetry_reporter
418 def get_metadata_from_imds(fallback_nic, retries):
419 """Query Azure's network metadata service, returning a dictionary.
420
421@@ -1227,6 +1284,7 @@ def get_metadata_from_imds(fallback_nic, retries):
422 return util.log_time(**kwargs)
423
424
425+@azure_ds_telemetry_reporter
426 def _get_metadata_from_imds(retries):
427
428 url = IMDS_URL + "instance?api-version=2017-12-01"
429@@ -1246,6 +1304,7 @@ def _get_metadata_from_imds(retries):
430 return {}
431
432
433+@azure_ds_telemetry_reporter
434 def maybe_remove_ubuntu_network_config_scripts(paths=None):
435 """Remove Azure-specific ubuntu network config for non-primary nics.
436
437@@ -1283,14 +1342,20 @@ def maybe_remove_ubuntu_network_config_scripts(paths=None):
438
439
440 def _is_platform_viable(seed_dir):
441- """Check platform environment to report if this datasource may run."""
442- asset_tag = util.read_dmi_data('chassis-asset-tag')
443- if asset_tag == AZURE_CHASSIS_ASSET_TAG:
444- return True
445- LOG.debug("Non-Azure DMI asset tag '%s' discovered.", asset_tag)
446- if os.path.exists(os.path.join(seed_dir, 'ovf-env.xml')):
447- return True
448- return False
449+ with events.ReportEventStack(
450+ name="check-platform-viability",
451+ description="found azure asset tag",
452+ parent=azure_ds_reporter) as evt:
453+
454+ """Check platform environment to report if this datasource may run."""
455+ asset_tag = util.read_dmi_data('chassis-asset-tag')
456+ if asset_tag == AZURE_CHASSIS_ASSET_TAG:
457+ return True
458+ LOG.debug("Non-Azure DMI asset tag '%s' discovered.", asset_tag)
459+ evt.description = "Non-Azure DMI asset tag '%s' discovered.", asset_tag
460+ if os.path.exists(os.path.join(seed_dir, 'ovf-env.xml')):
461+ return True
462+ return False
463
464
465 class BrokenAzureDataSource(Exception):
466diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py
467old mode 100644
468new mode 100755
469index 2829dd2..d3af05e
470--- a/cloudinit/sources/helpers/azure.py
471+++ b/cloudinit/sources/helpers/azure.py
472@@ -16,10 +16,27 @@ from xml.etree import ElementTree
473
474 from cloudinit import url_helper
475 from cloudinit import util
476+from cloudinit.reporting import events
477
478 LOG = logging.getLogger(__name__)
479
480
481+azure_ds_reporter = events.ReportEventStack(
482+ name="azure-ds",
483+ description="initialize reporter for azure ds",
484+ reporting_enabled=True)
485+
486+
487+def azure_ds_telemetry_reporter(func):
488+ def impl(*args, **kwargs):
489+ with events.ReportEventStack(
490+ name=func.__name__,
491+ description=func.__name__,
492+ parent=azure_ds_reporter):
493+ return func(*args, **kwargs)
494+ return impl
495+
496+
497 @contextmanager
498 def cd(newdir):
499 prevdir = os.getcwd()
500@@ -119,6 +136,7 @@ class OpenSSLManager(object):
501 def clean_up(self):
502 util.del_dir(self.tmpdir)
503
504+ @azure_ds_telemetry_reporter
505 def generate_certificate(self):
506 LOG.debug('Generating certificate for communication with fabric...')
507 if self.certificate is not None:
508@@ -139,17 +157,20 @@ class OpenSSLManager(object):
509 LOG.debug('New certificate generated.')
510
511 @staticmethod
512+ @azure_ds_telemetry_reporter
513 def _run_x509_action(action, cert):
514 cmd = ['openssl', 'x509', '-noout', action]
515 result, _ = util.subp(cmd, data=cert)
516 return result
517
518+ @azure_ds_telemetry_reporter
519 def _get_ssh_key_from_cert(self, certificate):
520 pub_key = self._run_x509_action('-pubkey', certificate)
521 keygen_cmd = ['ssh-keygen', '-i', '-m', 'PKCS8', '-f', '/dev/stdin']
522 ssh_key, _ = util.subp(keygen_cmd, data=pub_key)
523 return ssh_key
524
525+ @azure_ds_telemetry_reporter
526 def _get_fingerprint_from_cert(self, certificate):
527 """openssl x509 formats fingerprints as so:
528 'SHA1 Fingerprint=07:3E:19:D1:4D:1C:79:92:24:C6:A0:FD:8D:DA:\
529@@ -163,6 +184,7 @@ class OpenSSLManager(object):
530 octets = raw_fp[eq+1:-1].split(':')
531 return ''.join(octets)
532
533+ @azure_ds_telemetry_reporter
534 def _decrypt_certs_from_xml(self, certificates_xml):
535 """Decrypt the certificates XML document using the our private key;
536 return the list of certs and private keys contained in the doc.
537@@ -185,6 +207,7 @@ class OpenSSLManager(object):
538 shell=True, data=b'\n'.join(lines))
539 return out
540
541+ @azure_ds_telemetry_reporter
542 def parse_certificates(self, certificates_xml):
543 """Given the Certificates XML document, return a dictionary of
544 fingerprints and associated SSH keys derived from the certs."""
545@@ -265,11 +288,13 @@ class WALinuxAgentShim(object):
546 return socket.inet_ntoa(packed_bytes)
547
548 @staticmethod
549+ @azure_ds_telemetry_reporter
550 def _networkd_get_value_from_leases(leases_d=None):
551 return dhcp.networkd_get_option_from_leases(
552 'OPTION_245', leases_d=leases_d)
553
554 @staticmethod
555+ @azure_ds_telemetry_reporter
556 def _get_value_from_leases_file(fallback_lease_file):
557 leases = []
558 content = util.load_file(fallback_lease_file)
559@@ -287,6 +312,7 @@ class WALinuxAgentShim(object):
560 return leases[-1]
561
562 @staticmethod
563+ @azure_ds_telemetry_reporter
564 def _load_dhclient_json():
565 dhcp_options = {}
566 hooks_dir = WALinuxAgentShim._get_hooks_dir()
567@@ -305,6 +331,7 @@ class WALinuxAgentShim(object):
568 return dhcp_options
569
570 @staticmethod
571+ @azure_ds_telemetry_reporter
572 def _get_value_from_dhcpoptions(dhcp_options):
573 if dhcp_options is None:
574 return None
575@@ -318,6 +345,7 @@ class WALinuxAgentShim(object):
576 return _value
577
578 @staticmethod
579+ @azure_ds_telemetry_reporter
580 def find_endpoint(fallback_lease_file=None, dhcp245=None):
581 value = None
582 if dhcp245 is not None:
583@@ -352,6 +380,7 @@ class WALinuxAgentShim(object):
584 LOG.debug('Azure endpoint found at %s', endpoint_ip_address)
585 return endpoint_ip_address
586
587+ @azure_ds_telemetry_reporter
588 def register_with_azure_and_fetch_data(self, pubkey_info=None):
589 if self.openssl_manager is None:
590 self.openssl_manager = OpenSSLManager()
591@@ -404,6 +433,7 @@ class WALinuxAgentShim(object):
592
593 return keys
594
595+ @azure_ds_telemetry_reporter
596 def _report_ready(self, goal_state, http_client):
597 LOG.debug('Reporting ready to Azure fabric.')
598 document = self.REPORT_READY_XML_TEMPLATE.format(
599@@ -419,6 +449,7 @@ class WALinuxAgentShim(object):
600 LOG.info('Reported ready to Azure fabric.')
601
602
603+@azure_ds_telemetry_reporter
604 def get_metadata_from_fabric(fallback_lease_file=None, dhcp_opts=None,
605 pubkey_info=None):
606 shim = WALinuxAgentShim(fallback_lease_file=fallback_lease_file,

Subscribers

People subscribed via source and target branches