Merge ~vtqanh/cloud-init:addKVPtelemetry into cloud-init:master

Proposed by Anh Vo (MSFT)
Status: Merged
Approved by: Ryan Harper
Approved revision: d0778d381caad5b9edbdca013eb443652bb8c81b
Merge reported by: Server Team CI bot
Merged at revision: not available
Proposed branch: ~vtqanh/cloud-init:addKVPtelemetry
Merge into: cloud-init:master
Diff against target: 606 lines (+179/-83)
2 files modified
cloudinit/sources/DataSourceAzure.py (+148/-83)
cloudinit/sources/helpers/azure.py (+31/-0)
Reviewer Review Type Date Requested Status
Server Team CI bot continuous-integration Approve
Ryan Harper Approve
Review via email: mp+365374@code.launchpad.net

Commit message

DatasourceAzure: add additional logging for azure datasource

Create an Azure logging decorator and use additional ReportEventStack
context managers to provide additional logging details.

To post a comment you must log in.
Revision history for this message
Jason Zions (jasonzio) wrote :

LGTM

Revision history for this message
Ryan Harper (raharper) wrote :

I've pointed CI at this branch.

Revision history for this message
Server Team CI bot (server-team-bot) wrote :

FAILED: Continuous integration, rev:d0778d381caad5b9edbdca013eb443652bb8c81b
https://jenkins.ubuntu.com/server/job/cloud-init-ci/663/
Executed test runs:
    FAILED: Checkout

Click here to trigger a rebuild:
https://jenkins.ubuntu.com/server/job/cloud-init-ci/663/rebuild

review: Needs Fixing (continuous-integration)
Revision history for this message
Anh Vo (MSFT) (vtqanh) wrote :

Looks like CI failed to check out due to some authentication issue:

Are there any settings necessary on my branch to allow this?

Cloning the remote Git repository
Cloning repository https://git.launchpad.net/~vtqanh/cloud-init:addKVPtelemetry
 > git init /var/lib/jenkins/slaves/torkoal/workspace/cloud-init-ci # timeout=10
Fetching upstream changes from https://git.launchpad.net/~vtqanh/cloud-init:addKVPtelemetry
 > git --version # timeout=10
 > git fetch --tags --progress https://git.launchpad.net/~vtqanh/cloud-init:addKVPtelemetry +refs/heads/*:refs/remotes/origin/*
ERROR: Error cloning remote repo 'origin'
hudson.plugins.git.GitException: Command "git fetch --tags --progress https://git.launchpad.net/~vtqanh/cloud-init:addKVPtelemetry +refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout:
stderr: remote: Authorisation required.

Revision history for this message
Ryan Harper (raharper) wrote :

Likely my fault, I'll submit again.

Revision history for this message
Server Team CI bot (server-team-bot) wrote :

PASSED: Continuous integration, rev:d0778d381caad5b9edbdca013eb443652bb8c81b
https://jenkins.ubuntu.com/server/job/cloud-init-ci/664/
Executed test runs:
    SUCCESS: Checkout
    SUCCESS: Unit & Style Tests
    SUCCESS: Ubuntu LTS: Build
    SUCCESS: Ubuntu LTS: Integration
    IN_PROGRESS: Declarative: Post Actions

Click here to trigger a rebuild:
https://jenkins.ubuntu.com/server/job/cloud-init-ci/664/rebuild

review: Approve (continuous-integration)
Revision history for this message
Ryan Harper (raharper) :
review: Approve
Revision history for this message
Server Team CI bot (server-team-bot) :
review: Approve (continuous-integration)

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
0old mode 1006440old mode 100644
1new mode 1007551new mode 100755
index b4e3f06..d4230b3
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -21,10 +21,14 @@ from cloudinit import net
21from cloudinit.event import EventType21from cloudinit.event import EventType
22from cloudinit.net.dhcp import EphemeralDHCPv422from cloudinit.net.dhcp import EphemeralDHCPv4
23from cloudinit import sources23from cloudinit import sources
24from cloudinit.sources.helpers.azure import get_metadata_from_fabric
25from cloudinit.sources.helpers import netlink24from cloudinit.sources.helpers import netlink
26from cloudinit.url_helper import UrlError, readurl, retry_on_url_exc25from cloudinit.url_helper import UrlError, readurl, retry_on_url_exc
27from cloudinit import util26from cloudinit import util
27from cloudinit.reporting import events
28
29from cloudinit.sources.helpers.azure import (azure_ds_reporter,
30 azure_ds_telemetry_reporter,
31 get_metadata_from_fabric)
2832
29LOG = logging.getLogger(__name__)33LOG = logging.getLogger(__name__)
3034
@@ -244,6 +248,7 @@ def set_hostname(hostname, hostname_command='hostname'):
244 util.subp([hostname_command, hostname])248 util.subp([hostname_command, hostname])
245249
246250
251@azure_ds_telemetry_reporter
247@contextlib.contextmanager252@contextlib.contextmanager
248def temporary_hostname(temp_hostname, cfg, hostname_command='hostname'):253def temporary_hostname(temp_hostname, cfg, hostname_command='hostname'):
249 """254 """
@@ -290,6 +295,7 @@ class DataSourceAzure(sources.DataSource):
290 root = sources.DataSource.__str__(self)295 root = sources.DataSource.__str__(self)
291 return "%s [seed=%s]" % (root, self.seed)296 return "%s [seed=%s]" % (root, self.seed)
292297
298 @azure_ds_telemetry_reporter
293 def bounce_network_with_azure_hostname(self):299 def bounce_network_with_azure_hostname(self):
294 # When using cloud-init to provision, we have to set the hostname from300 # When using cloud-init to provision, we have to set the hostname from
295 # the metadata and "bounce" the network to force DDNS to update via301 # the metadata and "bounce" the network to force DDNS to update via
@@ -315,6 +321,7 @@ class DataSourceAzure(sources.DataSource):
315 util.logexc(LOG, "handling set_hostname failed")321 util.logexc(LOG, "handling set_hostname failed")
316 return False322 return False
317323
324 @azure_ds_telemetry_reporter
318 def get_metadata_from_agent(self):325 def get_metadata_from_agent(self):
319 temp_hostname = self.metadata.get('local-hostname')326 temp_hostname = self.metadata.get('local-hostname')
320 agent_cmd = self.ds_cfg['agent_command']327 agent_cmd = self.ds_cfg['agent_command']
@@ -344,15 +351,18 @@ class DataSourceAzure(sources.DataSource):
344 LOG.debug("ssh authentication: "351 LOG.debug("ssh authentication: "
345 "using fingerprint from fabirc")352 "using fingerprint from fabirc")
346353
347 # wait very long for public SSH keys to arrive354 with events.ReportEventStack(
348 # https://bugs.launchpad.net/cloud-init/+bug/1717611355 name="waiting-for-ssh-public-key",
349 missing = util.log_time(logfunc=LOG.debug,356 description="wait for agents to retrieve ssh keys",
350 msg="waiting for SSH public key files",357 parent=azure_ds_reporter):
351 func=util.wait_for_files,358 # wait very long for public SSH keys to arrive
352 args=(fp_files, 900))359 # https://bugs.launchpad.net/cloud-init/+bug/1717611
353360 missing = util.log_time(logfunc=LOG.debug,
354 if len(missing):361 msg="waiting for SSH public key files",
355 LOG.warning("Did not find files, but going on: %s", missing)362 func=util.wait_for_files,
363 args=(fp_files, 900))
364 if len(missing):
365 LOG.warning("Did not find files, but going on: %s", missing)
356366
357 metadata = {}367 metadata = {}
358 metadata['public-keys'] = key_value or pubkeys_from_crt_files(fp_files)368 metadata['public-keys'] = key_value or pubkeys_from_crt_files(fp_files)
@@ -366,6 +376,7 @@ class DataSourceAzure(sources.DataSource):
366 subplatform_type = 'seed-dir'376 subplatform_type = 'seed-dir'
367 return '%s (%s)' % (subplatform_type, self.seed)377 return '%s (%s)' % (subplatform_type, self.seed)
368378
379 @azure_ds_telemetry_reporter
369 def crawl_metadata(self):380 def crawl_metadata(self):
370 """Walk all instance metadata sources returning a dict on success.381 """Walk all instance metadata sources returning a dict on success.
371382
@@ -467,6 +478,7 @@ class DataSourceAzure(sources.DataSource):
467 super(DataSourceAzure, self).clear_cached_attrs(attr_defaults)478 super(DataSourceAzure, self).clear_cached_attrs(attr_defaults)
468 self._metadata_imds = sources.UNSET479 self._metadata_imds = sources.UNSET
469480
481 @azure_ds_telemetry_reporter
470 def _get_data(self):482 def _get_data(self):
471 """Crawl and process datasource metadata caching metadata as attrs.483 """Crawl and process datasource metadata caching metadata as attrs.
472484
@@ -513,6 +525,7 @@ class DataSourceAzure(sources.DataSource):
513 # quickly (local check only) if self.instance_id is still valid525 # quickly (local check only) if self.instance_id is still valid
514 return sources.instance_id_matches_system_uuid(self.get_instance_id())526 return sources.instance_id_matches_system_uuid(self.get_instance_id())
515527
528 @azure_ds_telemetry_reporter
516 def setup(self, is_new_instance):529 def setup(self, is_new_instance):
517 if self._negotiated is False:530 if self._negotiated is False:
518 LOG.debug("negotiating for %s (new_instance=%s)",531 LOG.debug("negotiating for %s (new_instance=%s)",
@@ -580,6 +593,7 @@ class DataSourceAzure(sources.DataSource):
580 if nl_sock:593 if nl_sock:
581 nl_sock.close()594 nl_sock.close()
582595
596 @azure_ds_telemetry_reporter
583 def _report_ready(self, lease):597 def _report_ready(self, lease):
584 """Tells the fabric provisioning has completed """598 """Tells the fabric provisioning has completed """
585 try:599 try:
@@ -617,9 +631,14 @@ class DataSourceAzure(sources.DataSource):
617 def _reprovision(self):631 def _reprovision(self):
618 """Initiate the reprovisioning workflow."""632 """Initiate the reprovisioning workflow."""
619 contents = self._poll_imds()633 contents = self._poll_imds()
620 md, ud, cfg = read_azure_ovf(contents)634 with events.ReportEventStack(
621 return (md, ud, cfg, {'ovf-env.xml': contents})635 name="reprovisioning-read-azure-ovf",
622636 description="read azure ovf during reprovisioning",
637 parent=azure_ds_reporter):
638 md, ud, cfg = read_azure_ovf(contents)
639 return (md, ud, cfg, {'ovf-env.xml': contents})
640
641 @azure_ds_telemetry_reporter
623 def _negotiate(self):642 def _negotiate(self):
624 """Negotiate with fabric and return data from it.643 """Negotiate with fabric and return data from it.
625644
@@ -652,6 +671,7 @@ class DataSourceAzure(sources.DataSource):
652 util.del_file(REPROVISION_MARKER_FILE)671 util.del_file(REPROVISION_MARKER_FILE)
653 return fabric_data672 return fabric_data
654673
674 @azure_ds_telemetry_reporter
655 def activate(self, cfg, is_new_instance):675 def activate(self, cfg, is_new_instance):
656 address_ephemeral_resize(is_new_instance=is_new_instance,676 address_ephemeral_resize(is_new_instance=is_new_instance,
657 preserve_ntfs=self.ds_cfg.get(677 preserve_ntfs=self.ds_cfg.get(
@@ -690,12 +710,14 @@ def _partitions_on_device(devpath, maxnum=16):
690 return []710 return []
691711
692712
713@azure_ds_telemetry_reporter
693def _has_ntfs_filesystem(devpath):714def _has_ntfs_filesystem(devpath):
694 ntfs_devices = util.find_devs_with("TYPE=ntfs", no_cache=True)715 ntfs_devices = util.find_devs_with("TYPE=ntfs", no_cache=True)
695 LOG.debug('ntfs_devices found = %s', ntfs_devices)716 LOG.debug('ntfs_devices found = %s', ntfs_devices)
696 return os.path.realpath(devpath) in ntfs_devices717 return os.path.realpath(devpath) in ntfs_devices
697718
698719
720@azure_ds_telemetry_reporter
699def can_dev_be_reformatted(devpath, preserve_ntfs):721def can_dev_be_reformatted(devpath, preserve_ntfs):
700 """Determine if the ephemeral drive at devpath should be reformatted.722 """Determine if the ephemeral drive at devpath should be reformatted.
701723
@@ -744,43 +766,59 @@ def can_dev_be_reformatted(devpath, preserve_ntfs):
744 (cand_part, cand_path, devpath))766 (cand_part, cand_path, devpath))
745 return False, msg767 return False, msg
746768
769 @azure_ds_telemetry_reporter
747 def count_files(mp):770 def count_files(mp):
748 ignored = set(['dataloss_warning_readme.txt'])771 ignored = set(['dataloss_warning_readme.txt'])
749 return len([f for f in os.listdir(mp) if f.lower() not in ignored])772 return len([f for f in os.listdir(mp) if f.lower() not in ignored])
750773
751 bmsg = ('partition %s (%s) on device %s was ntfs formatted' %774 bmsg = ('partition %s (%s) on device %s was ntfs formatted' %
752 (cand_part, cand_path, devpath))775 (cand_part, cand_path, devpath))
753 try:776
754 file_count = util.mount_cb(cand_path, count_files, mtype="ntfs",777 with events.ReportEventStack(
755 update_env_for_mount={'LANG': 'C'})778 name="mount-ntfs-and-count",
756 except util.MountFailedError as e:779 description="mount-ntfs-and-count",
757 if "unknown filesystem type 'ntfs'" in str(e):780 parent=azure_ds_reporter) as evt:
758 return True, (bmsg + ' but this system cannot mount NTFS,'781 try:
759 ' assuming there are no important files.'782 file_count = util.mount_cb(cand_path, count_files, mtype="ntfs",
760 ' Formatting allowed.')783 update_env_for_mount={'LANG': 'C'})
761 return False, bmsg + ' but mount of %s failed: %s' % (cand_part, e)784 except util.MountFailedError as e:
762785 evt.description = "cannot mount ntfs"
763 if file_count != 0:786 if "unknown filesystem type 'ntfs'" in str(e):
764 LOG.warning("it looks like you're using NTFS on the ephemeral disk, "787 return True, (bmsg + ' but this system cannot mount NTFS,'
765 'to ensure that filesystem does not get wiped, set '788 ' assuming there are no important files.'
766 '%s.%s in config', '.'.join(DS_CFG_PATH),789 ' Formatting allowed.')
767 DS_CFG_KEY_PRESERVE_NTFS)790 return False, bmsg + ' but mount of %s failed: %s' % (cand_part, e)
768 return False, bmsg + ' but had %d files on it.' % file_count791
792 if file_count != 0:
793 evt.description = "mounted and counted %d files" % file_count
794 LOG.warning("it looks like you're using NTFS on the ephemeral"
795 " disk, to ensure that filesystem does not get wiped,"
796 " set %s.%s in config", '.'.join(DS_CFG_PATH),
797 DS_CFG_KEY_PRESERVE_NTFS)
798 return False, bmsg + ' but had %d files on it.' % file_count
769799
770 return True, bmsg + ' and had no important files. Safe for reformatting.'800 return True, bmsg + ' and had no important files. Safe for reformatting.'
771801
772802
803@azure_ds_telemetry_reporter
773def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,804def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,
774 is_new_instance=False, preserve_ntfs=False):805 is_new_instance=False, preserve_ntfs=False):
775 # wait for ephemeral disk to come up806 # wait for ephemeral disk to come up
776 naplen = .2807 naplen = .2
777 missing = util.wait_for_files([devpath], maxwait=maxwait, naplen=naplen,808 with events.ReportEventStack(
778 log_pre="Azure ephemeral disk: ")809 name="wait-for-ephemeral-disk",
779810 description="wait for ephemeral disk",
780 if missing:811 parent=azure_ds_reporter):
781 LOG.warning("ephemeral device '%s' did not appear after %d seconds.",812 missing = util.wait_for_files([devpath],
782 devpath, maxwait)813 maxwait=maxwait,
783 return814 naplen=naplen,
815 log_pre="Azure ephemeral disk: ")
816
817 if missing:
818 LOG.warning("ephemeral device '%s' did"
819 " not appear after %d seconds.",
820 devpath, maxwait)
821 return
784822
785 result = False823 result = False
786 msg = None824 msg = None
@@ -808,6 +846,7 @@ def address_ephemeral_resize(devpath=RESOURCE_DISK_PATH, maxwait=120,
808 return846 return
809847
810848
849@azure_ds_telemetry_reporter
811def perform_hostname_bounce(hostname, cfg, prev_hostname):850def perform_hostname_bounce(hostname, cfg, prev_hostname):
812 # set the hostname to 'hostname' if it is not already set to that.851 # set the hostname to 'hostname' if it is not already set to that.
813 # then, if policy is not off, bounce the interface using command852 # then, if policy is not off, bounce the interface using command
@@ -843,6 +882,7 @@ def perform_hostname_bounce(hostname, cfg, prev_hostname):
843 return True882 return True
844883
845884
885@azure_ds_telemetry_reporter
846def crtfile_to_pubkey(fname, data=None):886def crtfile_to_pubkey(fname, data=None):
847 pipeline = ('openssl x509 -noout -pubkey < "$0" |'887 pipeline = ('openssl x509 -noout -pubkey < "$0" |'
848 'ssh-keygen -i -m PKCS8 -f /dev/stdin')888 'ssh-keygen -i -m PKCS8 -f /dev/stdin')
@@ -851,6 +891,7 @@ def crtfile_to_pubkey(fname, data=None):
851 return out.rstrip()891 return out.rstrip()
852892
853893
894@azure_ds_telemetry_reporter
854def pubkeys_from_crt_files(flist):895def pubkeys_from_crt_files(flist):
855 pubkeys = []896 pubkeys = []
856 errors = []897 errors = []
@@ -866,6 +907,7 @@ def pubkeys_from_crt_files(flist):
866 return pubkeys907 return pubkeys
867908
868909
910@azure_ds_telemetry_reporter
869def write_files(datadir, files, dirmode=None):911def write_files(datadir, files, dirmode=None):
870912
871 def _redact_password(cnt, fname):913 def _redact_password(cnt, fname):
@@ -893,6 +935,7 @@ def write_files(datadir, files, dirmode=None):
893 util.write_file(filename=fname, content=content, mode=0o600)935 util.write_file(filename=fname, content=content, mode=0o600)
894936
895937
938@azure_ds_telemetry_reporter
896def invoke_agent(cmd):939def invoke_agent(cmd):
897 # this is a function itself to simplify patching it for test940 # this is a function itself to simplify patching it for test
898 if cmd:941 if cmd:
@@ -912,6 +955,7 @@ def find_child(node, filter_func):
912 return ret955 return ret
913956
914957
958@azure_ds_telemetry_reporter
915def load_azure_ovf_pubkeys(sshnode):959def load_azure_ovf_pubkeys(sshnode):
916 # This parses a 'SSH' node formatted like below, and returns960 # This parses a 'SSH' node formatted like below, and returns
917 # an array of dicts.961 # an array of dicts.
@@ -964,6 +1008,7 @@ def load_azure_ovf_pubkeys(sshnode):
964 return found1008 return found
9651009
9661010
1011@azure_ds_telemetry_reporter
967def read_azure_ovf(contents):1012def read_azure_ovf(contents):
968 try:1013 try:
969 dom = minidom.parseString(contents)1014 dom = minidom.parseString(contents)
@@ -1064,6 +1109,7 @@ def read_azure_ovf(contents):
1064 return (md, ud, cfg)1109 return (md, ud, cfg)
10651110
10661111
1112@azure_ds_telemetry_reporter
1067def _extract_preprovisioned_vm_setting(dom):1113def _extract_preprovisioned_vm_setting(dom):
1068 """Read the preprovision flag from the ovf. It should not1114 """Read the preprovision flag from the ovf. It should not
1069 exist unless true."""1115 exist unless true."""
@@ -1092,6 +1138,7 @@ def encrypt_pass(password, salt_id="$6$"):
1092 return crypt.crypt(password, salt_id + util.rand_str(strlen=16))1138 return crypt.crypt(password, salt_id + util.rand_str(strlen=16))
10931139
10941140
1141@azure_ds_telemetry_reporter
1095def _check_freebsd_cdrom(cdrom_dev):1142def _check_freebsd_cdrom(cdrom_dev):
1096 """Return boolean indicating path to cdrom device has content."""1143 """Return boolean indicating path to cdrom device has content."""
1097 try:1144 try:
@@ -1103,6 +1150,7 @@ def _check_freebsd_cdrom(cdrom_dev):
1103 return False1150 return False
11041151
11051152
1153@azure_ds_telemetry_reporter
1106def _get_random_seed(source=PLATFORM_ENTROPY_SOURCE):1154def _get_random_seed(source=PLATFORM_ENTROPY_SOURCE):
1107 """Return content random seed file if available, otherwise,1155 """Return content random seed file if available, otherwise,
1108 return None."""1156 return None."""
@@ -1126,6 +1174,7 @@ def _get_random_seed(source=PLATFORM_ENTROPY_SOURCE):
1126 return seed1174 return seed
11271175
11281176
1177@azure_ds_telemetry_reporter
1129def list_possible_azure_ds_devs():1178def list_possible_azure_ds_devs():
1130 devlist = []1179 devlist = []
1131 if util.is_FreeBSD():1180 if util.is_FreeBSD():
@@ -1140,6 +1189,7 @@ def list_possible_azure_ds_devs():
1140 return devlist1189 return devlist
11411190
11421191
1192@azure_ds_telemetry_reporter
1143def load_azure_ds_dir(source_dir):1193def load_azure_ds_dir(source_dir):
1144 ovf_file = os.path.join(source_dir, "ovf-env.xml")1194 ovf_file = os.path.join(source_dir, "ovf-env.xml")
11451195
@@ -1162,47 +1212,54 @@ def parse_network_config(imds_metadata):
1162 @param: imds_metadata: Dict of content read from IMDS network service.1212 @param: imds_metadata: Dict of content read from IMDS network service.
1163 @return: Dictionary containing network version 2 standard configuration.1213 @return: Dictionary containing network version 2 standard configuration.
1164 """1214 """
1165 if imds_metadata != sources.UNSET and imds_metadata:1215 with events.ReportEventStack(
1166 netconfig = {'version': 2, 'ethernets': {}}1216 name="parse_network_config",
1167 LOG.debug('Azure: generating network configuration from IMDS')1217 description="",
1168 network_metadata = imds_metadata['network']1218 parent=azure_ds_reporter) as evt:
1169 for idx, intf in enumerate(network_metadata['interface']):1219 if imds_metadata != sources.UNSET and imds_metadata:
1170 nicname = 'eth{idx}'.format(idx=idx)1220 netconfig = {'version': 2, 'ethernets': {}}
1171 dev_config = {}1221 LOG.debug('Azure: generating network configuration from IMDS')
1172 for addr4 in intf['ipv4']['ipAddress']:1222 network_metadata = imds_metadata['network']
1173 privateIpv4 = addr4['privateIpAddress']1223 for idx, intf in enumerate(network_metadata['interface']):
1174 if privateIpv4:1224 nicname = 'eth{idx}'.format(idx=idx)
1175 if dev_config.get('dhcp4', False):1225 dev_config = {}
1176 # Append static address config for nic > 11226 for addr4 in intf['ipv4']['ipAddress']:
1177 netPrefix = intf['ipv4']['subnet'][0].get(1227 privateIpv4 = addr4['privateIpAddress']
1178 'prefix', '24')1228 if privateIpv4:
1179 if not dev_config.get('addresses'):1229 if dev_config.get('dhcp4', False):
1180 dev_config['addresses'] = []1230 # Append static address config for nic > 1
1181 dev_config['addresses'].append(1231 netPrefix = intf['ipv4']['subnet'][0].get(
1182 '{ip}/{prefix}'.format(1232 'prefix', '24')
1183 ip=privateIpv4, prefix=netPrefix))1233 if not dev_config.get('addresses'):
1184 else:1234 dev_config['addresses'] = []
1185 dev_config['dhcp4'] = True1235 dev_config['addresses'].append(
1186 for addr6 in intf['ipv6']['ipAddress']:1236 '{ip}/{prefix}'.format(
1187 privateIpv6 = addr6['privateIpAddress']1237 ip=privateIpv4, prefix=netPrefix))
1188 if privateIpv6:1238 else:
1189 dev_config['dhcp6'] = True1239 dev_config['dhcp4'] = True
1190 break1240 for addr6 in intf['ipv6']['ipAddress']:
1191 if dev_config:1241 privateIpv6 = addr6['privateIpAddress']
1192 mac = ':'.join(re.findall(r'..', intf['macAddress']))1242 if privateIpv6:
1193 dev_config.update(1243 dev_config['dhcp6'] = True
1194 {'match': {'macaddress': mac.lower()},1244 break
1195 'set-name': nicname})1245 if dev_config:
1196 netconfig['ethernets'][nicname] = dev_config1246 mac = ':'.join(re.findall(r'..', intf['macAddress']))
1197 else:1247 dev_config.update(
1198 blacklist = ['mlx4_core']1248 {'match': {'macaddress': mac.lower()},
1199 LOG.debug('Azure: generating fallback configuration')1249 'set-name': nicname})
1200 # generate a network config, blacklist picking mlx4_core devs1250 netconfig['ethernets'][nicname] = dev_config
1201 netconfig = net.generate_fallback_config(1251 evt.description = "network config from imds"
1202 blacklist_drivers=blacklist, config_driver=True)1252 else:
1203 return netconfig1253 blacklist = ['mlx4_core']
1254 LOG.debug('Azure: generating fallback configuration')
1255 # generate a network config, blacklist picking mlx4_core devs
1256 netconfig = net.generate_fallback_config(
1257 blacklist_drivers=blacklist, config_driver=True)
1258 evt.description = "network config from fallback"
1259 return netconfig
12041260
12051261
1262@azure_ds_telemetry_reporter
1206def get_metadata_from_imds(fallback_nic, retries):1263def get_metadata_from_imds(fallback_nic, retries):
1207 """Query Azure's network metadata service, returning a dictionary.1264 """Query Azure's network metadata service, returning a dictionary.
12081265
@@ -1227,6 +1284,7 @@ def get_metadata_from_imds(fallback_nic, retries):
1227 return util.log_time(**kwargs)1284 return util.log_time(**kwargs)
12281285
12291286
1287@azure_ds_telemetry_reporter
1230def _get_metadata_from_imds(retries):1288def _get_metadata_from_imds(retries):
12311289
1232 url = IMDS_URL + "instance?api-version=2017-12-01"1290 url = IMDS_URL + "instance?api-version=2017-12-01"
@@ -1246,6 +1304,7 @@ def _get_metadata_from_imds(retries):
1246 return {}1304 return {}
12471305
12481306
1307@azure_ds_telemetry_reporter
1249def maybe_remove_ubuntu_network_config_scripts(paths=None):1308def maybe_remove_ubuntu_network_config_scripts(paths=None):
1250 """Remove Azure-specific ubuntu network config for non-primary nics.1309 """Remove Azure-specific ubuntu network config for non-primary nics.
12511310
@@ -1283,14 +1342,20 @@ def maybe_remove_ubuntu_network_config_scripts(paths=None):
12831342
12841343
1285def _is_platform_viable(seed_dir):1344def _is_platform_viable(seed_dir):
1286 """Check platform environment to report if this datasource may run."""1345 with events.ReportEventStack(
1287 asset_tag = util.read_dmi_data('chassis-asset-tag')1346 name="check-platform-viability",
1288 if asset_tag == AZURE_CHASSIS_ASSET_TAG:1347 description="found azure asset tag",
1289 return True1348 parent=azure_ds_reporter) as evt:
1290 LOG.debug("Non-Azure DMI asset tag '%s' discovered.", asset_tag)1349
1291 if os.path.exists(os.path.join(seed_dir, 'ovf-env.xml')):1350 """Check platform environment to report if this datasource may run."""
1292 return True1351 asset_tag = util.read_dmi_data('chassis-asset-tag')
1293 return False1352 if asset_tag == AZURE_CHASSIS_ASSET_TAG:
1353 return True
1354 LOG.debug("Non-Azure DMI asset tag '%s' discovered.", asset_tag)
1355 evt.description = "Non-Azure DMI asset tag '%s' discovered.", asset_tag
1356 if os.path.exists(os.path.join(seed_dir, 'ovf-env.xml')):
1357 return True
1358 return False
12941359
12951360
1296class BrokenAzureDataSource(Exception):1361class BrokenAzureDataSource(Exception):
diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py
1297old mode 1006441362old mode 100644
1298new mode 1007551363new mode 100755
index 2829dd2..d3af05e
--- a/cloudinit/sources/helpers/azure.py
+++ b/cloudinit/sources/helpers/azure.py
@@ -16,10 +16,27 @@ from xml.etree import ElementTree
1616
17from cloudinit import url_helper17from cloudinit import url_helper
18from cloudinit import util18from cloudinit import util
19from cloudinit.reporting import events
1920
20LOG = logging.getLogger(__name__)21LOG = logging.getLogger(__name__)
2122
2223
24azure_ds_reporter = events.ReportEventStack(
25 name="azure-ds",
26 description="initialize reporter for azure ds",
27 reporting_enabled=True)
28
29
30def azure_ds_telemetry_reporter(func):
31 def impl(*args, **kwargs):
32 with events.ReportEventStack(
33 name=func.__name__,
34 description=func.__name__,
35 parent=azure_ds_reporter):
36 return func(*args, **kwargs)
37 return impl
38
39
23@contextmanager40@contextmanager
24def cd(newdir):41def cd(newdir):
25 prevdir = os.getcwd()42 prevdir = os.getcwd()
@@ -119,6 +136,7 @@ class OpenSSLManager(object):
119 def clean_up(self):136 def clean_up(self):
120 util.del_dir(self.tmpdir)137 util.del_dir(self.tmpdir)
121138
139 @azure_ds_telemetry_reporter
122 def generate_certificate(self):140 def generate_certificate(self):
123 LOG.debug('Generating certificate for communication with fabric...')141 LOG.debug('Generating certificate for communication with fabric...')
124 if self.certificate is not None:142 if self.certificate is not None:
@@ -139,17 +157,20 @@ class OpenSSLManager(object):
139 LOG.debug('New certificate generated.')157 LOG.debug('New certificate generated.')
140158
141 @staticmethod159 @staticmethod
160 @azure_ds_telemetry_reporter
142 def _run_x509_action(action, cert):161 def _run_x509_action(action, cert):
143 cmd = ['openssl', 'x509', '-noout', action]162 cmd = ['openssl', 'x509', '-noout', action]
144 result, _ = util.subp(cmd, data=cert)163 result, _ = util.subp(cmd, data=cert)
145 return result164 return result
146165
166 @azure_ds_telemetry_reporter
147 def _get_ssh_key_from_cert(self, certificate):167 def _get_ssh_key_from_cert(self, certificate):
148 pub_key = self._run_x509_action('-pubkey', certificate)168 pub_key = self._run_x509_action('-pubkey', certificate)
149 keygen_cmd = ['ssh-keygen', '-i', '-m', 'PKCS8', '-f', '/dev/stdin']169 keygen_cmd = ['ssh-keygen', '-i', '-m', 'PKCS8', '-f', '/dev/stdin']
150 ssh_key, _ = util.subp(keygen_cmd, data=pub_key)170 ssh_key, _ = util.subp(keygen_cmd, data=pub_key)
151 return ssh_key171 return ssh_key
152172
173 @azure_ds_telemetry_reporter
153 def _get_fingerprint_from_cert(self, certificate):174 def _get_fingerprint_from_cert(self, certificate):
154 """openssl x509 formats fingerprints as so:175 """openssl x509 formats fingerprints as so:
155 'SHA1 Fingerprint=07:3E:19:D1:4D:1C:79:92:24:C6:A0:FD:8D:DA:\176 'SHA1 Fingerprint=07:3E:19:D1:4D:1C:79:92:24:C6:A0:FD:8D:DA:\
@@ -163,6 +184,7 @@ class OpenSSLManager(object):
163 octets = raw_fp[eq+1:-1].split(':')184 octets = raw_fp[eq+1:-1].split(':')
164 return ''.join(octets)185 return ''.join(octets)
165186
187 @azure_ds_telemetry_reporter
166 def _decrypt_certs_from_xml(self, certificates_xml):188 def _decrypt_certs_from_xml(self, certificates_xml):
167 """Decrypt the certificates XML document using the our private key;189 """Decrypt the certificates XML document using the our private key;
168 return the list of certs and private keys contained in the doc.190 return the list of certs and private keys contained in the doc.
@@ -185,6 +207,7 @@ class OpenSSLManager(object):
185 shell=True, data=b'\n'.join(lines))207 shell=True, data=b'\n'.join(lines))
186 return out208 return out
187209
210 @azure_ds_telemetry_reporter
188 def parse_certificates(self, certificates_xml):211 def parse_certificates(self, certificates_xml):
189 """Given the Certificates XML document, return a dictionary of212 """Given the Certificates XML document, return a dictionary of
190 fingerprints and associated SSH keys derived from the certs."""213 fingerprints and associated SSH keys derived from the certs."""
@@ -265,11 +288,13 @@ class WALinuxAgentShim(object):
265 return socket.inet_ntoa(packed_bytes)288 return socket.inet_ntoa(packed_bytes)
266289
267 @staticmethod290 @staticmethod
291 @azure_ds_telemetry_reporter
268 def _networkd_get_value_from_leases(leases_d=None):292 def _networkd_get_value_from_leases(leases_d=None):
269 return dhcp.networkd_get_option_from_leases(293 return dhcp.networkd_get_option_from_leases(
270 'OPTION_245', leases_d=leases_d)294 'OPTION_245', leases_d=leases_d)
271295
272 @staticmethod296 @staticmethod
297 @azure_ds_telemetry_reporter
273 def _get_value_from_leases_file(fallback_lease_file):298 def _get_value_from_leases_file(fallback_lease_file):
274 leases = []299 leases = []
275 content = util.load_file(fallback_lease_file)300 content = util.load_file(fallback_lease_file)
@@ -287,6 +312,7 @@ class WALinuxAgentShim(object):
287 return leases[-1]312 return leases[-1]
288313
289 @staticmethod314 @staticmethod
315 @azure_ds_telemetry_reporter
290 def _load_dhclient_json():316 def _load_dhclient_json():
291 dhcp_options = {}317 dhcp_options = {}
292 hooks_dir = WALinuxAgentShim._get_hooks_dir()318 hooks_dir = WALinuxAgentShim._get_hooks_dir()
@@ -305,6 +331,7 @@ class WALinuxAgentShim(object):
305 return dhcp_options331 return dhcp_options
306332
307 @staticmethod333 @staticmethod
334 @azure_ds_telemetry_reporter
308 def _get_value_from_dhcpoptions(dhcp_options):335 def _get_value_from_dhcpoptions(dhcp_options):
309 if dhcp_options is None:336 if dhcp_options is None:
310 return None337 return None
@@ -318,6 +345,7 @@ class WALinuxAgentShim(object):
318 return _value345 return _value
319346
320 @staticmethod347 @staticmethod
348 @azure_ds_telemetry_reporter
321 def find_endpoint(fallback_lease_file=None, dhcp245=None):349 def find_endpoint(fallback_lease_file=None, dhcp245=None):
322 value = None350 value = None
323 if dhcp245 is not None:351 if dhcp245 is not None:
@@ -352,6 +380,7 @@ class WALinuxAgentShim(object):
352 LOG.debug('Azure endpoint found at %s', endpoint_ip_address)380 LOG.debug('Azure endpoint found at %s', endpoint_ip_address)
353 return endpoint_ip_address381 return endpoint_ip_address
354382
383 @azure_ds_telemetry_reporter
355 def register_with_azure_and_fetch_data(self, pubkey_info=None):384 def register_with_azure_and_fetch_data(self, pubkey_info=None):
356 if self.openssl_manager is None:385 if self.openssl_manager is None:
357 self.openssl_manager = OpenSSLManager()386 self.openssl_manager = OpenSSLManager()
@@ -404,6 +433,7 @@ class WALinuxAgentShim(object):
404433
405 return keys434 return keys
406435
436 @azure_ds_telemetry_reporter
407 def _report_ready(self, goal_state, http_client):437 def _report_ready(self, goal_state, http_client):
408 LOG.debug('Reporting ready to Azure fabric.')438 LOG.debug('Reporting ready to Azure fabric.')
409 document = self.REPORT_READY_XML_TEMPLATE.format(439 document = self.REPORT_READY_XML_TEMPLATE.format(
@@ -419,6 +449,7 @@ class WALinuxAgentShim(object):
419 LOG.info('Reported ready to Azure fabric.')449 LOG.info('Reported ready to Azure fabric.')
420450
421451
452@azure_ds_telemetry_reporter
422def get_metadata_from_fabric(fallback_lease_file=None, dhcp_opts=None,453def get_metadata_from_fabric(fallback_lease_file=None, dhcp_opts=None,
423 pubkey_info=None):454 pubkey_info=None):
424 shim = WALinuxAgentShim(fallback_lease_file=fallback_lease_file,455 shim = WALinuxAgentShim(fallback_lease_file=fallback_lease_file,

Subscribers

People subscribed via source and target branches