
cloud-init-dev team mailing list archive

Re: [Merge] ~chad.smith/cloud-init:feature/os-local into cloud-init:master

 

As you suggested, the 'get_network_metadata' name is weird.

And we do have to think about what to do for SRU on this.
I guess we'd want to enable it via a datasource config setting, so that
when we SRU we can turn that setting to false and keep the old path.
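
Something like the sketch below is roughly what I have in mind; the
'apply_network_config' flag name and the helper are purely illustrative
here, not a final interface:

# Hypothetical sketch: read an opt-in flag from the OpenStack datasource
# config and keep the old init-net path when it is disabled.
def use_init_local_crawl(ds_cfg):
    """Return True when the new init-local metadata crawl should run."""
    if not isinstance(ds_cfg, dict):
        return True
    return bool(ds_cfg.get('apply_network_config', True))

# An SRU could then ship a default along the lines of:
#   datasource:
#     OpenStack:
#       apply_network_config: false
print(use_init_local_crawl({'apply_network_config': False}))  # old path
print(use_init_local_crawl({}))                               # new path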

The rest of this I think looks pretty straightforward.

Comments inline.


Diff comments:

> diff --git a/cloudinit/sources/DataSourceOpenStack.py b/cloudinit/sources/DataSourceOpenStack.py
> index fb166ae..b81415e 100644
> --- a/cloudinit/sources/DataSourceOpenStack.py
> +++ b/cloudinit/sources/DataSourceOpenStack.py
> @@ -99,38 +79,58 @@ class DataSourceOpenStack(openstack.SourceMixin, sources.DataSource):
>          self.metadata_address = url2base.get(avail_url)
>          return bool(avail_url)
>  
> -    def _get_data(self):
> -        try:
> -            if not self.wait_for_metadata_service():
> -                return False
> -        except IOError:
> -            return False
> +    def check_instance_id(self, sys_cfg):
> +        # quickly (local check only) if self.instance_id is still valid
> +        return sources.instance_id_matches_system_uuid(self.get_instance_id())
>  
> -        (_max_wait, timeout, retries) = self._get_url_settings()
> +    @property
> +    def network_config(self):
> +        """Return a network config dict for rendering ENI or netplan files."""
> +        if self._network_config != sources.UNSET:
> +            return self._network_config
>  
> -        try:
> -            results = util.log_time(LOG.debug,
> -                                    'Crawl of openstack metadata service',
> -                                    read_metadata_service,
> -                                    args=[self.metadata_address],
> -                                    kwargs={'ssl_details': self.ssl_details,
> -                                            'retries': retries,
> -                                            'timeout': timeout})
> -        except openstack.NonReadable:
> -            return False
> -        except (openstack.BrokenMetadata, IOError):
> -            util.logexc(LOG, "Broken metadata address %s",
> -                        self.metadata_address)
> -            return False
> +        if self.network_json == sources.UNSET:
> +            # this would happen if get_data hadn't been called. leave as UNSET
> +            LOG.warning(
> +                "Unexpected call to network_config when network_json is None.")
> +            return None
> +
> +        LOG.debug("network config provided via network_json")
> +        self._network_config = openstack.convert_net_json(
> +            self.network_json, known_macs=None)
> +        return self._network_config
> +
> +    def _get_data(self):
> +        """Crawl metadata, parse and persist that data for this instance.
> +
> +        @return: True when metadata discovered indicates OpenStack datasource.
> +            False when unable to contact metadata service or when metadata
> +            format is invalid or disabled.
> +        """
> +        if self.get_network_metadata:  # Setup networking in init-local stage.

We can't set up networking after we've already set up fallback networking,
at least not yet. Once we get to event-based updating of networking, we
possibly could.

> +            try:
> +                with EphemeralDHCPv4(self.fallback_interface):
> +                    results = util.log_time(
> +                        logfunc=LOG.debug, msg='Crawl of metadata service',
> +                        func=self._crawl_metadata)
> +            except (NoDHCPLeaseError, sources.InvalidMetaDataException) as e:
> +                util.logexc(LOG, str(e))
> +                return False
> +        else:
> +            try:
> +                results = self._crawl_metadata()
> +            except sources.InvalidMetaDataException as e:
> +                util.logexc(LOG, str(e))
> +                return False
>  
>          self.dsmode = self._determine_dsmode([results.get('dsmode')])
>          if self.dsmode == sources.DSMODE_DISABLED:
>              return False
> -
>          md = results.get('metadata', {})
>          md = util.mergemanydict([md, DEFAULT_METADATA])
>          self.metadata = md
>          self.ec2_metadata = results.get('ec2-metadata')
> +        self.network_json = results.get('networkdata')
>          self.userdata_raw = results.get('userdata')
>          self.version = results['version']
>          self.files.update(results.get('files', {}))
> diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
> index df0b374..b3ff9fb 100644
> --- a/cloudinit/sources/__init__.py
> +++ b/cloudinit/sources/__init__.py
> @@ -149,6 +173,41 @@ class DataSource(object):
>              'Subclasses of DataSource must implement _get_data which'
>              ' sets self.metadata, vendordata_raw and userdata_raw.')
>  
> +    def get_url_params(self):
> +        """Return the Datasource's prefered url_read parameters.
> +
> +        Subclasses may override url_max_wait, url_timeout, url_retries.
> +
> +        @return: A 3-tuple of  max_wait_seconds, timeout_seconds, num_retries.

I kind of agree. I don't know why I didn't use namedtuple more...
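
Something like this would keep the call sites readable without changing the
values returned (rough, untested sketch; the field names and default values
here are just placeholders):

from collections import namedtuple

URLParams = namedtuple(
    'URLParams', ['max_wait_seconds', 'timeout_seconds', 'num_retries'])

def url_params_from_cfg(ds_cfg, max_wait=120, timeout=10, retries=5):
    """Build URLParams from a datasource config dict, keeping the given
    defaults when a value is missing or not an int."""
    def _int_or(key, default):
        try:
            return int(ds_cfg.get(key, default))
        except (TypeError, ValueError):
            return default
    return URLParams(
        max_wait_seconds=_int_or('max_wait', max_wait),
        timeout_seconds=max(0, _int_or('timeout', timeout)),
        num_retries=_int_or('retries', retries))

params = url_params_from_cfg({'timeout': '5'})
print(params.max_wait_seconds, params.timeout_seconds, params.num_retries)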

> +        """
> +        max_wait = self.url_max_wait
> +        try:
> +            max_wait = int(self.ds_cfg.get("max_wait", self.url_max_wait))
> +        except ValueError:
> +            util.logexc(
> +                LOG, "Config max_wait '%s' is not an int, using default '%s'",
> +                self.ds_cfg.get("max_wait"), max_wait)
> +
> +        timeout = self.url_timeout
> +        try:
> +            timeout = max(
> +                0, int(self.ds_cfg.get("timeout", self.url_timeout)))
> +        except ValueError:
> +            timeout = self.url_timeout
> +            util.logexc(
> +                LOG, "Config timeout '%s' is not an int, using default '%s'",
> +                self.ds_cfg.get('timeout'), timeout)
> +
> +        retries = self.url_retries
> +        try:
> +            retries = int(self.ds_cfg.get("retries", self.url_retries))
> +        except Exception:
> +            util.logexc(
> +                LOG, "Config retries '%s' is not an int, using default '%s'",
> +                self.ds_cfg.get('retries'), retries)
> +
> +        return (max_wait, timeout, retries)
> +
>      def get_userdata(self, apply_filter=False):
>          if self.userdata is None:
>              self.userdata = self.ud_proc.process(self.get_userdata_raw())
> @@ -162,6 +221,24 @@ class DataSource(object):
>          return self.vendordata
>  
>      @property
> +    def fallback_interface(self):
> +        """Determine the network interface used during local network config."""
> +        if self._fallback_interface is None:
> +            # fallback_nic was used at one point, so restored objects may

I'd rather leave this silliness to the Ec2 datasource than move it up to
the base class.
Ec2 can still have its behavior and utilize super().fallback_interface if
you want.
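
Roughly something like this in DataSourceEc2 (untested sketch against the
base-class property you're proposing; names as in the diff above):

from cloudinit import sources

class DataSourceEc2(sources.DataSource):

    @property
    def fallback_interface(self):
        if self._fallback_interface is None:
            # 'fallback_nic' was used at one point in Ec2, so restored
            # (unpickled) objects may still carry it; honor that here only.
            legacy_nic = getattr(self, 'fallback_nic', None)
            if legacy_nic:
                self._fallback_interface = legacy_nic
                self.fallback_nic = None
            else:
                return super(DataSourceEc2, self).fallback_interface
        return self._fallback_interface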

> +            # have an attribute there. respect that if found.
> +            _legacy_fbnic = getattr(self, 'fallback_nic', None)
> +            if _legacy_fbnic:
> +                self._fallback_interface = _legacy_fbnic
> +                self.fallback_nic = None
> +            else:
> +                self._fallback_interface = net.find_fallback_nic()
> +                if self._fallback_interface is None:
> +                    LOG.warning(
> +                        "Did not find a fallback interface on %s.",
> +                        self.cloud_name)
> +        return self._fallback_interface
> +
> +    @property
>      def cloud_name(self):
>          """Return lowercase cloud name as determined by the datasource.
>  


-- 
https://code.launchpad.net/~chad.smith/cloud-init/+git/cloud-init/+merge/345806
Your team cloud-init commiters is requested to review the proposed merge of ~chad.smith/cloud-init:feature/os-local into cloud-init:master.

