Submit
Path:
~
/
/
lib
/
python3
/
dist-packages
/
cloudinit
/
sources
/
File Content:
DataSourceCloudCIX.py
# This file is part of cloud-init. See LICENSE file for license information.
"""CloudCIX datasource.

Fetches meta-data and user-data for a CloudCIX instance from the
link-local metadata service at http://169.254.169.254.
"""

import json
import logging
from typing import Optional

from cloudinit import dmi, sources, url_helper, util

LOG = logging.getLogger(__name__)

# Link-local IMDS endpoint(s) probed via url_helper.wait_for_url().
METADATA_URLS = ["http://169.254.169.254"]
# Highest metadata API version this datasource understands.
METADATA_VERSION = 1

# DMI system-product-name reported on CloudCIX platforms.
CLOUDCIX_DMI_NAME = "CloudCIX"


class DataSourceCloudCIX(sources.DataSource):

    dsname = "CloudCIX"
    # Setup read_url parameters through get_url_params()
    url_retries = 3
    url_timeout_seconds = 5
    url_sec_between_retries = 5

    def __init__(self, sys_cfg, distro, paths):
        super(DataSourceCloudCIX, self).__init__(sys_cfg, distro, paths)
        # Cached base URL (including version leaf) of the metadata server.
        self._metadata_url = None
        # Cached network configuration extracted from metadata.
        self._net_cfg = None

    def _get_data(self):
        """
        Fetch the user data and the metadata.

        :returns: True when metadata and userdata were retrieved and
            stored on this instance, False on any metadata failure.
        """
        try:
            crawled_data = self.crawl_metadata_service()
        except sources.InvalidMetaDataException as error:
            LOG.error(
                "Failed to read data from CloudCIX datasource: %s", error
            )
            return False

        self.metadata = crawled_data["meta-data"]
        self.userdata_raw = util.decode_binary(crawled_data["user-data"])
        return True

    def crawl_metadata_service(self) -> dict:
        """Locate the metadata server and download its payloads.

        :returns: dict with keys "meta-data" and "user-data".
        :raises: InvalidMetaDataException when no metadata URL can be
            determined or the server's responses are invalid.
        """
        md_url = self.determine_md_url()
        if md_url is None:
            raise sources.InvalidMetaDataException(
                "Could not determine metadata URL"
            )

        data = read_metadata(md_url, self.get_url_params())
        return data

    def determine_md_url(self) -> Optional[str]:
        """Return the versioned base URL of the metadata service.

        Waits for one of METADATA_URLS to become reachable, then probes
        version endpoints from METADATA_VERSION downwards and caches the
        first (highest) version whose "metadata" leaf responds OK.

        :returns: the base URL including the version leaf (e.g.
            "http://169.254.169.254/v1"), or None if unreachable.
        """
        if self._metadata_url:
            return self._metadata_url

        # Try to reach the metadata server
        url_params = self.get_url_params()
        base_url, _ = url_helper.wait_for_url(
            METADATA_URLS,
            max_wait=url_params.max_wait_seconds,
            timeout=url_params.timeout_seconds,
        )
        if not base_url:
            return None

        # Find the highest supported metadata version
        for version in range(METADATA_VERSION, 0, -1):
            url = url_helper.combine_url(
                base_url, "v{0}".format(version), "metadata"
            )
            try:
                # Fix: the original read ``self.url_timeout``, an attribute
                # this class never defines (its class attribute is
                # ``url_timeout_seconds``). Use the resolved URL parameters,
                # consistent with the wait_for_url() call above.
                response = url_helper.readurl(
                    url, timeout=url_params.timeout_seconds
                )
            except url_helper.UrlError as e:
                LOG.debug("URL %s raised exception %s", url, e)
                continue

            if response.ok():
                self._metadata_url = url_helper.combine_url(
                    base_url, "v{0}".format(version)
                )
                break
            else:
                LOG.debug("No metadata found at URL %s", url)

        return self._metadata_url

    @staticmethod
    def ds_detect():
        # Platform detection is DMI-based; see is_platform_viable().
        return is_platform_viable()

    @property
    def network_config(self):
        """Network configuration from metadata, cached on first access.

        Returns None until metadata has been fetched by _get_data().
        """
        if self._net_cfg:
            return self._net_cfg

        if not self.metadata:
            return None
        self._net_cfg = self.metadata["network"]
        return self._net_cfg


def is_platform_viable() -> bool:
    """Return True when DMI reports this is a CloudCIX platform."""
    return dmi.read_dmi_data("system-product-name") == CLOUDCIX_DMI_NAME


def read_metadata(base_url: str, url_params):
    """
    Read metadata from metadata server at base_url

    :returns: dictionary of retrieved metadata and user data containing
              the following keys: meta-data, user-data
    :param: base_url: meta data server's base URL
    :param: url_params: URL parameter namespace (as returned by
            DataSource.get_url_params()) providing the attributes
            `num_retries`, `sec_between_retries` and `timeout_seconds`.
    :raises: InvalidMetadataException upon network error connecting to
             metadata URL, error response from meta data server or failure to
             decode/parse metadata and userdata payload.
    """
    md = {}
    # (url leaf, result key, parser) triples for the two IMDS payloads.
    leaf_key_format_callback = (
        ("metadata", "meta-data", util.load_json),
        ("userdata", "user-data", util.maybe_b64decode),
    )

    for url_leaf, new_key, format_callback in leaf_key_format_callback:
        try:
            response = url_helper.readurl(
                url=url_helper.combine_url(base_url, url_leaf),
                retries=url_params.num_retries,
                sec_between=url_params.sec_between_retries,
                timeout=url_params.timeout_seconds,
            )
        except url_helper.UrlError as error:
            # Chain the cause for debuggability, matching the JSON
            # handler below.
            raise sources.InvalidMetaDataException(
                f"Failed to fetch IMDS {url_leaf}: "
                f"{base_url}/{url_leaf}: {error}"
            ) from error

        if not response.ok():
            raise sources.InvalidMetaDataException(
                f"No valid {url_leaf} found. "
                f"URL {base_url}/{url_leaf} returned code {response.code}"
            )

        try:
            md[new_key] = format_callback(response.contents)
        except json.decoder.JSONDecodeError as exc:
            raise sources.InvalidMetaDataException(
                f"Invalid JSON at {base_url}/{url_leaf}: {exc}"
            ) from exc

    return md


# Used to match classes to dependencies
datasources = [
    (DataSourceCloudCIX, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
]


# Return a list of data sources that match this set of dependencies
def get_datasource_list(depends):
    return sources.list_from_depends(depends, datasources)
Edit
Rename
Chmod
Delete
FILE
FOLDER
INFO
Name
Size
Permission
Action
__pycache__
---
0755
azure
---
0755
helpers
---
0755
DataSourceAkamai.py
12960 bytes
0644
DataSourceAliYun.py
15593 bytes
0644
DataSourceAltCloud.py
8622 bytes
0644
DataSourceAzure.py
77550 bytes
0644
DataSourceBigstep.py
1946 bytes
0644
DataSourceCloudCIX.py
5311 bytes
0644
DataSourceCloudSigma.py
3956 bytes
0644
DataSourceCloudStack.py
11481 bytes
0644
DataSourceConfigDrive.py
11498 bytes
0644
DataSourceDigitalOcean.py
4300 bytes
0644
DataSourceEc2.py
42929 bytes
0644
DataSourceExoscale.py
8830 bytes
0644
DataSourceGCE.py
13818 bytes
0644
DataSourceHetzner.py
5520 bytes
0644
DataSourceIBMCloud.py
14999 bytes
0644
DataSourceLXD.py
17654 bytes
0644
DataSourceMAAS.py
15197 bytes
0644
DataSourceNWCS.py
4513 bytes
0644
DataSourceNoCloud.py
16307 bytes
0644
DataSourceNone.py
1304 bytes
0644
DataSourceOVF.py
13135 bytes
0644
DataSourceOpenNebula.py
16042 bytes
0644
DataSourceOpenStack.py
10445 bytes
0644
DataSourceOracle.py
21580 bytes
0644
DataSourceRbxCloud.py
8039 bytes
0644
DataSourceScaleway.py
15079 bytes
0644
DataSourceSmartOS.py
35075 bytes
0644
DataSourceUpCloud.py
5321 bytes
0644
DataSourceVMware.py
36155 bytes
0644
DataSourceVultr.py
4614 bytes
0644
DataSourceWSL.py
14708 bytes
0644
__init__.py
45299 bytes
0644
N4ST4R_ID | Naxtarrr