ACIL FM
Current DIR: /lib/python3.9/site-packages/cloudinit/sources
Name                         Size
azure/                       -
helpers/                     -
__pycache__/                 -
DataSourceAkamai.py          12.66 MB
DataSourceAliYun.py          15.21 MB
DataSourceAltCloud.py        8.42 MB
DataSourceAzure.py           75.51 MB
DataSourceBigstep.py         1.9 MB
DataSourceCloudCIX.py        5.19 MB
DataSourceCloudSigma.py      3.86 MB
DataSourceCloudStack.py      11.21 MB
DataSourceConfigDrive.py     11.23 MB
DataSourceDigitalOcean.py    4.2 MB
DataSourceEc2.py             41.76 MB
DataSourceExoscale.py        8.55 MB
DataSourceGCE.py             13.33 MB
DataSourceHetzner.py         5.39 MB
DataSourceIBMCloud.py        14.65 MB
DataSourceLXD.py             17.2 MB
DataSourceMAAS.py            14.85 MB
DataSourceNoCloud.py         15.98 MB
DataSourceNone.py            1.27 MB
DataSourceNWCS.py            4.41 MB
DataSourceOpenNebula.py      15.67 MB
DataSourceOpenStack.py       10.1 MB
DataSourceOracle.py          21.07 MB
DataSourceOVF.py             12.7 MB
DataSourceRbxCloud.py        7.88 MB
DataSourceScaleway.py        14.73 MB
DataSourceSmartOS.py         34.26 MB
DataSourceUpCloud.py         5.2 MB
DataSourceVMware.py          35.31 MB
DataSourceVultr.py           4.51 MB
DataSourceWSL.py             14.37 MB
__init__.py                  43.63 MB
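Every DataSource*.py module in this listing registers its class with cloud-init through a module-level datasources table and a get_datasource_list() hook; the CloudCIX file opened below shows that pattern at the end of the file. As a minimal sketch, assuming the cloudinit package from this site-packages tree is importable by the same python3.9 interpreter, the dependency matching can be exercised directly:

# Illustration only: assumes the cloudinit package listed above is importable
# with the interpreter that owns this site-packages tree.
from cloudinit import sources
from cloudinit.sources import DataSourceCloudCIX as ds_cloudcix

# Ask the module which of its datasource classes can run when both a local
# filesystem and a configured network are available.
candidates = ds_cloudcix.get_datasource_list(
    [sources.DEP_FILESYSTEM, sources.DEP_NETWORK]
)
print(candidates)  # expected: [<class '...DataSourceCloudCIX.DataSourceCloudCIX'>]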
Edit file: /lib/python3.9/site-packages/cloudinit/sources/DataSourceCloudCIX.py
# This file is part of cloud-init. See LICENSE file for license information.
import json
import logging
from typing import Optional

from cloudinit import dmi, sources, url_helper, util

LOG = logging.getLogger(__name__)

METADATA_URLS = ["http://169.254.169.254"]
METADATA_VERSION = 1

CLOUDCIX_DMI_NAME = "CloudCIX"


class DataSourceCloudCIX(sources.DataSource):

    dsname = "CloudCIX"
    # Setup read_url parameters through get_url_params()
    url_retries = 3
    url_timeout_seconds = 5
    url_sec_between_retries = 5

    def __init__(self, sys_cfg, distro, paths):
        super(DataSourceCloudCIX, self).__init__(sys_cfg, distro, paths)
        self._metadata_url = None
        self._net_cfg = None

    def _get_data(self):
        """
        Fetch the user data and the metadata
        """
        try:
            crawled_data = self.crawl_metadata_service()
        except sources.InvalidMetaDataException as error:
            LOG.error(
                "Failed to read data from CloudCIX datasource: %s", error
            )
            return False

        self.metadata = crawled_data["meta-data"]
        self.userdata_raw = util.decode_binary(crawled_data["user-data"])

        return True

    def crawl_metadata_service(self) -> dict:
        md_url = self.determine_md_url()
        if md_url is None:
            raise sources.InvalidMetaDataException(
                "Could not determine metadata URL"
            )

        data = read_metadata(md_url, self.get_url_params())
        return data

    def determine_md_url(self) -> Optional[str]:
        if self._metadata_url:
            return self._metadata_url

        # Try to reach the metadata server
        url_params = self.get_url_params()
        base_url, _ = url_helper.wait_for_url(
            METADATA_URLS,
            max_wait=url_params.max_wait_seconds,
            timeout=url_params.timeout_seconds,
        )
        if not base_url:
            return None

        # Find the highest supported metadata version
        for version in range(METADATA_VERSION, 0, -1):
            url = url_helper.combine_url(
                base_url, "v{0}".format(version), "metadata"
            )
            try:
                response = url_helper.readurl(url, timeout=self.url_timeout)
            except url_helper.UrlError as e:
                LOG.debug("URL %s raised exception %s", url, e)
                continue

            if response.ok():
                self._metadata_url = url_helper.combine_url(
                    base_url, "v{0}".format(version)
                )
                break
            else:
                LOG.debug("No metadata found at URL %s", url)

        return self._metadata_url

    @staticmethod
    def ds_detect():
        return is_platform_viable()

    @property
    def network_config(self):
        if self._net_cfg:
            return self._net_cfg

        if not self.metadata:
            return None

        self._net_cfg = self.metadata["network"]
        return self._net_cfg


def is_platform_viable() -> bool:
    return dmi.read_dmi_data("system-product-name") == CLOUDCIX_DMI_NAME


def read_metadata(base_url: str, url_params):
    """
    Read metadata from metadata server at base_url

    :returns: dictionary of retrieved metadata and user data containing the
              following keys: meta-data, user-data
    :param: base_url: meta data server's base URL
    :param: url_params: dictionary of URL retrieval parameters. Valid keys are
            `retries`, `sec_between` and `timeout`.
    :raises: InvalidMetadataException upon network error connecting to
             metadata URL, error response from meta data server or failure to
             decode/parse metadata and userdata payload.
    """
    md = {}
    leaf_key_format_callback = (
        ("metadata", "meta-data", util.load_json),
        ("userdata", "user-data", util.maybe_b64decode),
    )

    for url_leaf, new_key, format_callback in leaf_key_format_callback:
        try:
            response = url_helper.readurl(
                url=url_helper.combine_url(base_url, url_leaf),
                retries=url_params.num_retries,
                sec_between=url_params.sec_between_retries,
                timeout=url_params.timeout_seconds,
            )
        except url_helper.UrlError as error:
            raise sources.InvalidMetaDataException(
                f"Failed to fetch IMDS {url_leaf}: "
                f"{base_url}/{url_leaf}: {error}"
            )

        if not response.ok():
            raise sources.InvalidMetaDataException(
                f"No valid {url_leaf} found. "
                f"URL {base_url}/{url_leaf} returned code {response.code}"
            )

        try:
            md[new_key] = format_callback(response.contents)
        except json.decoder.JSONDecodeError as exc:
            raise sources.InvalidMetaDataException(
                f"Invalid JSON at {base_url}/{url_leaf}: {exc}"
            ) from exc

    return md


# Used to match classes to dependencies
datasources = [
    (DataSourceCloudCIX, (sources.DEP_FILESYSTEM, sources.DEP_NETWORK)),
]


# Return a list of data sources that match this set of dependencies
def get_datasource_list(depends):
    return sources.list_from_depends(depends, datasources)