import base64
import binascii
import cgi
import datetime
import functools
import hashlib
import logging
import os
import random
import re
import socket
import time
import weakref

import dateutil.parser
from dateutil.tz import tzutc

import botocore
import botocore.awsrequest
import botocore.httpsession
from botocore.compat import (
    json, quote, zip_longest, urlsplit, urlunsplit, OrderedDict,
    six, urlparse, get_tzinfo_options, get_md5, MD5_AVAILABLE
)
from botocore.vendored.six.moves.urllib.request import getproxies, proxy_bypass
from botocore.exceptions import (
    InvalidExpressionError, ConfigNotFound, InvalidDNSNameError, ClientError,
    MetadataRetrievalError, EndpointConnectionError, ReadTimeoutError,
    ConnectionClosedError, ConnectTimeoutError, UnsupportedS3ArnError,
    UnsupportedS3AccesspointConfigurationError, SSOTokenLoadError,
    InvalidRegionError, InvalidIMDSEndpointError,
    UnsupportedOutpostResourceError, UnsupportedS3ControlConfigurationError,
    UnsupportedS3ControlArnError, InvalidHostLabelError, HTTPClientError,
    UnsupportedS3ConfigurationError
)
from urllib3.exceptions import LocationParseError

logger = logging.getLogger(__name__)
DEFAULT_METADATA_SERVICE_TIMEOUT = 1
METADATA_BASE_URL = 'http://169.254.169.254/'
METADATA_BASE_URL_IPv6 = 'http://[fe80:ec2::254%eth0]/'

# These are chars that do not need to be urlencoded, based on rfc2986,
# section 2.3.
SAFE_CHARS = '-._~'
LABEL_RE = re.compile(r'[a-z0-9][a-z0-9\-]*[a-z0-9]')
RETRYABLE_HTTP_ERRORS = (
    ReadTimeoutError,
    EndpointConnectionError,
    ConnectionClosedError,
    ConnectTimeoutError,
)
S3_ACCELERATE_WHITELIST = ['dualstack']

# Maps an endpoint prefix (left side) to the canonical service name used by
# the event system (right side).
EVENT_ALIASES = {
    'a4b': 'alexa-for-business',
    'alexaforbusiness': 'alexa-for-business',
    'api.mediatailor': 'mediatailor',
    'api.pricing': 'pricing',
    'api.sagemaker': 'sagemaker',
    'apigateway': 'api-gateway',
    'application-autoscaling': 'application-auto-scaling',
    'appstream2': 'appstream',
    'autoscaling': 'auto-scaling',
    'autoscaling-plans': 'auto-scaling-plans',
    'ce': 'cost-explorer',
    'cloudhsmv2': 'cloudhsm-v2',
    'cloudsearchdomain': 'cloudsearch-domain',
    'cognito-idp': 'cognito-identity-provider',
    'config': 'config-service',
    'cur': 'cost-and-usage-report-service',
    'data.iot': 'iot-data-plane',
    'data.jobs.iot': 'iot-jobs-data-plane',
    'data.mediastore': 'mediastore-data',
    'datapipeline': 'data-pipeline',
    'devicefarm': 'device-farm',
    'devices.iot1click': 'iot-1click-devices-service',
    'directconnect': 'direct-connect',
    'discovery': 'application-discovery-service',
    'dms': 'database-migration-service',
    'ds': 'directory-service',
    'dynamodbstreams': 'dynamodb-streams',
    'elasticbeanstalk': 'elastic-beanstalk',
    'elasticfilesystem': 'efs',
    'elasticloadbalancing': 'elastic-load-balancing',
    'elasticmapreduce': 'emr',
    'elastictranscoder': 'elastic-transcoder',
    'elb': 'elastic-load-balancing',
    'elbv2': 'elastic-load-balancing-v2',
    'email': 'ses',
    'entitlement.marketplace': 'marketplace-entitlement-service',
    'es': 'elasticsearch-service',
    'events': 'eventbridge',
    'cloudwatch-events': 'eventbridge',
    'iot-data': 'iot-data-plane',
    'iot-jobs-data': 'iot-jobs-data-plane',
    'iot1click-devices': 'iot-1click-devices-service',
    'iot1click-projects': 'iot-1click-projects',
    'kinesisanalytics': 'kinesis-analytics',
    'kinesisvideo': 'kinesis-video',
    'lex-models': 'lex-model-building-service',
    'lex-runtime': 'lex-runtime-service',
    'logs': 'cloudwatch-logs',
    'machinelearning': 'machine-learning',
    'marketplace-entitlement': 'marketplace-entitlement-service',
    'marketplacecommerceanalytics': 'marketplace-commerce-analytics',
    'metering.marketplace': 'marketplace-metering',
    'meteringmarketplace': 'marketplace-metering',
    'mgh': 'migration-hub',
    'models.lex': 'lex-model-building-service',
    'monitoring': 'cloudwatch',
    'mturk-requester': 'mturk',
    'opsworks-cm': 'opsworkscm',
    'projects.iot1click': 'iot-1click-projects',
    'resourcegroupstaggingapi': 'resource-groups-tagging-api',
    'route53': 'route-53',
    'route53domains': 'route-53-domains',
    'runtime.lex': 'lex-runtime-service',
    'runtime.sagemaker': 'sagemaker-runtime',
    'sdb': 'simpledb',
    'secretsmanager': 'secrets-manager',
    'serverlessrepo': 'serverlessapplicationrepository',
    'servicecatalog': 'service-catalog',
    'states': 'sfn',
    'stepfunctions': 'sfn',
    'storagegateway': 'storage-gateway',
    'streams.dynamodb': 'dynamodb-streams',
    'tagging': 'resource-groups-tagging-api'
}
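
# Illustrative sketch, not part of the original module: EVENT_ALIASES maps a
# service's endpoint prefix to the canonical name used in event names, so
# handlers registered against the canonical name also fire for the old
# prefix. botocore applies this mapping internally when resolving event
# names; the helper below is a hypothetical demonstration of the lookup.
def _example_resolve_event_alias():
    endpoint_prefix = 'monitoring'
    canonical = EVENT_ALIASES.get(endpoint_prefix, endpoint_prefix)
    assert canonical == 'cloudwatch'
    return 'before-call.%s.PutMetricData' % canonical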

# IPv6 address validation patterns (used to validate custom IMDS endpoints).
HEX_PAT = '[0-9A-Fa-f]{1,4}'
IPV4_PAT = r'(?:[0-9]{1,3}\.){3}[0-9]{1,3}'
LS32_PAT = '(?:{hex}:{hex}|{ipv4})'.format(hex=HEX_PAT, ipv4=IPV4_PAT)
_subs = {'hex': HEX_PAT, 'ls32': LS32_PAT}
_variations = [
    '(?:%(hex)s:){6}%(ls32)s',
    '::(?:%(hex)s:){5}%(ls32)s',
    '(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s',
    '(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s',
    '(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s',
    '(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s',
    '(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s',
    '(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s',
    '(?:(?:%(hex)s:){0,6}%(hex)s)?::',
]
UNRESERVED_PAT = (
    'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\\-~'
)
IPV6_PAT = '(?:' + '|'.join([x % _subs for x in _variations]) + ')'
ZONE_ID_PAT = '(?:%25|%)(?:[' + UNRESERVED_PAT + ']|%[a-fA-F0-9]{2})+'
IPV6_ADDRZ_PAT = r'\[' + IPV6_PAT + r'(?:' + ZONE_ID_PAT + r')?\]'
IPV6_ADDRZ_RE = re.compile('^' + IPV6_ADDRZ_PAT + '$')


def ensure_boolean(val):
    """Ensures a boolean value if a string or boolean is provided

    For strings, the value for True/False is case insensitive
    """
    if isinstance(val, bool):
        return val
    else:
        return val.lower() == 'true'


def is_json_value_header(shape):
    """Determines if the provided shape is the special header type jsonvalue.

    :type shape: botocore.shape
    :param shape: Shape to be inspected for the jsonvalue trait.

    :return: True if this type is a jsonvalue, False otherwise
    :rtype: Bool
    """
    return (hasattr(shape, 'serialization') and
            shape.serialization.get('jsonvalue', False) and
            shape.serialization.get('location') == 'header' and
            shape.type_name == 'string')


def get_service_module_name(service_model):
    """Returns the module name for a service

    This is the value used in both the documentation and client class name
    """
    name = service_model.metadata.get(
        'serviceAbbreviation',
        service_model.metadata.get(
            'serviceFullName', service_model.service_name))
    name = name.replace('Amazon', '')
    name = name.replace('AWS', '')
    name = re.sub(r'\W+', '', name)
    return name


def normalize_url_path(path):
    if not path:
        return '/'
    return remove_dot_segments(path)


def normalize_boolean(val):
    """Returns None if val is None, otherwise ensure value
    converted to boolean"""
    if val is None:
        return val
    else:
        return ensure_boolean(val)


def remove_dot_segments(url):
    # RFC 3986, section 5.2.4 "Remove Dot Segments".
    # Also, AWS services require consecutive slashes to be removed,
    # so that's done here as well.
    if not url:
        return ''
    input_url = url.split('/')
    output_list = []
    for x in input_url:
        if x and x != '.':
            if x == '..':
                if output_list:
                    output_list.pop()
            else:
                output_list.append(x)

    if url[0] == '/':
        first = '/'
    else:
        first = ''
    if url[-1] == '/' and output_list:
        last = '/'
    else:
        last = ''
    return first + '/'.join(output_list) + last


def validate_jmespath_for_set(expression):
    if not expression or expression == '.':
        raise InvalidExpressionError(expression=expression)
    for invalid in ['[', ']', '*']:
        if invalid in expression:
            raise InvalidExpressionError(expression=expression)


def set_value_from_jmespath(source, expression, value, is_first=True):
    # This takes a (limited) jmespath-like expression and can set a value
    # based on it.
    if is_first:
        validate_jmespath_for_set(expression)

    bits = expression.split('.', 1)
    current_key, remainder = bits[0], bits[1] if len(bits) > 1 else ''

    if not current_key:
        raise InvalidExpressionError(expression=expression)

    if remainder:
        if current_key not in source:
            # The requested key is not in the dict, so recurse deeper
            # with a fresh sub-dict.
            source[current_key] = {}
        return set_value_from_jmespath(
            source[current_key], remainder, value, is_first=False)

    # If we're down to a single key, set it.
    source[current_key] = value


class _RetriesExceededError(Exception):
    """Internal exception used when the number of retries are exceeded."""


class BadIMDSRequestError(Exception):
    def __init__(self, request):
        self.request = request
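
# Illustrative sketch, not part of the original module: shows the limited
# jmespath-style setter above building nested dicts, and RFC 3986
# dot-segment removal. Values are arbitrary.
def _example_jmespath_set():
    source = {}
    set_value_from_jmespath(source, 'foo.bar.baz', 42)
    assert source == {'foo': {'bar': {'baz': 42}}}
    assert remove_dot_segments('/a/b/../c/./d') == '/a/c/d'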

class IMDSFetcher(object):

    _RETRIES_EXCEEDED_ERROR_CLS = _RetriesExceededError
    _TOKEN_PATH = 'latest/api/token'
    _TOKEN_TTL = '21600'

    def __init__(self, timeout=DEFAULT_METADATA_SERVICE_TIMEOUT,
                 num_attempts=1, base_url=METADATA_BASE_URL,
                 env=None, user_agent=None, config=None):
        self._timeout = timeout
        self._num_attempts = num_attempts
        self._base_url = self._select_base_url(base_url, config)

        if env is None:
            env = os.environ.copy()
        self._disabled = env.get('AWS_EC2_METADATA_DISABLED', 'false').lower()
        self._disabled = self._disabled == 'true'
        self._user_agent = user_agent
        self._session = botocore.httpsession.URLLib3Session(
            timeout=self._timeout,
            proxies=get_environ_proxies(self._base_url),
        )

    def get_base_url(self):
        return self._base_url

    def _select_base_url(self, base_url, config):
        if config is None:
            config = {}

        requires_ipv6 = ensure_boolean(config.get('imds_use_ipv6', False))
        custom_metadata_endpoint = config.get('ec2_metadata_service_endpoint')

        if requires_ipv6 and custom_metadata_endpoint:
            logger.warn("Custom endpoint and IMDS_USE_IPV6 are both set. "
                        "Using custom endpoint.")

        chosen_base_url = None
        if base_url != METADATA_BASE_URL:
            chosen_base_url = base_url
        elif custom_metadata_endpoint:
            chosen_base_url = custom_metadata_endpoint
        elif requires_ipv6:
            chosen_base_url = METADATA_BASE_URL_IPv6
        else:
            chosen_base_url = METADATA_BASE_URL

        logger.debug("IMDS ENDPOINT: %s" % chosen_base_url)
        if not is_valid_uri(chosen_base_url):
            raise InvalidIMDSEndpointError(endpoint=chosen_base_url)

        return chosen_base_url

    def _fetch_metadata_token(self):
        self._assert_enabled()
        url = self._base_url + self._TOKEN_PATH
        headers = {
            'x-aws-ec2-metadata-token-ttl-seconds': self._TOKEN_TTL,
        }
        self._add_user_agent(headers)
        request = botocore.awsrequest.AWSRequest(
            method='PUT', url=url, headers=headers)
        for i in range(self._num_attempts):
            try:
                response = self._session.send(request.prepare())
                if response.status_code == 200:
                    return response.text
                elif response.status_code in (404, 403, 405):
                    return None
                elif response.status_code in (400,):
                    raise BadIMDSRequestError(request)
            except ReadTimeoutError:
                return None
            except RETRYABLE_HTTP_ERRORS as e:
                logger.debug(
                    "Caught retryable HTTP exception while making metadata "
                    "service request to %s: %s", url, e, exc_info=True)
            except HTTPClientError as e:
                if isinstance(e.kwargs.get('error'), LocationParseError):
                    raise InvalidIMDSEndpointError(endpoint=url, error=e)
                else:
                    raise
        return None

    def _get_request(self, url_path, retry_func, token=None):
        """Make a get request to the Instance Metadata Service.

        :type url_path: str
        :param url_path: The path component of the URL to make a get request.
            This arg is appended to the base_url that was provided in the
            initializer.

        :type retry_func: callable
        :param retry_func: A function that takes the response as an argument
             and determines if it needs to retry. By default empty and non
             200 OK responses are retried.

        :type token: str
        :param token: Metadata token to send along with GET requests to IMDS.
        """
        self._assert_enabled()
        if retry_func is None:
            retry_func = self._default_retry
        url = self._base_url + url_path
        headers = {}
        if token is not None:
            headers['x-aws-ec2-metadata-token'] = token
        self._add_user_agent(headers)
        for i in range(self._num_attempts):
            try:
                request = botocore.awsrequest.AWSRequest(
                    method='GET', url=url, headers=headers)
                response = self._session.send(request.prepare())
                if not retry_func(response):
                    return response
            except RETRYABLE_HTTP_ERRORS as e:
                logger.debug(
                    "Caught retryable HTTP exception while making metadata "
                    "service request to %s: %s", url, e, exc_info=True)
        raise self._RETRIES_EXCEEDED_ERROR_CLS()

    def _add_user_agent(self, headers):
        if self._user_agent is not None:
            headers['User-Agent'] = self._user_agent

    def _assert_enabled(self):
        if self._disabled:
            logger.debug("Access to EC2 metadata has been disabled.")
            raise self._RETRIES_EXCEEDED_ERROR_CLS()

    def _default_retry(self, response):
        return (
            self._is_non_ok_response(response) or
            self._is_empty(response)
        )

    def _is_non_ok_response(self, response):
        if response.status_code != 200:
            self._log_imds_response(response, 'non-200', log_body=True)
            return True
        return False

    def _is_empty(self, response):
        if not response.content:
            self._log_imds_response(response, 'no body', log_body=True)
            return True
        return False

    def _log_imds_response(self, response, reason_to_log, log_body=False):
        statement = (
            "Metadata service returned %s response "
            "with status code of %s for url: %s"
        )
        logger_args = [reason_to_log, response.status_code, response.url]
        if log_body:
            statement += ", content body: %s"
            logger_args.append(response.content)
        logger.debug(statement, *logger_args)


class InstanceMetadataFetcher(IMDSFetcher):

    _URL_PATH = 'latest/meta-data/iam/security-credentials/'
    _REQUIRED_CREDENTIAL_FIELDS = [
        'AccessKeyId', 'SecretAccessKey', 'Token', 'Expiration'
    ]

    def retrieve_iam_role_credentials(self):
        try:
            token = self._fetch_metadata_token()
            role_name = self._get_iam_role(token)
            credentials = self._get_credentials(role_name, token)
            if self._contains_all_credential_fields(credentials):
                return {
                    'role_name': role_name,
                    'access_key': credentials['AccessKeyId'],
                    'secret_key': credentials['SecretAccessKey'],
                    'token': credentials['Token'],
                    'expiry_time': credentials['Expiration'],
                }
            else:
                # IMDS can return a 200 response that carries an error
                # payload instead of credentials.
                if 'Code' in credentials and 'Message' in credentials:
                    logger.debug('Error response received when retrieving '
                                 'credentials: %s.', credentials)
                return {}
        except self._RETRIES_EXCEEDED_ERROR_CLS:
            logger.debug("Max number of attempts exceeded (%s) when "
                         "attempting to retrieve data from metadata service.",
                         self._num_attempts)
        except BadIMDSRequestError as e:
            logger.debug("Bad IMDS request: %s", e.request)
        return {}

    def _get_iam_role(self, token=None):
        return self._get_request(
            url_path=self._URL_PATH,
            retry_func=self._needs_retry_for_role_name,
            token=token,
        ).text

    def _get_credentials(self, role_name, token=None):
        r = self._get_request(
            url_path=self._URL_PATH + role_name,
            retry_func=self._needs_retry_for_credentials,
            token=token,
        )
        return json.loads(r.text)

    def _is_invalid_json(self, response):
        try:
            json.loads(response.text)
            return False
        except ValueError:
            self._log_imds_response(response, 'invalid json')
            return True

    def _needs_retry_for_role_name(self, response):
        return (
            self._is_non_ok_response(response) or
            self._is_empty(response)
        )

    def _needs_retry_for_credentials(self, response):
        return (
            self._is_non_ok_response(response) or
            self._is_empty(response) or
            self._is_invalid_json(response)
        )

    def _contains_all_credential_fields(self, credentials):
        for field in self._REQUIRED_CREDENTIAL_FIELDS:
            if field not in credentials:
                logger.debug(
                    'Retrieved credentials is missing required field: %s',
                    field)
                return False
        return True
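
# Illustrative sketch, not part of the original module: fetching
# instance-role credentials from IMDS. The timeout/attempt values are
# arbitrary; on a non-EC2 host this returns an empty dict once the
# configured number of attempts is exhausted.
def _example_fetch_imds_credentials():
    fetcher = InstanceMetadataFetcher(timeout=1, num_attempts=2)
    creds = fetcher.retrieve_iam_role_credentials()
    # On success: {'role_name': ..., 'access_key': ..., 'secret_key': ...,
    #              'token': ..., 'expiry_time': ...}; on failure: {}.
    return creds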

def merge_dicts(dict1, dict2, append_lists=False):
    """Given two dict, merge the second dict into the first.

    The dicts can have arbitrary nesting.

    :param append_lists: If true, instead of clobbering a list with the new
        value, append all of the new values onto the original list.
    """
    for key in dict2:
        if isinstance(dict2[key], dict):
            if key in dict1 and key in dict2:
                merge_dicts(dict1[key], dict2[key], append_lists)
            else:
                dict1[key] = dict2[key]
        elif isinstance(dict2[key], list) and append_lists:
            # The value in dict1 must be a list in order to append new
            # values onto it.
            if key in dict1 and isinstance(dict1[key], list):
                dict1[key].extend(dict2[key])
            else:
                dict1[key] = dict2[key]
        else:
            # At scalar types, we iterate and merge the
            # current dict that we're on.
            dict1[key] = dict2[key]


def lowercase_dict(original):
    """Copies the given dictionary ensuring all keys are lowercase strings."""
    copy = {}
    for key in original:
        copy[key.lower()] = original[key]
    return copy


def parse_key_val_file(filename, _open=open):
    try:
        with _open(filename) as f:
            contents = f.read()
            return parse_key_val_file_contents(contents)
    except OSError:
        raise ConfigNotFound(path=filename)


def parse_key_val_file_contents(contents):
    # This was originally extracted from the EC2 credential provider, which
    # fails quietly if a line is malformed, so the same leniency is kept here.
    final = {}
    for line in contents.splitlines():
        if '=' not in line:
            continue
        key, val = line.split('=', 1)
        key = key.strip()
        val = val.strip()
        final[key] = val
    return final


def percent_encode_sequence(mapping, safe=SAFE_CHARS):
    """Urlencode a dict or list into a string.

    This is similar to urllib.urlencode except that:

    * It uses quote, and not quote_plus
    * It has a default list of safe chars that don't need
      to be encoded, which matches what AWS services expect.

    If any value in the input ``mapping`` is a list type,
    then each list element will be serialized.  This is the equivalent
    to ``urlencode``'s ``doseq=True`` argument.

    This function should be preferred over the stdlib
    ``urlencode()`` function.

    :param mapping: Either a dict to urlencode or a list of
        ``(key, value)`` pairs.
    """
    encoded_pairs = []
    if hasattr(mapping, 'items'):
        pairs = mapping.items()
    else:
        pairs = mapping
    for key, value in pairs:
        if isinstance(value, list):
            for element in value:
                encoded_pairs.append('%s=%s' % (percent_encode(key),
                                                percent_encode(element)))
        else:
            encoded_pairs.append('%s=%s' % (percent_encode(key),
                                            percent_encode(value)))
    return '&'.join(encoded_pairs)


def percent_encode(input_str, safe=SAFE_CHARS):
    """Urlencodes a string.

    Whereas percent_encode_sequence handles taking a dict/sequence and
    producing a percent encoded string, this function deals only with
    taking a string (not a dict/sequence) and percent encoding it.

    If given the binary type, will simply URL encode it. If given the
    text type, will produce the binary type by UTF-8 encoding the
    text. If given something else, will convert it to the text type
    first.
    """
    # If its not a binary or text string, make it a text string.
    if not isinstance(input_str, (six.binary_type, six.text_type)):
        input_str = six.text_type(input_str)
    # If it's not bytes, make it bytes by UTF-8 encoding it.
    if not isinstance(input_str, six.binary_type):
        input_str = input_str.encode('utf-8')
    return quote(input_str, safe=safe)


def _parse_timestamp_with_tzinfo(value, tzinfo):
    """Parse timestamp with pluggable tzinfo options."""
    if isinstance(value, (int, float)):
        # Possibly an epoch time.
        return datetime.datetime.fromtimestamp(value, tzinfo())
    else:
        try:
            return datetime.datetime.fromtimestamp(float(value), tzinfo())
        except (TypeError, ValueError):
            pass
    try:
        # In certain cases, a timestamp marked with GMT can be parsed into a
        # different timezone, so here we provide a context which will
        # enforce that GMT == UTC.
        return dateutil.parser.parse(value, tzinfos={'GMT': tzutc()})
    except (TypeError, ValueError) as e:
        raise ValueError('Invalid timestamp "%s": %s' % (value, e))


def parse_timestamp(value):
    """Parse a timestamp into a datetime object.

    Supported formats:

        * iso8601
        * rfc822
        * epoch (value is an integer)

    This will return a ``datetime.datetime`` object.
    """
    for tzinfo in get_tzinfo_options():
        try:
            return _parse_timestamp_with_tzinfo(value, tzinfo)
        except OSError as e:
            logger.debug('Unable to parse timestamp with "%s" timezone info.',
                         tzinfo.__name__, exc_info=e)
    raise RuntimeError('Unable to calculate correct timezone offset for '
                       '"%s"' % value)
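
# Illustrative sketch, not part of the original module: the query-string
# encoder serializes list values as repeated keys, and the timestamp parser
# accepts epoch, iso8601, and rfc822 forms.
def _example_encoding_and_timestamps():
    qs = percent_encode_sequence({'k': ['a b', 'c']})
    assert qs == 'k=a%20b&k=c'
    dt = parse_timestamp(0)  # the Unix epoch
    assert dt.year == 1970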

def parse_to_aware_datetime(value):
    """Converted the passed in value to a datetime object with tzinfo.

    This function can be used to normalize all timestamp inputs.  This
    function accepts a number of different types of inputs, but
    will always return a datetime.datetime object with time zone
    information.

    The input param ``value`` can be one of several types:

        * A datetime object (both naive and aware)
        * An integer representing the epoch time (can also be a string
          of the integer, i.e '0', instead of 0).  The epoch time is
          considered to be UTC.
        * An iso8601 formatted timestamp.  This does not need to be
          a complete timestamp, it can contain just the date portion
          without the time component.

    The returned value will be a datetime object that will have tzinfo.
    If no timezone info was provided in the input value, then UTC is
    assumed, not local time.

    """
    # This is a general purpose method that handles several cases of
    # converting the provided value to a string timestamp suitable to be
    # serialized to an http request. It can handle:
    # 1) A datetime.datetime object.
    if isinstance(value, datetime.datetime):
        datetime_obj = value
    else:
        # 2) A string object that's formatted as a timestamp.
        # 3) The number of seconds since the epoch (int/float/str).
        datetime_obj = parse_timestamp(value)
    if datetime_obj.tzinfo is None:
        # I.e. no timezone is specified, assume UTC.
        datetime_obj = datetime_obj.replace(tzinfo=tzutc())
    else:
        # The timezone needs to be normalized to UTC.
        datetime_obj = datetime_obj.astimezone(tzutc())
    return datetime_obj


def datetime2timestamp(dt, default_timezone=None):
    """Calculate the timestamp based on the given datetime instance.

    :type dt: datetime
    :param dt: A datetime object to be converted into timestamp
    :type default_timezone: tzinfo
    :param default_timezone: If it is provided as None, we treat it as tzutc().
                             But it is only used when dt is a naive datetime.
    :returns: The timestamp
    """
    epoch = datetime.datetime(1970, 1, 1)
    if dt.tzinfo is None:
        if default_timezone is None:
            default_timezone = tzutc()
        dt = dt.replace(tzinfo=default_timezone)
    d = dt.replace(tzinfo=None) - dt.utcoffset() - epoch
    if hasattr(d, 'total_seconds'):
        return d.total_seconds()  # Works in Python 2.7+
    return (d.microseconds + (d.seconds + d.days * 24 * 3600) * 10**6) / 10**6


def calculate_sha256(body, as_hex=False):
    """Calculate a sha256 checksum.

    This method will calculate the sha256 checksum of a file like
    object.  Note that this method will iterate through the entire
    file contents.  The caller is responsible for ensuring the proper
    starting position of the file and ``seek()``'ing the file back
    to its starting location if other consumers need to read from
    the file like object.

    :param body: Any file like object.  The file must be opened
        in binary mode such that a ``.read()`` call returns bytes.
    :param as_hex: If True, then the hex digest is returned.
        If False, then the digest (as binary bytes) is returned.

    :returns: The sha256 checksum
    """
    checksum = hashlib.sha256()
    for chunk in iter(lambda: body.read(1024 * 1024), b''):
        checksum.update(chunk)
    if as_hex:
        return checksum.hexdigest()
    else:
        return checksum.digest()


def calculate_tree_hash(body):
    """Calculate a tree hash checksum.

    For more information see:

    http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html

    :param body: Any file like object.  This has the same constraints as
        the ``body`` param in calculate_sha256

    :rtype: str
    :returns: The hex version of the calculated tree hash

    """
    chunks = []
    required_chunk_size = 1024 * 1024
    sha256 = hashlib.sha256
    for chunk in iter(lambda: body.read(required_chunk_size), b''):
        chunks.append(sha256(chunk).digest())
    if not chunks:
        return sha256(b'').hexdigest()
    while len(chunks) > 1:
        new_chunks = []
        for first, second in _in_pairs(chunks):
            if second is not None:
                new_chunks.append(sha256(first + second).digest())
            else:
                # We're at the end of the list and there's no pair left.
                new_chunks.append(first)
        chunks = new_chunks
    return binascii.hexlify(chunks[0]).decode('ascii')


def _in_pairs(iterable):
    # Creates iterator that iterates over the list in pairs:
    # for a, b in _in_pairs([0, 1, 2, 3, 4]):
    #     print(a, b)
    #
    # will print:
    # 0, 1
    # 2, 3
    # 4, None
    shared_iter = iter(iterable)
    # Note that zip_longest is a compat import that uses
    # the itertools izip_longest.  This creates an iterator,
    # this pairs the items in the iterator together:
    #
    # For a list [1, 2, 3, 4]:
    #
    # izip_longest(i, i):
    #  ((1, 2), (3, 4))
    #
    return zip_longest(shared_iter, shared_iter)


class CachedProperty(object):
    """A read only property that caches the initially computed value.

    This descriptor will only call the provided ``fget`` function once.
    Subsequent access to this property will return the cached value.

    """

    def __init__(self, fget):
        self._fget = fget

    def __get__(self, obj, cls):
        if obj is None:
            return self
        else:
            computed_value = self._fget(obj)
            obj.__dict__[self._fget.__name__] = computed_value
            return computed_value
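
# Illustrative sketch, not part of the original module: CachedProperty
# computes once per instance, stores the result in the instance __dict__,
# and (being a non-data descriptor) is shadowed by that cached attribute
# on every later access.
class _ExampleCachedUser(object):
    def __init__(self):
        self.calls = 0

    @CachedProperty
    def expensive(self):
        self.calls += 1
        return self.calls


def _example_cached_property():
    user = _ExampleCachedUser()
    assert user.expensive == 1
    assert user.expensive == 1  # cached; fget ran only once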

class ArgumentGenerator(object):
    """Generate sample input based on a shape model.

    This class contains a ``generate_skeleton`` method that will take
    an input/output shape (created from ``botocore.model``) and generate
    a sample dictionary corresponding to the input/output shape.

    The specific values used are place holder values. For strings either an
    empty string or the member name can be used, for numbers 0 or 0.0 is
    used.  The intended usage of this class is to generate the *shape* of
    the input structure.

    This can be useful for operations that have complex input shapes.
    This allows a user to just fill in the necessary data instead of
    worrying about the specific structure of the input arguments.

    Example usage::

        s = botocore.session.get_session()
        ddb = s.get_service_model('dynamodb')
        arg_gen = ArgumentGenerator()
        sample_input = arg_gen.generate_skeleton(
            ddb.operation_model('CreateTable').input_shape)
        print("Sample input for dynamodb.CreateTable: %s" % sample_input)

    """
    def __init__(self, use_member_names=False):
        self._use_member_names = use_member_names

    def generate_skeleton(self, shape):
        """Generate a sample input.

        :type shape: ``botocore.model.Shape``
        :param shape: The input shape.

        :return: The generated skeleton input corresponding to the
            provided input shape.

        """
        stack = []
        return self._generate_skeleton(shape, stack)

    def _generate_skeleton(self, shape, stack, name=''):
        stack.append(shape.name)
        try:
            if shape.type_name == 'structure':
                return self._generate_type_structure(shape, stack)
            elif shape.type_name == 'list':
                return self._generate_type_list(shape, stack)
            elif shape.type_name == 'map':
                return self._generate_type_map(shape, stack)
            elif shape.type_name == 'string':
                if self._use_member_names:
                    return name
                if shape.enum:
                    return random.choice(shape.enum)
                return ''
            elif shape.type_name in ['integer', 'long']:
                return 0
            elif shape.type_name == 'float':
                return 0.0
            elif shape.type_name == 'boolean':
                return True
            elif shape.type_name == 'timestamp':
                return datetime.datetime(1970, 1, 1, 0, 0, 0)
        finally:
            stack.pop()

    def _generate_type_structure(self, shape, stack):
        if stack.count(shape.name) > 1:
            return {}
        skeleton = OrderedDict()
        for member_name, member_shape in shape.members.items():
            skeleton[member_name] = self._generate_skeleton(
                member_shape, stack, name=member_name)
        return skeleton

    def _generate_type_list(self, shape, stack):
        # For list elements, a single skeleton element is generated.
        name = ''
        if self._use_member_names:
            name = shape.member.name
        return [
            self._generate_skeleton(shape.member, stack, name),
        ]

    def _generate_type_map(self, shape, stack):
        key_shape = shape.key
        value_shape = shape.value
        assert key_shape.type_name == 'string'
        return OrderedDict([
            ('KeyName', self._generate_skeleton(value_shape, stack)),
        ])


def is_valid_ipv6_endpoint_url(endpoint_url):
    netloc = urlparse(endpoint_url).netloc
    return IPV6_ADDRZ_RE.match(netloc) is not None


def is_valid_endpoint_url(endpoint_url):
    """Verify the endpoint_url is valid.

    :type endpoint_url: string
    :param endpoint_url: An endpoint_url.  Must have at least a scheme
        and a hostname.

    :return: True if the endpoint url is valid. False otherwise.

    """
    parts = urlsplit(endpoint_url)
    hostname = parts.hostname
    if hostname is None:
        return False
    if len(hostname) > 255:
        return False
    if hostname[-1] == ".":
        hostname = hostname[:-1]
    allowed = re.compile(
        r"^((?!-)[A-Z\d-]{1,63}(?<!-)\.)*((?!-)[A-Z\d-]{1,63}(?<!-))$",
        re.IGNORECASE)
    return allowed.match(hostname)


def is_valid_uri(endpoint_url):
    return (is_valid_endpoint_url(endpoint_url) or
            is_valid_ipv6_endpoint_url(endpoint_url))


def validate_region_name(region_name):
    """Provided region_name must be a valid host label."""
    if region_name is None:
        return
    valid_host_label = re.compile(r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{,63}(?<!-)$')
    valid = valid_host_label.match(region_name)
    if not valid:
        raise InvalidRegionError(region_name=region_name)


def check_dns_name(bucket_name):
    """
    Check to see if the ``bucket_name`` complies with the
    restricted DNS naming conventions necessary to allow
    access via virtual-hosting style.

    Even though "." characters are perfectly valid in this DNS
    naming scheme, we are going to punt on any name containing a
    "." character because these will cause SSL cert validation
    problems if we try to use virtual-hosting style addressing.
    """
    if '.' in bucket_name:
        return False
    n = len(bucket_name)
    if n < 3 or n > 63:
        # Wrong length
        return False
    match = LABEL_RE.match(bucket_name)
    if match is None or match.end() != len(bucket_name):
        return False
    return True


def fix_s3_host(request, signature_version, region_name,
                default_endpoint_url=None, **kwargs):
    """
    This handler looks at S3 requests just before they are signed.
    If there is a bucket name on the path (true for everything except
    ListAllMyBuckets) it checks to see if that bucket name conforms to
    the DNS naming conventions.  If it does, it alters the request to
    use ``virtual hosting`` style addressing rather than ``path-style``
    addressing.

    """
    if request.context.get('use_global_endpoint', False):
        default_endpoint_url = 's3.amazonaws.com'
    try:
        switch_to_virtual_host_style(
            request, signature_version, default_endpoint_url)
    except InvalidDNSNameError as e:
        bucket_name = e.kwargs['bucket_name']
        logger.debug('Not changing URI, bucket is not DNS compatible: %s',
                     bucket_name)


def switch_to_virtual_host_style(request, signature_version,
                                 default_endpoint_url=None, **kwargs):
    """
    This is a handler to force virtual host style s3 addressing no matter
    the signature version (which is taken in consideration for the default
    case). If the bucket is not DNS compatible an InvalidDNSName is thrown.

    :param request: A AWSRequest object that is about to be sent.
    :param signature_version: The signature version to sign with
    :param default_endpoint_url: The endpoint to use when switching to a
        virtual style. If None is supplied, the virtual host will be
        constructed from the url of the request.
    """
    if request.auth_path is not None:
        # The auth_path has already been applied (this may be a
        # retried request).
        return
    if _is_get_bucket_location_request(request):
        # For the GetBucketLocation response, we should not be using
        # the virtual host style addressing so we can avoid any sigv4
        # issues.
        logger.debug("Request is GetBucketLocation operation, not checking "
                     "for DNS compatibility.")
        return
    parts = urlsplit(request.url)
    request.auth_path = parts.path
    path_parts = parts.path.split('/')

    # Retrieve what the endpoint we will be prepending the bucket name to.
    if default_endpoint_url is None:
        default_endpoint_url = parts.netloc

    if len(path_parts) > 1:
        bucket_name = path_parts[1]
        if not bucket_name:
            # If the bucket name is empty we should not be checking for
            # virtual host.
            return

        if check_dns_name(bucket_name):
            # If the operation is on a bucket, the auth_path must be
            # terminated with a '/' character.
            if len(path_parts) == 2:
                if request.auth_path[-1] != '/':
                    request.auth_path += '/'
            path_parts.remove(bucket_name)
            # At the very least the path must be a '/', such as with the
            # CreateBucket operation when DNS style is being used. If this
            # is not used you will get an empty path which is incorrect.
            path = '/'.join(path_parts) or '/'
            global_endpoint = default_endpoint_url
            host = bucket_name + '.' + global_endpoint
            new_tuple = (parts.scheme, host, path, parts.query, '')
            new_uri = urlunsplit(new_tuple)
            request.url = new_uri
            logger.debug('URI updated to: %s', new_uri)
        else:
            raise InvalidDNSNameError(bucket_name=bucket_name)


def _is_get_bucket_location_request(request):
    return request.url.endswith('?location')


def instance_cache(func):
    """Method decorator for caching method calls to a single instance.

    **This is not a general purpose caching decorator.**

    In order to use this, you *must* provide an ``_instance_cache``
    attribute on the instance.

    This decorator is used to cache method calls.  The cache is only
    scoped to a single instance, but shared across all instance
    attributes decorated with this decorator.
    """
    func_name = func.__name__

    @functools.wraps(func)
    def _cache_guard(self, *args, **kwargs):
        cache_key = (func_name, args)
        if kwargs:
            kwarg_items = tuple(sorted(kwargs.items()))
            cache_key = (func_name, args, kwarg_items)
        result = self._instance_cache.get(cache_key)
        if result is not None:
            return result
        result = func(self, *args, **kwargs)
        self._instance_cache[cache_key] = result
        return result
    return _cache_guard


def switch_host_s3_accelerate(request, operation_name, **kwargs):
    """Switches the current s3 endpoint with an S3 Accelerate endpoint"""

    # Note that when registered with the before-sign event, it is possible
    # that the endpoint has already been switched to a dualstack endpoint,
    # which is why only whitelisted url parts are preserved.
    parts = urlsplit(request.url).netloc.split('.')
    parts = [p for p in parts if p in S3_ACCELERATE_WHITELIST]
    endpoint = 'https://s3-accelerate.'
    if len(parts) > 0:
        endpoint += '.'.join(parts) + '.'
    endpoint += 'amazonaws.com'

    if operation_name in ['ListBuckets', 'CreateBucket', 'DeleteBucket']:
        return
    _switch_hosts(request, endpoint, use_new_scheme=False)


def switch_host_with_param(request, param_name):
    """Switches the host using a parameter value from a JSON request body"""
    request_json = json.loads(request.data.decode('utf-8'))
    if request_json.get(param_name):
        new_endpoint = request_json[param_name]
        _switch_hosts(request, new_endpoint)


def _switch_hosts(request, new_endpoint, use_new_scheme=True):
    final_endpoint = _get_new_endpoint(
        request.url, new_endpoint, use_new_scheme)
    request.url = final_endpoint


def _get_new_endpoint(original_endpoint, new_endpoint, use_new_scheme=True):
    new_endpoint_components = urlsplit(new_endpoint)
    original_endpoint_components = urlsplit(original_endpoint)
    scheme = original_endpoint_components.scheme
    if use_new_scheme:
        scheme = new_endpoint_components.scheme
    final_endpoint_components = (
        scheme,
        new_endpoint_components.netloc,
        original_endpoint_components.path,
        original_endpoint_components.query,
        ''
    )
    final_endpoint = urlunsplit(final_endpoint_components)
    logger.debug('Updating URI from %s to %s' % (
        original_endpoint, final_endpoint))
    return final_endpoint


def deep_merge(base, extra):
    """Deeply merge two dictionaries, overriding existing keys in the base.

    :param base: The base dictionary which will be merged into.
    :param extra: The dictionary to merge into the base. Keys from this
        dictionary will take precedence.
    """
    for key in extra:
        # If the key represents a dict on both given dicts, merge the
        # sub-dicts.
        if key in base and isinstance(base[key], dict) \
                and isinstance(extra[key], dict):
            deep_merge(base[key], extra[key])
            continue
        # Otherwise, set the key on the base to be the value of the extra.
        base[key] = extra[key]
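
# Illustrative sketch, not part of the original module: deep_merge mutates
# ``base`` in place, recursing only where both sides hold dicts; scalar
# conflicts are won by ``extra``.
def _example_deep_merge():
    base = {'retries': {'max_attempts': 3}, 'region': 'us-east-1'}
    deep_merge(base, {'retries': {'mode': 'standard'}})
    assert base['retries'] == {'max_attempts': 3, 'mode': 'standard'}
    assert base['region'] == 'us-east-1'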

def hyphenize_service_id(service_id):
    """Translate the form used for event emitters.

    :param service_id: The service_id to convert.
    """
    return service_id.replace(' ', '-').lower()


class S3RegionRedirector(object):
    def __init__(self, endpoint_bridge, client, cache=None):
        self._endpoint_resolver = endpoint_bridge
        self._cache = cache
        if self._cache is None:
            self._cache = {}

        # This needs to be a weak ref in order to prevent memory leaks on
        # python 2.6
        self._client = weakref.proxy(client)

    def register(self, event_emitter=None):
        emitter = event_emitter or self._client.meta.events
        emitter.register('needs-retry.s3', self.redirect_from_error)
        emitter.register('before-call.s3', self.set_request_url)
        emitter.register('before-parameter-build.s3',
                         self.redirect_from_cache)

    def redirect_from_error(self, request_dict, response, operation, **kwargs):
        """
        An S3 request sent to the wrong region will return an error that
        contains the endpoint the request should be sent to. This handler
        will add the redirect information to the signing context and then
        redirect the request.
        """
        if response is None:
            # This could be none if there was a ConnectionError or other
            # transport error.
            return

        if self._is_s3_accesspoint(request_dict.get('context', {})):
            logger.debug(
                'S3 request was previously to an accesspoint, not redirecting.'
            )
            return

        if request_dict.get('context', {}).get('s3_redirected'):
            logger.debug(
                'S3 request was previously redirected, not redirecting.')
            return

        error = response[1].get('Error', {})
        error_code = error.get('Code')
        response_metadata = response[1].get('ResponseMetadata', {})

        # We have to account for 400 responses because
        # if we sign a Head* request with the wrong region,
        # we'll get a 400 Bad Request but we won't get a
        # body saying it's an "AuthorizationHeaderMalformed".
        is_special_head_object = (
            error_code in ['301', '400'] and
            operation.name == 'HeadObject'
        )
        is_special_head_bucket = (
            error_code in ['301', '400'] and
            operation.name == 'HeadBucket' and
            'x-amz-bucket-region' in response_metadata.get('HTTPHeaders', {})
        )
        is_wrong_signing_region = (
            error_code == 'AuthorizationHeaderMalformed' and
            'Region' in error
        )
        is_redirect_status = response[0] is not None and \
            response[0].status_code in [301, 302, 307]
        is_permanent_redirect = error_code == 'PermanentRedirect'
        if not any([is_special_head_object, is_wrong_signing_region,
                    is_permanent_redirect, is_special_head_bucket,
                    is_redirect_status]):
            return

        bucket = request_dict['context']['signing']['bucket']
        client_region = request_dict['context'].get('client_region')
        new_region = self.get_bucket_region(bucket, response)

        if new_region is None:
            logger.debug(
                "S3 client configured for region %s but the bucket %s is not "
                "in that region and the proper region could not be "
                "automatically determined." % (client_region, bucket))
            return

        logger.debug(
            "S3 client configured for region %s but the bucket %s is in region"
            " %s; Please configure the proper region to avoid multiple "
            "unnecessary redirects and signing attempts." % (
                client_region, bucket, new_region))
        endpoint = self._endpoint_resolver.resolve('s3', new_region)
        endpoint = endpoint['endpoint_url']

        signing_context = {
            'region': new_region,
            'bucket': bucket,
            'endpoint': endpoint
        }
        request_dict['context']['signing'] = signing_context

        self._cache[bucket] = signing_context
        self.set_request_url(request_dict, request_dict['context'])

        request_dict['context']['s3_redirected'] = True

        # Return 0 so it doesn't wait to retry
        return 0

    def get_bucket_region(self, bucket, response):
        """
        There are multiple potential sources for the new region to redirect to,
        but they aren't all universally available for use. This will try to
        find region from response elements, but will fall back to calling
        HEAD on the bucket if all else fails.

        :param bucket: The bucket to find the region for. This is necessary if
            the region is not available in the error response.
        :param response: A response representing a service request that failed
            due to incorrect region configuration.
        """
        # First try to source the region from the headers.
        service_response = response[1]
        response_headers = service_response['ResponseMetadata']['HTTPHeaders']
        if 'x-amz-bucket-region' in response_headers:
            return response_headers['x-amz-bucket-region']

        # Next, check the error body.
        region = service_response.get('Error', {}).get('Region', None)
        if region is not None:
            return region

        # Finally, HEAD the bucket. No other choice sadly.
        try:
            response = self._client.head_bucket(Bucket=bucket)
            headers = response['ResponseMetadata']['HTTPHeaders']
        except ClientError as e:
            headers = e.response['ResponseMetadata']['HTTPHeaders']

        region = headers.get('x-amz-bucket-region', None)
        return region

    def set_request_url(self, params, context, **kwargs):
        endpoint = context.get('signing', {}).get('endpoint', None)
        if endpoint is not None:
            params['url'] = _get_new_endpoint(params['url'], endpoint, False)

    def redirect_from_cache(self, params, context, **kwargs):
        """
        This handler retrieves a given bucket's signing context from the cache
        and adds it into the request context.
        """
        bucket = params.get('Bucket')
        signing_context = self._cache.get(bucket)
        if signing_context is not None:
            context['signing'] = signing_context
        else:
            context['signing'] = {'bucket': bucket}

    def _is_s3_accesspoint(self, context):
        return 's3_accesspoint' in context


class InvalidArnException(ValueError):
    pass


class ArnParser(object):
    def parse_arn(self, arn):
        arn_parts = arn.split(':', 5)
        if len(arn_parts) < 6:
            raise InvalidArnException(
                'Provided ARN: %s must be of the format: '
                'arn:partition:service:region:account:resource' % arn
            )
        return {
            'partition': arn_parts[1],
            'service': arn_parts[2],
            'region': arn_parts[3],
            'account': arn_parts[4],
            'resource': arn_parts[5],
        }


class S3ArnParamHandler(object):
    _RESOURCE_REGEX = re.compile(
        r'^(?P<resource_type>accesspoint|outpost)[/:](?P<resource_name>.+)$'
    )
    _OUTPOST_RESOURCE_REGEX = re.compile(
        r'^(?P<outpost_name>[a-zA-Z0-9\-]{1,63})[/:]accesspoint[/:]'
        r'(?P<accesspoint_name>[a-zA-Z0-9\-]{1,63}$)'
    )
    _BLACKLISTED_OPERATIONS = [
        'CreateBucket'
    ]

    def __init__(self, arn_parser=None):
        self._arn_parser = arn_parser
        if arn_parser is None:
            self._arn_parser = ArnParser()

    def register(self, event_emitter):
        event_emitter.register('before-parameter-build.s3', self.handle_arn)

    def handle_arn(self, params, model, context, **kwargs):
        if model.name in self._BLACKLISTED_OPERATIONS:
            return
        arn_details = self._get_arn_details_from_bucket_param(params)
        if arn_details is None:
            return
        if arn_details['resource_type'] == 'accesspoint':
            self._store_accesspoint(params, context, arn_details)
        elif arn_details['resource_type'] == 'outpost':
            self._store_outpost(params, context, arn_details)

    def _get_arn_details_from_bucket_param(self, params):
        if 'Bucket' in params:
            try:
                arn = params['Bucket']
                arn_details = self._arn_parser.parse_arn(arn)
                self._add_resource_type_and_name(arn, arn_details)
                return arn_details
            except InvalidArnException:
                pass
        return None

    def _add_resource_type_and_name(self, arn, arn_details):
        match = self._RESOURCE_REGEX.match(arn_details['resource'])
        if match:
            arn_details['resource_type'] = match.group('resource_type')
            arn_details['resource_name'] = match.group('resource_name')
        else:
            raise UnsupportedS3ArnError(arn=arn)

    def _store_accesspoint(self, params, context, arn_details):
        # Access points are not modeled in S3 operations, so the access-point
        # name is set as the Bucket parameter to pass validation, and the arn
        # details are stashed on the context for the before-sign handler.
        params['Bucket'] = arn_details['resource_name']
        context['s3_accesspoint'] = {
            'name': arn_details['resource_name'],
            'account': arn_details['account'],
            'partition': arn_details['partition'],
            'region': arn_details['region'],
            'service': arn_details['service'],
        }

    def _store_outpost(self, params, context, arn_details):
        resource_name = arn_details['resource_name']
        match = self._OUTPOST_RESOURCE_REGEX.match(resource_name)
        if not match:
            raise UnsupportedOutpostResourceError(resource_name=resource_name)
        # The access point name is used as the Bucket parameter to be
        # consistent with normal access point arns.
        accesspoint_name = match.group('accesspoint_name')
        params['Bucket'] = accesspoint_name
        context['s3_accesspoint'] = {
            'outpost_name': match.group('outpost_name'),
            'name': accesspoint_name,
            'account': arn_details['account'],
            'partition': arn_details['partition'],
            'region': arn_details['region'],
            'service': arn_details['service'],
        }
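
# Illustrative sketch, not part of the original module: parsing an S3
# access-point ARN the way S3ArnParamHandler does before stashing the
# details on the request context. The ARN values are made up.
def _example_parse_accesspoint_arn():
    arn = 'arn:aws:s3:us-west-2:123456789012:accesspoint/myendpoint'
    details = ArnParser().parse_arn(arn)
    assert details['service'] == 's3'
    assert details['region'] == 'us-west-2'
    assert details['resource'] == 'accesspoint/myendpoint'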

class S3EndpointSetter(object):
    _DEFAULT_PARTITION = 'aws'
    _DEFAULT_DNS_SUFFIX = 'amazonaws.com'

    def __init__(self, endpoint_resolver, region=None,
                 s3_config=None, endpoint_url=None, partition=None):
        self._endpoint_resolver = endpoint_resolver
        self._region = region
        self._s3_config = s3_config
        if s3_config is None:
            self._s3_config = {}
        self._endpoint_url = endpoint_url
        self._partition = partition
        if partition is None:
            self._partition = self._DEFAULT_PARTITION

    def register(self, event_emitter):
        event_emitter.register('before-sign.s3', self.set_endpoint)
        event_emitter.register(
            'before-call.s3.WriteGetObjectResponse',
            self.update_endpoint_to_s3_object_lambda)

    def update_endpoint_to_s3_object_lambda(self, params, context, **kwargs):
        if self._use_accelerate_endpoint:
            raise UnsupportedS3ConfigurationError(
                msg='S3 client does not support accelerate endpoints for '
                    'S3 Object Lambda operations',
            )

        self._override_signing_name(context, 's3-object-lambda')
        if self._endpoint_url:
            # Only update the url if an explicit url was not provided.
            return

        resolver = self._endpoint_resolver
        resolved = resolver.construct_endpoint('s3-object-lambda', self._region)

        new_endpoint = 'https://{host_prefix}{hostname}'.format(
            host_prefix=params['host_prefix'],
            hostname=resolved['hostname'],
        )

        params['url'] = _get_new_endpoint(params['url'], new_endpoint, False)

    def set_endpoint(self, request, **kwargs):
        if self._use_accesspoint_endpoint(request):
            self._validate_accesspoint_supported(request)
            region_name = self._resolve_region_for_accesspoint_endpoint(
                request)
            self._resolve_signing_name_for_accesspoint_endpoint(request)
            self._switch_to_accesspoint_endpoint(request, region_name)
            return
        if self._use_accelerate_endpoint:
            switch_host_s3_accelerate(request=request, **kwargs)
        if self._s3_addressing_handler:
            self._s3_addressing_handler(request=request, **kwargs)

    def _use_accesspoint_endpoint(self, request):
        return 's3_accesspoint' in request.context

    def _validate_accesspoint_supported(self, request):
        if self._use_accelerate_endpoint:
            raise UnsupportedS3AccesspointConfigurationError(
                msg=(
                    'Client does not support s3 accelerate configuration '
                    'when an access-point ARN is specified.'
                )
            )
        request_partition = request.context['s3_accesspoint']['partition']
        if request_partition != self._partition:
            raise UnsupportedS3AccesspointConfigurationError(
                msg=(
                    'Client is configured for "%s" partition, but access-point'
                    ' ARN provided is for "%s" partition. The client and '
                    'access-point partition must be the same.' % (
                        self._partition, request_partition)
                )
            )
        s3_service = request.context['s3_accesspoint'].get('service')
        if s3_service == 's3-object-lambda' and \
                self._s3_config.get('use_dualstack_endpoint'):
            raise UnsupportedS3AccesspointConfigurationError(
                msg=(
                    'Client does not support s3 dualstack configuration '
                    'when an S3 Object Lambda access point ARN is specified.'
                )
            )

    def _resolve_region_for_accesspoint_endpoint(self, request):
        if self._s3_config.get('use_arn_region', True):
            accesspoint_region = request.context['s3_accesspoint']['region']
            # If the region from the access point is used, it must also be
            # set as the signing region.
            self._override_signing_region(request, accesspoint_region)
            return accesspoint_region
        return self._region

    def _resolve_signing_name_for_accesspoint_endpoint(self, request):
        accesspoint_service = request.context['s3_accesspoint']['service']
        self._override_signing_name(request.context, accesspoint_service)

    def _switch_to_accesspoint_endpoint(self, request, region_name):
        original_components = urlsplit(request.url)
        accesspoint_endpoint = urlunsplit((
            original_components.scheme,
            self._get_netloc(request.context, region_name),
            self._get_accesspoint_path(
                original_components.path, request.context),
            original_components.query,
            ''
        ))
        logger.debug(
            'Updating URI from %s to %s' % (request.url, accesspoint_endpoint))
        request.url = accesspoint_endpoint

    def _get_netloc(self, request_context, region_name):
        s3_accesspoint = request_context['s3_accesspoint']
        accesspoint_netloc_components = [
            '%s-%s' % (s3_accesspoint['name'], s3_accesspoint['account']),
        ]
        outpost_name = s3_accesspoint.get('outpost_name')
        if self._endpoint_url:
            if outpost_name:
                accesspoint_netloc_components.append(outpost_name)
            endpoint_url_netloc = urlsplit(self._endpoint_url).netloc
            accesspoint_netloc_components.append(endpoint_url_netloc)
        else:
            if outpost_name:
                outpost_host = [outpost_name, 's3-outposts']
                accesspoint_netloc_components.extend(outpost_host)
            elif s3_accesspoint['service'] == 's3-object-lambda':
                accesspoint_netloc_components.append('s3-object-lambda')
            else:
                accesspoint_netloc_components.append('s3-accesspoint')
            if self._s3_config.get('use_dualstack_endpoint'):
                accesspoint_netloc_components.append('dualstack')
            accesspoint_netloc_components.extend(
                [
                    region_name,
                    self._get_dns_suffix(region_name)
                ]
            )
        return '.'.join(accesspoint_netloc_components)

    def _get_accesspoint_path(self, original_path, request_context):
        # The Bucket parameter was substituted with the access-point name as
        # some value was required in serializing the bucket name. Now that
        # the request goes directly to the access point, that name must be
        # removed from the path.
        name = request_context['s3_accesspoint']['name']
        # All S3 operations require at least a / in their path.
        return original_path.replace('/' + name, '', 1) or '/'

    def _get_dns_suffix(self, region_name):
        resolved = self._endpoint_resolver.construct_endpoint(
            's3', region_name)
        dns_suffix = self._DEFAULT_DNS_SUFFIX
        if resolved and 'dnsSuffix' in resolved:
            dns_suffix = resolved['dnsSuffix']
        return dns_suffix

    def _override_signing_region(self, request, region_name):
        signing_context = request.context.get('signing', {})
        # S3SigV4Auth will use the context['signing']['region'] value to
        # sign with if present.
        signing_context['region'] = region_name
        request.context['signing'] = signing_context

    def _override_signing_name(self, context, signing_name):
        signing_context = context.get('signing', {})
        # S3SigV4Auth will use the context['signing']['signing_name'] value
        # to sign with if present.
        signing_context['signing_name'] = signing_name
        context['signing'] = signing_context

    @CachedProperty
    def _use_accelerate_endpoint(self):
        # Accelerate has been explicitly configured.
        if self._s3_config.get('use_accelerate_endpoint'):
            return True

        # Accelerate mode is turned on automatically if an endpoint url is
        # provided that matches the accelerate scheme.
        if self._endpoint_url is None:
            return False

        # Accelerate is only valid for Amazon endpoints.
        netloc = urlsplit(self._endpoint_url).netloc
        if not netloc.endswith('amazonaws.com'):
            return False

        # The first part of the url should always be s3-accelerate.
        parts = netloc.split('.')
        if parts[0] != 's3-accelerate':
            return False

        # Url parts between 's3-accelerate' and 'amazonaws.com' which
        # represent different url features.
        feature_parts = parts[1:-2]

        # There should be no duplicate url parts.
        if len(feature_parts) != len(set(feature_parts)):
            return False

        # Remaining parts must all be in the whitelist.
        return all(p in S3_ACCELERATE_WHITELIST for p in feature_parts)

    @CachedProperty
    def _addressing_style(self):
        # Use virtual host style addressing if accelerate is enabled or if
        # the given endpoint url is an accelerate endpoint.
        if self._use_accelerate_endpoint:
            return 'virtual'

        # If a particular addressing style is configured, use it.
        configured_addressing_style = self._s3_config.get('addressing_style')
        if configured_addressing_style:
            return configured_addressing_style

    @CachedProperty
    def _s3_addressing_handler(self):
        # If virtual host style was configured, use it regardless of whether
        # or not the bucket looks dns compatible.
        if self._addressing_style == 'virtual':
            logger.debug("Using S3 virtual host style addressing.")
            return switch_to_virtual_host_style

        # If path style is configured, no additional steps are needed. If
        # an endpoint_url was specified, don't default to virtual either.
        if self._addressing_style == 'path' or self._endpoint_url is not None:
            logger.debug("Using S3 path style addressing.")
            return None

        logger.debug("Defaulting to S3 virtual host style addressing with "
                     "path style addressing fallback.")

        # By default, try to use virtual style with path fallback.
        return fix_s3_host
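
# Illustrative sketch, not part of the original module: how the
# addressing-style decision above plays out. This pokes at private members
# purely for demonstration; with no configuration at all, the setter
# defaults to virtual-host addressing with a path-style fallback
# (fix_s3_host).
def _example_addressing_style():
    setter = S3EndpointSetter(endpoint_resolver=None,
                              s3_config={'addressing_style': 'path'})
    assert setter._s3_addressing_handler is None  # path style: no rewriting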

class S3ControlEndpointSetter(object):
    _DEFAULT_PARTITION = 'aws'
    _DEFAULT_DNS_SUFFIX = 'amazonaws.com'
    _HOST_LABEL_REGEX = re.compile(r'^[a-zA-Z0-9\-]{1,63}$')

    def __init__(self, endpoint_resolver, region=None,
                 s3_config=None, endpoint_url=None, partition=None):
        self._endpoint_resolver = endpoint_resolver
        self._region = region
        self._s3_config = s3_config
        if s3_config is None:
            self._s3_config = {}
        self._endpoint_url = endpoint_url
        self._partition = partition
        if partition is None:
            self._partition = self._DEFAULT_PARTITION

    def register(self, event_emitter):
        event_emitter.register('before-sign.s3-control', self.set_endpoint)

    def set_endpoint(self, request, **kwargs):
        if self._use_endpoint_from_arn_details(request):
            self._validate_endpoint_from_arn_details_supported(request)
            region_name = self._resolve_region_from_arn_details(request)
            self._resolve_signing_name_from_arn_details(request)
            self._resolve_endpoint_from_arn_details(request, region_name)
            self._add_headers_from_arn_details(request)
        elif self._use_endpoint_from_outpost_id(request):
            self._validate_outpost_redirection_valid(request)
            outpost_id = request.context['outpost_id']
            self._override_signing_name(request, 's3-outposts')
            new_netloc = self._construct_outpost_endpoint(self._region)
            self._update_request_netloc(request, new_netloc)

    def _use_endpoint_from_arn_details(self, request):
        return 'arn_details' in request.context

    def _use_endpoint_from_outpost_id(self, request):
        return 'outpost_id' in request.context

    def _validate_endpoint_from_arn_details_supported(self, request):
        if not self._s3_config.get('use_arn_region', False):
            arn_region = request.context['arn_details']['region']
            if arn_region != self._region:
                error_msg = (
                    'The use_arn_region configuration is disabled but '
                    'received arn for "%s" when the client is configured '
                    'to use "%s"') % (arn_region, self._region)
                raise UnsupportedS3ControlConfigurationError(msg=error_msg)
        request_partition = request.context['arn_details']['partition']
        if request_partition != self._partition:
            raise UnsupportedS3ControlConfigurationError(
                msg=(
                    'Client is configured for "%s" partition, but arn '
                    'provided is for "%s" partition. The client and '
                    'arn partition must be the same.' % (
                        self._partition, request_partition)
                )
            )
        if self._s3_config.get('use_accelerate_endpoint'):
            raise UnsupportedS3ControlConfigurationError(
                msg='S3 control client does not support accelerate endpoints',
            )
        if 'outpost_name' in request.context['arn_details']:
            self._validate_outpost_redirection_valid(request)

    def _validate_outpost_redirection_valid(self, request):
        if self._s3_config.get('use_dualstack_endpoint'):
            raise UnsupportedS3ControlConfigurationError(
                msg=(
                    'Client does not support s3 dualstack configuration '
                    'when an outpost is specified.'
                )
            )

    def _resolve_region_from_arn_details(self, request):
        if self._s3_config.get('use_arn_region', False):
            arn_region = request.context['arn_details']['region']
            # If the region from the expanded arn is used, it must also be
            # set as the signing region.
            self._override_signing_region(request, arn_region)
            return arn_region
        return self._region

    def _resolve_signing_name_from_arn_details(self, request):
        arn_service = request.context['arn_details']['service']
        self._override_signing_name(request, arn_service)
        return arn_service

    def _resolve_endpoint_from_arn_details(self, request, region_name):
        new_netloc = self._resolve_netloc_from_arn_details(
            request, region_name)
        self._update_request_netloc(request, new_netloc)

    def _update_request_netloc(self, request, new_netloc):
        original_components = urlsplit(request.url)
        arn_details_endpoint = urlunsplit((
            original_components.scheme,
            new_netloc,
            original_components.path,
            original_components.query,
            ''
        ))
        logger.debug(
            'Updating URI from %s to %s' % (request.url, arn_details_endpoint)
        )
        request.url = arn_details_endpoint

    def _resolve_netloc_from_arn_details(self, request, region_name):
        arn_details = request.context['arn_details']
        if 'outpost_name' in arn_details:
            return self._construct_outpost_endpoint(region_name)
        account = arn_details['account']
        return self._construct_s3_control_endpoint(region_name, account)

    def _is_valid_host_label(self, label):
        return self._HOST_LABEL_REGEX.match(label)

    def _validate_host_labels(self, *labels):
        for label in labels:
            if not self._is_valid_host_label(label):
                raise InvalidHostLabelError(label=label)

    def _construct_s3_control_endpoint(self, region_name, account):
        self._validate_host_labels(region_name, account)
        if self._endpoint_url:
            endpoint_url_netloc = urlsplit(self._endpoint_url).netloc
            netloc = [account, endpoint_url_netloc]
        else:
            netloc = [
                account,
                's3-control',
            ]
            self._add_dualstack(netloc)
            dns_suffix = self._get_dns_suffix(region_name)
            netloc.extend([region_name, dns_suffix])
        return self._construct_netloc(netloc)

    def _construct_outpost_endpoint(self, region_name):
        self._validate_host_labels(region_name)
        if self._endpoint_url:
            return urlsplit(self._endpoint_url).netloc
        else:
            netloc = [
                's3-outposts',
                region_name,
                self._get_dns_suffix(region_name),
            ]
            return self._construct_netloc(netloc)

    def _construct_netloc(self, netloc):
        return '.'.join(netloc)

    def _add_dualstack(self, netloc):
        if self._s3_config.get('use_dualstack_endpoint'):
            netloc.append('dualstack')

    def _get_dns_suffix(self, region_name):
        resolved = self._endpoint_resolver.construct_endpoint(
            's3', region_name)
        dns_suffix = self._DEFAULT_DNS_SUFFIX
        if resolved and 'dnsSuffix' in resolved:
            dns_suffix = resolved['dnsSuffix']
        return dns_suffix

    def _override_signing_region(self, request, region_name):
        signing_context = request.context.get('signing', {})
        signing_context['region'] = region_name
        request.context['signing'] = signing_context

    def _override_signing_name(self, request, signing_name):
        signing_context = request.context.get('signing', {})
        signing_context['signing_name'] = signing_name
        request.context['signing'] = signing_context

    def _add_headers_from_arn_details(self, request):
        arn_details = request.context['arn_details']
        outpost_name = arn_details.get('outpost_name')
        if outpost_name:
            self._add_outpost_id_header(request, outpost_name)

    def _add_outpost_id_header(self, request, outpost_name):
        request.headers['x-amz-outpost-id'] = outpost_name
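
# Illustrative sketch, not part of the original module: the netloc that
# S3ControlEndpointSetter would build for an S3 on Outposts request in
# us-west-2 when no custom endpoint_url is configured. The default
# amazonaws.com suffix is assumed, and _construct_netloc is called directly
# to avoid needing a real endpoint resolver.
def _example_outpost_netloc():
    setter = S3ControlEndpointSetter(endpoint_resolver=None,
                                     region='us-west-2')
    netloc = setter._construct_netloc(
        ['s3-outposts', 'us-west-2', setter._DEFAULT_DNS_SUFFIX])
    assert netloc == 's3-outposts.us-west-2.amazonaws.com'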

class S3ControlArnParamHandler(object):
    _RESOURCE_SPLIT_REGEX = re.compile(r'[/:]')

    def __init__(self, arn_parser=None):
        self._arn_parser = arn_parser
        if arn_parser is None:
            self._arn_parser = ArnParser()

    def register(self, event_emitter):
        event_emitter.register(
            'before-parameter-build.s3-control',
            self.handle_arn,
        )

    def handle_arn(self, params, model, context, **kwargs):
        if model.name in ('CreateBucket', 'ListRegionalBuckets'):
            # CreateBucket and ListRegionalBuckets are special cases that do
            # not obey ARN based redirection but will redirect based off of
            # the presence of the OutpostId parameter.
            self._handle_outpost_id_param(params, model, context)
        else:
            self._handle_name_param(params, model, context)
            self._handle_bucket_param(params, model, context)

    def _get_arn_details_from_param(self, params, param_name):
        if param_name not in params:
            return None
        try:
            arn = params[param_name]
            arn_details = self._arn_parser.parse_arn(arn)
            arn_details['original'] = arn
            arn_details['resources'] = self._split_resource(arn_details)
            return arn_details
        except InvalidArnException:
            return None

    def _split_resource(self, arn_details):
        return self._RESOURCE_SPLIT_REGEX.split(arn_details['resource'])

    def _override_account_id_param(self, params, arn_details):
        account_id = arn_details['account']
        if 'AccountId' in params and params['AccountId'] != account_id:
            error_msg = (
                'Account ID in arn does not match the AccountId parameter '
                'provided: "%s"') % params['AccountId']
            raise UnsupportedS3ControlArnError(
                arn=arn_details['original'],
                msg=error_msg,
            )
        params['AccountId'] = account_id

    def _handle_outpost_id_param(self, params, model, context):
        if 'OutpostId' not in params:
            return
        context['outpost_id'] = params['OutpostId']

    def _handle_name_param(self, params, model, context):
        # CreateAccessPoint is a special case that does not expand Name.
        if model.name == 'CreateAccessPoint':
            return
        arn_details = self._get_arn_details_from_param(params, 'Name')
        if arn_details is None:
            return
        if self._is_outpost_accesspoint(arn_details):
            self._store_outpost_accesspoint(params, context, arn_details)
        else:
            error_msg = 'The Name parameter does not support the provided ARN'
            raise UnsupportedS3ControlArnError(
                arn=arn_details['original'],
                msg=error_msg,
            )

    def _is_outpost_accesspoint(self, arn_details):
        if arn_details['service'] != 's3-outposts':
            return False
        resources = arn_details['resources']
        if len(resources) != 4:
            return False
        # Resource must be of the form outpost/op-123/accesspoint/name
        return resources[0] == 'outpost' and resources[2] == 'accesspoint'

    def _store_outpost_accesspoint(self, params, context, arn_details):
        self._override_account_id_param(params, arn_details)
        accesspoint_name = arn_details['resources'][3]
        params['Name'] = accesspoint_name
        arn_details['accesspoint_name'] = accesspoint_name
        arn_details['outpost_name'] = arn_details['resources'][1]
        context['arn_details'] = arn_details

    def _handle_bucket_param(self, params, model, context):
        arn_details = self._get_arn_details_from_param(params, 'Bucket')
        if arn_details is None:
            return
        if self._is_outpost_bucket(arn_details):
            self._store_outpost_bucket(params, context, arn_details)
        else:
            error_msg = (
                'The Bucket parameter does not support the provided ARN')
            raise UnsupportedS3ControlArnError(
                arn=arn_details['original'],
                msg=error_msg,
            )

    def _is_outpost_bucket(self, arn_details):
        if arn_details['service'] != 's3-outposts':
            return False
        resources = arn_details['resources']
        if len(resources) != 4:
            return False
        # Resource must be of the form outpost/op-123/bucket/name
        return resources[0] == 'outpost' and resources[2] == 'bucket'

    def _store_outpost_bucket(self, params, context, arn_details):
        self._override_account_id_param(params, arn_details)
        bucket_name = arn_details['resources'][3]
        params['Bucket'] = bucket_name
        arn_details['bucket_name'] = bucket_name
        arn_details['outpost_name'] = arn_details['resources'][1]
        context['arn_details'] = arn_details


class ContainerMetadataFetcher(object):

    TIMEOUT_SECONDS = 2
    RETRY_ATTEMPTS = 3
    SLEEP_TIME = 1
    IP_ADDRESS = '169.254.170.2'
    _ALLOWED_HOSTS = [IP_ADDRESS, 'localhost', '127.0.0.1']

    def __init__(self, session=None, sleep=time.sleep):
        if session is None:
            session = botocore.httpsession.URLLib3Session(
                timeout=self.TIMEOUT_SECONDS)
        self._session = session
        self._sleep = sleep

    def retrieve_full_uri(self, full_url, headers=None):
        """Retrieve JSON metadata from container metadata.

        :type full_url: str
        :param full_url: The full URL of the metadata service.
            This should include the scheme as well, e.g
            "http://localhost:123/foo"

        """
        self._validate_allowed_url(full_url)
        return self._retrieve_credentials(full_url, headers)

    def _validate_allowed_url(self, full_url):
        parsed = urlparse(full_url)
        is_whitelisted_host = self._check_if_whitelisted_host(
            parsed.hostname)
        if not is_whitelisted_host:
            raise ValueError(
                "Unsupported host '%s'.  Can only "
                "retrieve metadata from these hosts: %s" %
                (parsed.hostname, ', '.join(self._ALLOWED_HOSTS)))

    def _check_if_whitelisted_host(self, host):
        if host in self._ALLOWED_HOSTS:
            return True
        return False

    def retrieve_uri(self, relative_uri):
        """Retrieve JSON metadata from ECS metadata.

        :type relative_uri: str
        :param relative_uri: A relative URI, e.g "/foo/bar?id=123"

        :return: The parsed JSON response.

        """
        full_url = self.full_url(relative_uri)
        return self._retrieve_credentials(full_url)

    def _retrieve_credentials(self, full_url, extra_headers=None):
        headers = {'Accept': 'application/json'}
        if extra_headers is not None:
            headers.update(extra_headers)
        attempts = 0
        while True:
            try:
                return self._get_response(
                    full_url, headers, self.TIMEOUT_SECONDS)
            except MetadataRetrievalError as e:
                logger.debug("Received error when attempting to retrieve "
                             "container metadata: %s", e, exc_info=True)
                self._sleep(self.SLEEP_TIME)
                attempts += 1
                if attempts >= self.RETRY_ATTEMPTS:
                    raise

    def _get_response(self, full_url, headers, timeout):
        try:
            AWSRequest = botocore.awsrequest.AWSRequest
            request = AWSRequest(method='GET', url=full_url, headers=headers)
            response = self._session.send(request.prepare())
            response_text = response.content.decode('utf-8')
            if response.status_code != 200:
                raise MetadataRetrievalError(
                    error_msg=(
                        "Received non 200 response (%s) from ECS metadata: %s"
                    ) % (response.status_code, response_text))
            try:
                return json.loads(response_text)
            except ValueError:
                error_msg = (
                    "Unable to parse JSON returned from ECS metadata services"
                )
                logger.debug('%s:%s', error_msg, response_text)
                raise MetadataRetrievalError(error_msg=error_msg)
        except RETRYABLE_HTTP_ERRORS as e:
            error_msg = ("Received error when attempting to retrieve "
                         "ECS metadata: %s" % e)
            raise MetadataRetrievalError(error_msg=error_msg)

    def full_url(self, relative_uri):
        return 'http://%s%s' % (self.IP_ADDRESS, relative_uri)


def get_environ_proxies(url):
    if should_bypass_proxies(url):
        return {}
    else:
        return getproxies()


def should_bypass_proxies(url):
    """
    Returns whether we should bypass proxies or not.
    """
    # NOTE: requests allowed for ip/cidr entries in no_proxy env that we don't
    # support current as urllib only checks DNS suffix.
    # The proxy_bypass call can fail in unexpected ways, so only the
    # exceptions that have actually been observed are caught here.
    try:
        if proxy_bypass(urlparse(url).netloc):
            return True
    except (TypeError, socket.gaierror):
        pass

    return False


def get_encoding_from_headers(headers, default='ISO-8859-1'):
    """Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    :param default: default encoding if the content-type is text
    """
    content_type = headers.get('content-type')

    if not content_type:
        return None

    content_type, params = cgi.parse_header(content_type)

    if 'charset' in params:
        return params['charset'].strip("'\"")

    if 'text' in content_type:
        return default


def calculate_md5(body, **kwargs):
    if isinstance(body, (bytes, bytearray)):
        binary_md5 = _calculate_md5_from_bytes(body)
    else:
        binary_md5 = _calculate_md5_from_file(body)
    return base64.b64encode(binary_md5).decode('ascii')


def _calculate_md5_from_bytes(body_bytes):
    md5 = get_md5(body_bytes)
    return md5.digest()


def _calculate_md5_from_file(fileobj):
    start_position = fileobj.tell()
    md5 = get_md5()
    for chunk in iter(lambda: fileobj.read(1024 * 1024), b''):
        md5.update(chunk)
    fileobj.seek(start_position)
    return md5.digest()


def conditionally_calculate_md5(params, **kwargs):
    """Only add a Content-MD5 if the system supports it."""
    headers = params['headers']
    body = params['body']
    if MD5_AVAILABLE and body is not None and 'Content-MD5' not in headers:
        md5_digest = calculate_md5(body, **kwargs)
        params['headers']['Content-MD5'] = md5_digest
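
# Illustrative sketch, not part of the original module: Content-MD5
# injection on a hand-built request dict, as conditionally_calculate_md5
# performs it. The expected value is the well-known base64 MD5 of
# b'hello world'.
def _example_content_md5():
    params = {'headers': {}, 'body': b'hello world'}
    conditionally_calculate_md5(params)
    if MD5_AVAILABLE:
        assert params['headers']['Content-MD5'] == 'XrY7u+Ae7tCTyyK7j1rNww=='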