[Compiled-bytecode artifact: this section is the marshalled .pyc content of botocore/utils.py (installed under /tmp/pip-target-7cdyy134/lib/python/botocore/utils.py). The binary stream is not readable as source; the notes below keep only the module structure, names and docstrings that could be recovered from it.]

Module-level constants: METADATA_BASE_URL (http://169.254.169.254/) and METADATA_BASE_URL_IPv6 (http://[fe80:ec2::254%eth0]/) for the EC2 instance metadata service, the default metadata service timeout, SERVICE_NAME_ALIASES (a large endpoint-prefix to documented-service-name mapping, for example a4b -> alexa-for-business, monitoring -> cloudwatch, states -> sfn), S3_ACCELERATE_WHITELIST (['dualstack']), and the IPv4/IPv6 regular expressions combined into IPV6_ADDRZ_RE for endpoint validation.

General helpers: ensure_boolean(val) coerces a string or boolean to a boolean, matching 'true' case-insensitively; is_json_value_header(shape) detects the special jsonvalue header trait on a botocore shape; get_service_module_name(service_model) returns the module name used in documentation and client class names; normalize_url_path(path) and remove_dot_segments(url) normalize URI paths; validate_jmespath_for_set(expression) and set_value_from_jmespath(source, expression, value) write a value into a nested dict addressed by a simple (dot-only) JMESPath expression.
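A minimal sketch of these helpers in use; the function names come from the recovered module, while the literal inputs and the results shown in comments are my own reading of the recovered logic:

from botocore.utils import (
    ensure_boolean, normalize_url_path, set_value_from_jmespath)

print(ensure_boolean('TRUE'))    # True: strings are matched case-insensitively
print(ensure_boolean(False))     # False: booleans pass through unchanged

print(normalize_url_path('/foo/./bar/../baz'))   # '/foo/baz': dot segments removed

params = {}
# Builds the nested structure addressed by the dot-only expression.
set_value_from_jmespath(params, 'Metadata.Owner', 'data-team')
print(params)                    # {'Metadata': {'Owner': 'data-team'}}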
EC2 instance metadata (IMDS) support: IMDSFetcher wraps a botocore URLLib3Session with a configurable timeout and number of attempts, honors the AWS_EC2_METADATA_DISABLED environment variable, picks the IPv4 or IPv6 metadata endpoint (or a custom ec2_metadata_service_endpoint, warning if both a custom endpoint and imds_use_ipv6 are set), fetches an IMDSv2 token with a PUT to latest/api/token, and retries empty or non-200 responses while logging the failing URL and status. InstanceMetadataFetcher builds on it to read latest/meta-data/iam/security-credentials/ and the role's credential document, returning a dict with role_name, access_key, secret_key, token and expiry_time once all required fields (AccessKeyId, SecretAccessKey, Token, Expiration) are present, and an empty dict when retries are exhausted (_RetriesExceededError) or the token request is rejected (BadIMDSRequestError).

Encoding and time helpers: percent_encode_sequence turns a dict or sequence of pairs into a percent-encoded string, and percent_encode(input_str) encodes a single string, UTF-8 encoding text first; parse_timestamp(value) parses iso8601, rfc822 and epoch timestamps using the pluggable tzinfo options; parse_to_aware_datetime(value) normalizes datetimes, epoch values and iso8601 strings into a timezone-aware datetime (UTC is assumed when no zone is given); datetime2timestamp(dt, default_timezone=None) converts back to an epoch value.
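A sketch of the credential fetcher in use; it only returns data when run on an EC2 instance with an instance profile, and the timeout and attempt counts here are arbitrary:

from botocore.utils import InstanceMetadataFetcher

fetcher = InstanceMetadataFetcher(timeout=2, num_attempts=3)
creds = fetcher.retrieve_iam_role_credentials()
if creds:
    # Keys recovered from the bytecode: role_name, access_key, secret_key,
    # token, expiry_time.
    print(creds['role_name'], creds['expiry_time'])
else:
    print('IMDS unavailable or disabled; an empty dict is returned on failure.')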
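The time helpers can be exercised like this; the sample values are made up:

from datetime import datetime
from botocore.utils import parse_to_aware_datetime, datetime2timestamp

aware = parse_to_aware_datetime('2021-07-15T12:30:00Z')   # ISO 8601 string
epoch = parse_to_aware_datetime(0)                        # epoch seconds, 1970-01-01 UTC
naive = parse_to_aware_datetime(datetime(2021, 7, 15))    # naive datetime, assumed UTC

print(datetime2timestamp(aware))   # seconds since the epoch for the parsed value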
Checksum and modeling helpers: calculate_sha256(body, as_hex=False) streams a binary file-like object and returns its SHA-256 digest as hex or raw bytes (the caller is responsible for seeking the file back afterwards); calculate_tree_hash(body) computes the Amazon Glacier tree hash over 1 MB chunks and returns it hex-encoded. CachedProperty is a read-only descriptor that calls its fget once and caches the result on the instance. ArgumentGenerator.generate_skeleton(shape) builds a skeleton input dictionary (empty strings, zeros, empty lists and maps, a placeholder timestamp) from a botocore.model shape, optionally using member names when constructed with use_member_names=True. is_valid_ipv6_endpoint_url(endpoint_url) and is_valid_endpoint_url(endpoint_url) validate endpoint URLs, which must carry at least a scheme and a hostname.
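The recovered ArgumentGenerator docstring carries its own usage example, reproduced here essentially verbatim (only the session import is added):

import botocore.session
from botocore.utils import ArgumentGenerator

s = botocore.session.get_session()
ddb = s.get_service_model('dynamodb')
arg_gen = ArgumentGenerator()
sample_input = arg_gen.generate_skeleton(
    ddb.operation_model('CreateTable').input_shape)
print("Sample input for dynamodb.CreateTable: %s" % sample_input)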
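A sketch of the checksum helpers and of CachedProperty; the payload and the ServiceInfo class are invented for illustration:

import io
from botocore.utils import CachedProperty, calculate_sha256, calculate_tree_hash

body = io.BytesIO(b'example payload')
print(calculate_sha256(body, as_hex=True))   # hex SHA-256 of the stream
body.seek(0)                                 # rewind; the helpers do not do this for us
print(calculate_tree_hash(body))             # Glacier-style tree hash, hex encoded

class ServiceInfo:
    @CachedProperty
    def partitions(self):
        print('computed once')               # printed only on the first access
        return ['aws', 'aws-cn']

info = ServiceInfo()
info.partitions   # computed and stored on the instance
info.partitions   # served from the cached value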
S3 request plumbing: instance_cache decorates methods so results are cached per instance and keyed on the call arguments; switch_host_s3_accelerate rewrites a request's host to the s3-accelerate (optionally dualstack) amazonaws.com endpoint for everything except ListBuckets, CreateBucket and DeleteBucket, and switch_host_with_param takes the new endpoint from a field in the JSON request body; deep_merge(base, extra) merges dictionaries recursively with keys from extra taking precedence; hyphenize_service_id lowercases a service id and replaces spaces with hyphens for event names. S3RegionRedirector registers for needs-retry.s3, before-call.s3 and before-parameter-build.s3 and re-signs and retries requests against the region S3 reports back on permanent-redirect style errors, caching the discovered region per bucket. S3ArnParamHandler expands access point and outpost ARNs passed as the Bucket parameter into arn_details on the request context for later endpoint handling.
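For instance, deep_merge mutates its first argument in place; the dictionaries here are made up:

from botocore.utils import deep_merge

base = {'s3': {'addressing_style': 'path', 'use_accelerate_endpoint': False}}
extra = {'s3': {'use_accelerate_endpoint': True}, 'retries': {'max_attempts': 5}}

deep_merge(base, extra)   # keys from extra win; nested dicts are merged, not replaced
print(base)
# {'s3': {'addressing_style': 'path', 'use_accelerate_endpoint': True},
#  'retries': {'max_attempts': 5}}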
S3EndpointSetter registers for before-sign.s3: for access point and outpost ARNs it resolves the signing region and signing name and rewrites the host (for example <name>-<account>.s3-accesspoint.<region>.<dnsSuffix>, or an s3-outposts host for outposts), it applies the accelerate endpoint when use_accelerate_endpoint is configured and the endpoint looks like a plain amazonaws.com S3 endpoint, and otherwise it chooses virtual-host or path style addressing from the configured addressing_style. S3ControlEndpointSetter does the equivalent work for before-sign.s3-control, building s3-control and s3-outposts hosts, validating host labels, handling dualstack and outpost ids, and adding the x-amz-outpost-id header when required. Both reject unsupported combinations: a custom endpoint_url together with an ARN, accelerate with an access point, dualstack with an outpost, or a client partition that does not match the ARN partition. S3ControlArnParamHandler validates and expands ARNs passed as the Name or Bucket parameter of S3 Control operations (for CreateBucket and ListRegionalBuckets it only copies the OutpostId parameter) and cross-checks any AccountId parameter against the account embedded in the ARN.
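These setters are wired up by the client automatically; the user-facing knobs they read are the s3 options on the client config. A sketch of supplying them (standard botocore client creation, not part of this module; the region and values are arbitrary):

import botocore.session
from botocore.config import Config

config = Config(s3={
    'addressing_style': 'virtual',       # or 'path' / 'auto'
    'use_accelerate_endpoint': False,    # switch the host to s3-accelerate when True
    'use_dualstack_endpoint': False,     # add the dualstack component to the host
    'use_arn_region': True,              # sign against the region embedded in an ARN
})

session = botocore.session.get_session()
s3 = session.create_client('s3', region_name='us-east-1', config=config)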
ContainerMetadataFetcher retrieves JSON credentials from the ECS container metadata service: retrieve_uri(relative_uri) targets http://169.254.170.2, retrieve_full_uri(full_url) accepts a complete URL whose host must be one of 169.254.170.2, localhost or 127.0.0.1, and both retry a few times with a short sleep and raise MetadataRetrievalError on non-200 or unparsable responses. The module ends with small HTTP helpers: get_environ_proxies(url) and should_bypass_proxies(url) for proxy handling, get_encoding_from_headers(headers, default='ISO-8859-1') for pulling a charset out of a Content-Type header, and calculate_md5(body) with its bytes and file-like variants for producing the base64-encoded Content-MD5 value when an MD5 implementation is available.