import base64
import binascii
import cgi
import datetime
import functools
import hashlib
import logging
import os
import random
import re
import socket
import time
import weakref

import dateutil.parser
from dateutil.tz import tzutc

import botocore
import botocore.awsrequest
import botocore.httpsession
from botocore.compat import (
    json, quote, zip_longest, urlsplit, urlunsplit, OrderedDict,
    six, urlparse, get_tzinfo_options, get_md5, MD5_AVAILABLE,
)
from botocore.vendored.six.moves.urllib.request import getproxies, proxy_bypass
from botocore.exceptions import (
    InvalidExpressionError, ConfigNotFound, InvalidDNSNameError, ClientError,
    MetadataRetrievalError, EndpointConnectionError, ReadTimeoutError,
    ConnectionClosedError, ConnectTimeoutError, UnsupportedS3ArnError,
    UnsupportedS3AccesspointConfigurationError, SSOTokenLoadError,
)

logger = logging.getLogger(__name__)
DEFAULT_METADATA_SERVICE_TIMEOUT = 1
METADATA_BASE_URL = 'http://169.254.169.254/'

# These are chars that do not need to be urlencoded.
SAFE_CHARS = '-._~'
LABEL_RE = re.compile(r'[a-z0-9][a-z0-9\-]*[a-z0-9]')
RETRYABLE_HTTP_ERRORS = (
    ReadTimeoutError, EndpointConnectionError,
    ConnectionClosedError, ConnectTimeoutError,
)
S3_ACCELERATE_WHITELIST = ['dualstack']

EVENT_ALIASES = {
    'a4b': 'alexa-for-business',
    'alexaforbusiness': 'alexa-for-business',
    'api.mediatailor': 'mediatailor',
    'api.pricing': 'pricing',
    'api.sagemaker': 'sagemaker',
    'apigateway': 'api-gateway',
    'application-autoscaling': 'application-auto-scaling',
    'appstream2': 'appstream',
    'autoscaling': 'auto-scaling',
    'autoscaling-plans': 'auto-scaling-plans',
    'ce': 'cost-explorer',
    'cloudhsmv2': 'cloudhsm-v2',
    'cloudsearchdomain': 'cloudsearch-domain',
    'cognito-idp': 'cognito-identity-provider',
    'config': 'config-service',
    'cur': 'cost-and-usage-report-service',
    'data.iot': 'iot-data-plane',
    'data.jobs.iot': 'iot-jobs-data-plane',
    'data.mediastore': 'mediastore-data',
    'datapipeline': 'data-pipeline',
    'devicefarm': 'device-farm',
    'devices.iot1click': 'iot-1click-devices-service',
    'directconnect': 'direct-connect',
    'discovery': 'application-discovery-service',
    'dms': 'database-migration-service',
    'ds': 'directory-service',
    'dynamodbstreams': 'dynamodb-streams',
    'elasticbeanstalk': 'elastic-beanstalk',
    'elasticfilesystem': 'efs',
    'elasticloadbalancing': 'elastic-load-balancing',
    'elasticmapreduce': 'emr',
    'elastictranscoder': 'elastic-transcoder',
    'elb': 'elastic-load-balancing',
    'elbv2': 'elastic-load-balancing-v2',
    'email': 'ses',
    'entitlement.marketplace': 'marketplace-entitlement-service',
    'es': 'elasticsearch-service',
    'events': 'eventbridge',
    'cloudwatch-events': 'eventbridge',
    'iot-data': 'iot-data-plane',
    'iot-jobs-data': 'iot-jobs-data-plane',
    'iot1click-devices': 'iot-1click-devices-service',
    'iot1click-projects': 'iot-1click-projects',
    'kinesisanalytics': 'kinesis-analytics',
    'kinesisvideo': 'kinesis-video',
    'lex-models': 'lex-model-building-service',
    'lex-runtime': 'lex-runtime-service',
    'logs': 'cloudwatch-logs',
    'machinelearning': 'machine-learning',
    'marketplace-entitlement': 'marketplace-entitlement-service',
    'marketplacecommerceanalytics': 'marketplace-commerce-analytics',
    'metering.marketplace': 'marketplace-metering',
    'meteringmarketplace': 'marketplace-metering',
    'mgh': 'migration-hub',
    'models.lex': 'lex-model-building-service',
    'monitoring': 'cloudwatch',
    'mturk-requester': 'mturk',
    'opsworks-cm': 'opsworkscm',
    'projects.iot1click': 'iot-1click-projects',
    'resourcegroupstaggingapi': 'resource-groups-tagging-api',
    'route53': 'route-53',
    'route53domains': 'route-53-domains',
    'runtime.lex': 'lex-runtime-service',
    'runtime.sagemaker': 'sagemaker-runtime',
    'sdb': 'simpledb',
    'secretsmanager': 'secrets-manager',
    'serverlessrepo': 'serverlessapplicationrepository',
    'servicecatalog': 'service-catalog',
    'states': 'sfn',
    'stepfunctions': 'sfn',
    'storagegateway': 'storage-gateway',
    'streams.dynamodb': 'dynamodb-streams',
    'tagging': 'resource-groups-tagging-api',
}


def ensure_boolean(val):
    """Ensures a boolean value if a string or boolean is provided

    For strings, the value for True/False is case insensitive
    """
    if isinstance(val, bool):
        return val
    else:
        return val.lower() == 'true'
def is_json_value_header(shape):
    """Determines if the provided shape is the special header type jsonvalue.

    :type shape: botocore.shape
    :param shape: Shape to be inspected for the jsonvalue trait.

    :return: True if this type is a jsonvalue, False otherwise
    :rtype: Bool
    """
    return (hasattr(shape, 'serialization') and
            shape.serialization.get('jsonvalue', False) and
            shape.serialization.get('location') == 'header' and
            shape.type_name == 'string')


def get_service_module_name(service_model):
    """Returns the module name for a service

    This is the value used in both the documentation and client class name
    """
    name = service_model.metadata.get(
        'serviceAbbreviation',
        service_model.metadata.get(
            'serviceFullName', service_model.service_name))
    name = name.replace('Amazon', '')
    name = name.replace('AWS', '')
    name = re.sub(r'\W+', '', name)
    return name


def normalize_url_path(path):
    if not path:
        return '/'
    return remove_dot_segments(path)


def remove_dot_segments(url):
    # RFC 3986, section 5.2.4 "Remove Dot Segments"
    if not url:
        return ''
    input_url = url.split('/')
    output_list = []
    for x in input_url:
        if x and x != '.':
            if x == '..':
                if output_list:
                    output_list.pop()
            else:
                output_list.append(x)

    if url[0] == '/':
        first = '/'
    else:
        first = ''
    if url[-1] == '/' and output_list:
        last = '/'
    else:
        last = ''
    return first + '/'.join(output_list) + last


def validate_jmespath_for_set(expression):
    if not expression or expression == '.':
        raise InvalidExpressionError(expression=expression)
    for invalid in ['[', ']', '*']:
        if invalid in expression:
            raise InvalidExpressionError(expression=expression)


def set_value_from_jmespath(source, expression, value, is_first=True):
    # This takes a (limited) jmespath-like expression and can set a value
    # based on it: only dotted lookups, no offsets/wildcards/slices/etc.
    if is_first:
        validate_jmespath_for_set(expression)

    bits = expression.split('.', 1)
    current_key, remainder = bits[0], bits[1] if len(bits) > 1 else ''

    if not current_key:
        raise InvalidExpressionError(expression=expression)

    if remainder:
        if current_key not in source:
            # The expression names a key not present in the source; set the
            # key to an empty dictionary so we can keep descending.
            source[current_key] = {}
        return set_value_from_jmespath(
            source[current_key], remainder, value, is_first=False)

    # If we're down to a single key, set it.
    source[current_key] = value


class _RetriesExceededError(Exception):
    """Internal exception used when the number of retries are exceeded."""
    pass


class BadIMDSRequestError(Exception):
    def __init__(self, request):
        self.request = request
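
# Illustrative usage sketch (not part of botocore itself): a minimal
# demonstration of set_value_from_jmespath; the dict and the dotted
# expression below are made up for illustration.
def _example_set_value_from_jmespath():
    params = {}
    # Intermediate dictionaries are created on demand.
    set_value_from_jmespath(params, 'foo.bar.baz', 42)
    assert params == {'foo': {'bar': {'baz': 42}}}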
class IMDSFetcher(object):

    _RETRIES_EXCEEDED_ERROR_CLS = _RetriesExceededError
    _TOKEN_PATH = 'latest/api/token'
    _TOKEN_TTL = '21600'

    def __init__(self, timeout=DEFAULT_METADATA_SERVICE_TIMEOUT,
                 num_attempts=1, base_url=METADATA_BASE_URL,
                 env=None, user_agent=None):
        self._timeout = timeout
        self._num_attempts = num_attempts
        self._base_url = base_url
        if env is None:
            env = os.environ.copy()
        self._disabled = env.get('AWS_EC2_METADATA_DISABLED', 'false').lower()
        self._disabled = self._disabled == 'true'
        self._user_agent = user_agent
        self._session = botocore.httpsession.URLLib3Session(
            timeout=self._timeout,
            proxies=get_environ_proxies(self._base_url),
        )

    def _fetch_metadata_token(self):
        self._assert_enabled()
        url = self._base_url + self._TOKEN_PATH
        headers = {
            'x-aws-ec2-metadata-token-ttl-seconds': self._TOKEN_TTL,
        }
        self._add_user_agent(headers)
        request = botocore.awsrequest.AWSRequest(
            method='PUT', url=url, headers=headers)
        for i in range(self._num_attempts):
            try:
                response = self._session.send(request.prepare())
                if response.status_code == 200:
                    return response.text
                elif response.status_code in (404, 403, 405):
                    return None
                elif response.status_code in (400,):
                    raise BadIMDSRequestError(request)
            except ReadTimeoutError:
                return None
            except RETRYABLE_HTTP_ERRORS as e:
                logger.debug(
                    "Caught retryable HTTP exception while making metadata "
                    "service request to %s: %s", url, e, exc_info=True)
        return None

    def _get_request(self, url_path, retry_func, token=None):
        """Make a get request to the Instance Metadata Service.

        :type url_path: str
        :param url_path: The path component of the URL to make a get request.
            This arg is appended to the base_url that was provided in the
            initializer.

        :type retry_func: callable
        :param retry_func: A function that takes the response as an argument
            and determines if it needs to retry. By default empty and
            non-200 OK responses are retried.

        :type token: str
        :param token: Metadata token to send along with GET requests to IMDS.
        """
        self._assert_enabled()
        if retry_func is None:
            retry_func = self._default_retry
        url = self._base_url + url_path
        headers = {}
        if token is not None:
            headers['x-aws-ec2-metadata-token'] = token
        self._add_user_agent(headers)
        for i in range(self._num_attempts):
            try:
                request = botocore.awsrequest.AWSRequest(
                    method='GET', url=url, headers=headers)
                response = self._session.send(request.prepare())
                if not retry_func(response):
                    return response
            except RETRYABLE_HTTP_ERRORS as e:
                logger.debug(
                    "Caught retryable HTTP exception while making metadata "
                    "service request to %s: %s", url, e, exc_info=True)
        raise self._RETRIES_EXCEEDED_ERROR_CLS()

    def _add_user_agent(self, headers):
        if self._user_agent is not None:
            headers['User-Agent'] = self._user_agent

    def _assert_enabled(self):
        if self._disabled:
            logger.debug("Access to EC2 metadata has been disabled.")
            raise self._RETRIES_EXCEEDED_ERROR_CLS()

    def _default_retry(self, response):
        return (
            self._is_non_ok_response(response) or
            self._is_empty(response)
        )

    def _is_non_ok_response(self, response):
        if response.status_code != 200:
            self._log_imds_response(response, 'non-200', log_body=True)
            return True
        return False

    def _is_empty(self, response):
        if not response.content:
            self._log_imds_response(response, 'no body', log_body=True)
            return True
        return False

    def _log_imds_response(self, response, reason_to_log, log_body=False):
        statement = (
            "Metadata service returned %s response "
            "with status code of %s for url: %s"
        )
        logger_args = [reason_to_log, response.status_code, response.url]
        if log_body:
            statement += ", content body: %s"
            logger_args.append(response.content)
        logger.debug(statement, *logger_args)


class InstanceMetadataFetcher(IMDSFetcher):

    _URL_PATH = 'latest/meta-data/iam/security-credentials/'
    _REQUIRED_CREDENTIAL_FIELDS = [
        'AccessKeyId', 'SecretAccessKey', 'Token', 'Expiration'
    ]

    def retrieve_iam_role_credentials(self):
        try:
            token = self._fetch_metadata_token()
            role_name = self._get_iam_role(token)
            credentials = self._get_credentials(role_name, token)
            if self._contains_all_credential_fields(credentials):
                return {
                    'role_name': role_name,
                    'access_key': credentials['AccessKeyId'],
                    'secret_key': credentials['SecretAccessKey'],
                    'token': credentials['Token'],
                    'expiry_time': credentials['Expiration'],
                }
            else:
                if 'Code' in credentials and 'Message' in credentials:
                    logger.debug('Error response received when retrieving '
                                 'credentials: %s.', credentials)
                return {}
        except self._RETRIES_EXCEEDED_ERROR_CLS:
            logger.debug("Max number of attempts exceeded (%s) when "
                         "attempting to retrieve data from metadata service.",
                         self._num_attempts)
        except BadIMDSRequestError as e:
            logger.debug("Bad IMDS request: %s", e.request)
        return {}

    def _get_iam_role(self, token=None):
        return self._get_request(
            url_path=self._URL_PATH,
            retry_func=self._needs_retry_for_role_name,
            token=token,
        ).text

    def _get_credentials(self, role_name, token=None):
        r = self._get_request(
            url_path=self._URL_PATH + role_name,
            retry_func=self._needs_retry_for_credentials,
            token=token,
        )
        return json.loads(r.text)

    def _is_invalid_json(self, response):
        try:
            json.loads(response.text)
            return False
        except ValueError:
            self._log_imds_response(response, 'invalid json')
            return True

    def _needs_retry_for_role_name(self, response):
        return (
            self._is_non_ok_response(response) or
            self._is_empty(response)
        )

    def _needs_retry_for_credentials(self, response):
        return (
            self._is_non_ok_response(response) or
            self._is_empty(response) or
            self._is_invalid_json(response)
        )

    def _contains_all_credential_fields(self, credentials):
        for field in self._REQUIRED_CREDENTIAL_FIELDS:
            if field not in credentials:
                logger.debug(
                    'Retrieved credentials is missing required field: %s',
                    field)
                return False
        return True


def merge_dicts(dict1, dict2, append_lists=False):
    """Given two dict, merge the second dict into the first.

    The dicts can have arbitrary nesting.

    :param append_lists: If true, instead of clobbering a list with the new
        value, append all of the new values onto the original list.
    """
    for key in dict2:
        if isinstance(dict2[key], dict):
            if key in dict1 and key in dict2:
                merge_dicts(dict1[key], dict2[key], append_lists)
            else:
                dict1[key] = dict2[key]
        elif isinstance(dict2[key], list) and append_lists:
            # The value in dict1 must be a list in order to append new
            # values onto it.
            if key in dict1 and isinstance(dict1[key], list):
                dict1[key].extend(dict2[key])
            else:
                dict1[key] = dict2[key]
        else:
            # Scalar values from dict2 simply overwrite the base value.
            dict1[key] = dict2[key]


def lowercase_dict(original):
    """Copies the given dictionary ensuring all keys are lowercase strings."""
    copy = {}
    for key in original:
        copy[key.lower()] = original[key]
    return copy


def parse_key_val_file(filename, _open=open):
    try:
        with _open(filename) as f:
            contents = f.read()
            return parse_key_val_file_contents(contents)
    except OSError:
        raise ConfigNotFound(path=filename)


def parse_key_val_file_contents(contents):
    # This parses a simple "key = value" line-oriented format; lines
    # without an '=' are ignored.
    final = {}
    for line in contents.splitlines():
        if '=' not in line:
            continue
        key, val = line.split('=', 1)
        key = key.strip()
        val = val.strip()
        final[key] = val
    return final
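
# Illustrative usage sketch (not part of botocore itself): how merge_dicts
# and parse_key_val_file_contents behave; all sample data below is made up.
def _example_dict_and_key_val_helpers():
    base = {'retries': {'max_attempts': 3}}
    merge_dicts(base, {'retries': {'mode': 'standard'}})
    assert base == {'retries': {'max_attempts': 3, 'mode': 'standard'}}

    contents = 'AccessKeyId = foo\nSecretAccessKey = bar\nnot-a-pair\n'
    assert parse_key_val_file_contents(contents) == {
        'AccessKeyId': 'foo', 'SecretAccessKey': 'bar'}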
def percent_encode_sequence(mapping, safe=SAFE_CHARS):
    """Urlencode a dict or list into a string.

    This is similar to urllib.urlencode except that:

    * It uses quote, and not quote_plus
    * It has a default list of safe chars that don't need to be encoded,
      which matches what AWS services expect.

    If any value in the input ``mapping`` is a list type,
    then each list element will be serialized. This is the equivalent
    to ``urlencode``'s ``doseq=True`` argument.

    This function should be preferred over the stdlib
    ``urlencode()`` function.

    :param mapping: Either a dict to urlencode or a list of
        ``(key, value)`` pairs.
    """
    encoded_pairs = []
    if hasattr(mapping, 'items'):
        pairs = mapping.items()
    else:
        pairs = mapping
    for key, value in pairs:
        if isinstance(value, list):
            for element in value:
                encoded_pairs.append('%s=%s' % (percent_encode(key),
                                                percent_encode(element)))
        else:
            encoded_pairs.append('%s=%s' % (percent_encode(key),
                                            percent_encode(value)))
    return '&'.join(encoded_pairs)


def percent_encode(input_str, safe=SAFE_CHARS):
    """Urlencodes a string.

    Whereas percent_encode_sequence handles taking a dict/sequence and
    producing a percent encoded string, this function deals only with
    taking a string (not a dict/sequence) and percent encoding it.

    If given the binary type, will simply URL encode it. If given the
    text type, will produce the binary type by UTF-8 encoding the
    text. If given something else, will convert it to the text type
    first.
    """
    # If it's not a binary or text string, make it a text string.
    if not isinstance(input_str, (six.binary_type, six.text_type)):
        input_str = six.text_type(input_str)
    # If it's not bytes, make it bytes by UTF-8 encoding it.
    if not isinstance(input_str, six.binary_type):
        input_str = input_str.encode('utf-8')
    return quote(input_str, safe=safe)


def _parse_timestamp_with_tzinfo(value, tzinfo):
    """Parse timestamp with pluggable tzinfo options."""
    if isinstance(value, (int, float)):
        # Possibly an epoch time.
        return datetime.datetime.fromtimestamp(value, tzinfo())
    else:
        try:
            return datetime.datetime.fromtimestamp(float(value), tzinfo())
        except (TypeError, ValueError):
            pass
    try:
        # In certain cases, a timestamp marked with GMT can be parsed into a
        # different timezone, so here we provide a context which will
        # enforce that GMT == UTC.
        return dateutil.parser.parse(value, tzinfos={'GMT': tzutc()})
    except (TypeError, ValueError) as e:
        raise ValueError('Invalid timestamp "%s": %s' % (value, e))


def parse_timestamp(value):
    """Parse a timestamp into a datetime object.

    Supported formats:

        * iso8601
        * rfc822
        * epoch (value is an integer)

    This will return a ``datetime.datetime`` object.
    """
    for tzinfo in get_tzinfo_options():
        try:
            return _parse_timestamp_with_tzinfo(value, tzinfo)
        except OSError as e:
            logger.debug('Unable to parse timestamp with "%s" timezone info.',
                         tzinfo.__name__, exc_info=e)
    raise RuntimeError('Unable to calculate correct timezone offset for '
                       '"%s"' % value)


def parse_to_aware_datetime(value):
    """Convert the passed in value to a datetime object with tzinfo.

    This function can be used to normalize all timestamp inputs.  This
    function accepts a number of different types of inputs, but
    will always return a datetime.datetime object with time zone
    information.

    The input param ``value`` can be one of several types:

        * A datetime object (both naive and aware)
        * An integer representing the epoch time (can also be a string
          of the integer, i.e '0', instead of 0).  The epoch time is
          considered to be UTC.
        * An iso8601 formatted timestamp.  This does not need to be
          a complete timestamp, it can contain just the date portion
          without the time component.

    The returned value will be a datetime object that will have tzinfo.
    If no timezone info was provided in the input value, then UTC is
    assumed, not local time.
    """
    # This is a general purpose method that handles several cases of
    # converting the provided value to an aware datetime object.
    if isinstance(value, datetime.datetime):
        datetime_obj = value
    else:
        # A string timestamp or an integer (or string integer) epoch time.
        datetime_obj = parse_timestamp(value)
    if datetime_obj.tzinfo is None:
        # A case could be made that if no time zone is provided we should
        # use local time, but for backwards compatibility UTC is the default.
        datetime_obj = datetime_obj.replace(tzinfo=tzutc())
    else:
        datetime_obj = datetime_obj.astimezone(tzutc())
    return datetime_obj


def datetime2timestamp(dt, default_timezone=None):
    """Calculate the timestamp based on the given datetime instance.

    :type dt: datetime
    :param dt: A datetime object to be converted into timestamp
    :type default_timezone: tzinfo
    :param default_timezone: If it is provided as None, we treat it as tzutc().
                             But it is only used when dt is a naive datetime.
    :returns: The timestamp
    """
    epoch = datetime.datetime(1970, 1, 1)
    if dt.tzinfo is None:
        if default_timezone is None:
            default_timezone = tzutc()
        dt = dt.replace(tzinfo=default_timezone)
    d = dt.replace(tzinfo=None) - dt.utcoffset() - epoch
    if hasattr(d, 'total_seconds'):
        return d.total_seconds()
    # Python 2.6 compatibility fallback.
    return (d.microseconds + (d.seconds + d.days * 24 * 3600) * 10**6) / 10**6
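
# Illustrative usage sketch (not part of botocore itself): expected behavior
# of the encoding and timestamp helpers above; the sample values are made up.
def _example_encoding_and_timestamps():
    assert percent_encode_sequence({'k1': 'with spaces++/'}) == (
        'k1=with%20spaces%2B%2B%2F')
    # List values expand into repeated keys, like urlencode's doseq=True.
    assert percent_encode_sequence([('k1', ['a', 'b'])]) == 'k1=a&k1=b'

    dt = parse_to_aware_datetime('1970-01-01T00:00:01Z')
    assert datetime2timestamp(dt) == 1.0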
def calculate_sha256(body, as_hex=False):
    """Calculate a sha256 checksum.

    This method will calculate the sha256 checksum of a file like
    object.  Note that this method will iterate through the entire
    file contents.  The caller is responsible for ensuring the proper
    starting position of the file and ``seek()``'ing the file back
    to its starting location if other consumers need to read from
    the file like object.

    :param body: Any file like object.  The file must be opened in binary
        mode such that a ``.read()`` call returns bytes.
    :param as_hex: If True, then the hex digest is returned.
        If False, then the digest (as binary bytes) is returned.

    :returns: The sha256 checksum
    """
    checksum = hashlib.sha256()
    for chunk in iter(lambda: body.read(1024 * 1024), b''):
        checksum.update(chunk)
    if as_hex:
        return checksum.hexdigest()
    else:
        return checksum.digest()


def calculate_tree_hash(body):
    """Calculate a tree hash checksum.

    For more information see:

    http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html

    :param body: Any file like object.  This has the same constraints as
        the ``body`` param in calculate_sha256

    :rtype: str
    :returns: The hex version of the calculated tree hash
    """
    chunks = []
    required_chunk_size = 1024 * 1024
    sha256 = hashlib.sha256
    for chunk in iter(lambda: body.read(required_chunk_size), b''):
        chunks.append(sha256(chunk).digest())
    if not chunks:
        return sha256(b'').hexdigest()
    while len(chunks) > 1:
        new_chunks = []
        for first, second in _in_pairs(chunks):
            if second is not None:
                new_chunks.append(sha256(first + second).digest())
            else:
                # We're at the end of the iterator with an odd number of
                # hashes; carry the last one forward unchanged.
                new_chunks.append(first)
        chunks = new_chunks
    return binascii.hexlify(chunks[0]).decode('ascii')


def _in_pairs(iterable):
    # Creates an iterator that iterates over the list in pairs, e.g.
    # _in_pairs([0, 1, 2, 3, 4]) yields (0, 1), (2, 3), (4, None).
    # Passing the same iterator to zip_longest twice pairs adjacent items.
    shared_iter = iter(iterable)
    return zip_longest(shared_iter, shared_iter)


class CachedProperty(object):
    """A read only property that caches the initially computed value.

    This descriptor will only call the provided ``fget`` function once.
    Subsequent access to this property will return the cached value.
    """

    def __init__(self, fget):
        self._fget = fget

    def __get__(self, obj, cls):
        if obj is None:
            return self
        else:
            computed_value = self._fget(obj)
            obj.__dict__[self._fget.__name__] = computed_value
            return computed_value


class ArgumentGenerator(object):
    """Generate sample input based on a shape model.

    This class contains a ``generate_skeleton`` method that will take
    an input/output shape (created from ``botocore.model``) and generate
    a sample dictionary corresponding to the input/output shape.

    The specific values used are place holder values. For strings either an
    empty string or the member name can be used, for numbers 0 or 0.0 is
    used.  The intended usage of this class is to generate the *shape* of
    the input structure.

    This can be useful for operations that have complex input shapes.
    This allows a user to just fill in the necessary data instead of
    worrying about the specific structure of the input arguments.

    Example usage::

        s = botocore.session.get_session()
        ddb = s.get_service_model('dynamodb')
        arg_gen = ArgumentGenerator()
        sample_input = arg_gen.generate_skeleton(
            ddb.operation_model('CreateTable').input_shape)
        print("Sample input for dynamodb.CreateTable: %s" % sample_input)
    """

    def __init__(self, use_member_names=False):
        self._use_member_names = use_member_names

    def generate_skeleton(self, shape):
        """Generate a sample input.

        :type shape: ``botocore.model.Shape``
        :param shape: The input shape.

        :return: The generated skeleton input corresponding to the
            provided input shape.
        """
        stack = []
        return self._generate_skeleton(shape, stack)

    def _generate_skeleton(self, shape, stack, name=''):
        stack.append(shape.name)
        try:
            if shape.type_name == 'structure':
                return self._generate_type_structure(shape, stack)
            elif shape.type_name == 'list':
                return self._generate_type_list(shape, stack)
            elif shape.type_name == 'map':
                return self._generate_type_map(shape, stack)
            elif shape.type_name == 'string':
                if self._use_member_names:
                    return name
                if shape.enum:
                    return random.choice(shape.enum)
                return ''
            elif shape.type_name in ['integer', 'long']:
                return 0
            elif shape.type_name == 'float':
                return 0.0
            elif shape.type_name == 'boolean':
                return True
            elif shape.type_name == 'timestamp':
                return datetime.datetime(1970, 1, 1, 0, 0, 0)
        finally:
            stack.pop()

    def _generate_type_structure(self, shape, stack):
        if stack.count(shape.name) > 1:
            return {}
        skeleton = OrderedDict()
        for member_name, member_shape in shape.members.items():
            skeleton[member_name] = self._generate_skeleton(
                member_shape, stack, name=member_name)
        return skeleton

    def _generate_type_list(self, shape, stack):
        name = ''
        if self._use_member_names:
            name = shape.member.name
        return [
            self._generate_skeleton(shape.member, stack, name),
        ]

    def _generate_type_map(self, shape, stack):
        key_shape = shape.key
        value_shape = shape.value
        assert key_shape.type_name == 'string'
        return OrderedDict([
            ('KeyName', self._generate_skeleton(value_shape, stack)),
        ])


def is_valid_endpoint_url(endpoint_url):
    """Verify the endpoint_url is valid.

    :type endpoint_url: string
    :param endpoint_url: An endpoint_url.  Must have at least a scheme
        and a hostname.

    :return: True if the endpoint url is valid. False otherwise.
    """
    parts = urlsplit(endpoint_url)
    hostname = parts.hostname
    if hostname is None:
        return False
    if len(hostname) > 255:
        return False
    if hostname[-1] == '.':
        hostname = hostname[:-1]
    allowed = re.compile(
        r"^((?!-)[A-Z\d-]{1,63}(?<!-)\.)*((?!-)[A-Z\d-]{1,63}(?<!-))$",
        re.IGNORECASE)
    return allowed.match(hostname)


def check_dns_name(bucket_name):
    """
    Check to see if the ``bucket_name`` complies with the
    restricted DNS naming conventions necessary to allow
    access via virtual-hosting style.

    Even though "." characters are perfectly valid in this DNS
    naming scheme, we are going to punt on any name containing a
    "." character because these will cause SSL cert validation
    problems if we try to use virtual-hosting style addressing.
    """
    if '.' in bucket_name:
        return False
    n = len(bucket_name)
    if n < 3 or n > 63:
        # Wrong length
        return False
    match = LABEL_RE.match(bucket_name)
    if match is None or match.end() != len(bucket_name):
        return False
    return True


def fix_s3_host(request, signature_version, region_name,
                default_endpoint_url=None, **kwargs):
    """
    This handler looks at S3 requests just before they are signed.  If there
    is a bucket name on the path (true for everything except
    ListAllBuckets) it checks to see if that bucket name conforms to
    the DNS naming conventions.  If it does, it alters the request to
    use ``virtual hosting`` style addressing rather than ``path-style``
    addressing.
    """
    if request.context.get('use_global_endpoint', False):
        default_endpoint_url = 's3.amazonaws.com'
    try:
        switch_to_virtual_host_style(
            request, signature_version, default_endpoint_url)
    except InvalidDNSNameError as e:
        bucket_name = e.kwargs['bucket_name']
        logger.debug('Not changing URI, bucket is not DNS compatible: %s',
                     bucket_name)
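
# Illustrative usage sketch (not part of botocore itself): the checksum
# helpers operate on binary file-like objects; io.BytesIO stands in for a
# real file here.
def _example_checksums():
    import io
    body = io.BytesIO(b'hello')
    hex_digest = calculate_sha256(body, as_hex=True)
    assert hex_digest == hashlib.sha256(b'hello').hexdigest()
    body.seek(0)
    # For bodies under 1 MB the tree hash is just the sha256 of the one chunk.
    assert calculate_tree_hash(body) == hex_digest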
def switch_to_virtual_host_style(request, signature_version,
                                 default_endpoint_url=None, **kwargs):
    """
    This is a handler to force virtual host style s3 addressing no matter
    the signature version (which is taken in consideration for the default
    case). If the bucket is not DNS compatible an InvalidDNSName is thrown.

    :param request: An AWSRequest object that is about to be sent.
    :param signature_version: The signature version to sign with
    :param default_endpoint_url: The endpoint to use when switching to a
        virtual style. If None is supplied, the virtual host will be
        constructed from the url of the request.
    """
    if request.auth_path is not None:
        # The auth_path has already been applied (this may be a
        # retried request).
        return
    elif _is_get_bucket_location_request(request):
        # For the GetBucketLocation response we do not want to use
        # virtual host style addressing.
        logger.debug("Request is GetBucketLocation operation, not checking "
                     "for DNS compatibility.")
        return
    parts = urlsplit(request.url)
    request.auth_path = parts.path
    path_parts = parts.path.split('/')
    # Determine the endpoint we will be prepending the bucket name to.
    if default_endpoint_url is None:
        default_endpoint_url = parts.netloc
    if len(path_parts) > 1:
        bucket_name = path_parts[1]
        if not bucket_name:
            # An empty bucket name means there is nothing to switch.
            return
        logger.debug('Checking for DNS compatible bucket for: %s',
                     request.url)
        if check_dns_name(bucket_name):
            # If the operation is on a bucket, the auth_path must be
            # terminated with a '/' character.
            if len(path_parts) == 2:
                if request.auth_path[-1] != '/':
                    request.auth_path += '/'
            path_parts.remove(bucket_name)
            # At the very least the path must be a '/', such as with the
            # CreateBucket operation when DNS style is being used; otherwise
            # we would produce an empty, invalid path.
            path = '/'.join(path_parts) or '/'
            global_endpoint = default_endpoint_url
            host = bucket_name + '.' + global_endpoint
            new_tuple = (parts.scheme, host, path, parts.query, '')
            new_uri = urlunsplit(new_tuple)
            request.url = new_uri
            logger.debug('URI updated to: %s', new_uri)
        else:
            raise InvalidDNSNameError(bucket_name=bucket_name)


def _is_get_bucket_location_request(request):
    return request.url.endswith('?location')


def instance_cache(func):
    """Method decorator for caching method calls to a single instance.

    **This is not a general purpose caching decorator.**

    In order to use this, you *must* provide an ``_instance_cache``
    attribute on the instance.

    This decorator is used to cache method calls.  The cache is only
    scoped to a single instance though such that multiple instances
    will maintain their own cache.  In order to keep things simple,
    this decorator requires that you provide an ``_instance_cache``
    attribute on your instance.
    """
    func_name = func.__name__

    @functools.wraps(func)
    def _cache_guard(self, *args, **kwargs):
        cache_key = (func_name, args)
        if kwargs:
            kwarg_items = tuple(sorted(kwargs.items()))
            cache_key = (func_name, args, kwarg_items)
        result = self._instance_cache.get(cache_key)
        if result is not None:
            return result
        result = func(self, *args, **kwargs)
        self._instance_cache[cache_key] = result
        return result
    return _cache_guard


def switch_host_s3_accelerate(request, operation_name, **kwargs):
    """Switches the current s3 endpoint with an S3 Accelerate endpoint"""

    # Note that when registered, the switching of the s3 host happens
    # before the host gets changed to virtual, so we can hard code the
    # Accelerate endpoint here.
    parts = urlsplit(request.url).netloc.split('.')
    parts = [p for p in parts if p in S3_ACCELERATE_WHITELIST]
    endpoint = 'https://s3-accelerate.'
    if len(parts) > 0:
        endpoint += '.'.join(parts) + '.'
    endpoint += 'amazonaws.com'

    if operation_name in ['ListBuckets', 'CreateBucket', 'DeleteBucket']:
        return
    _switch_hosts(request, endpoint, use_new_scheme=False)


def switch_host_with_param(request, param_name):
    """Switches the host using a parameter value from a JSON request body"""
    request_json = json.loads(request.data.decode('utf-8'))
    if request_json.get(param_name):
        new_endpoint = request_json[param_name]
        _switch_hosts(request, new_endpoint)


def _switch_hosts(request, new_endpoint, use_new_scheme=True):
    final_endpoint = _get_new_endpoint(
        request.url, new_endpoint, use_new_scheme)
    request.url = final_endpoint


def _get_new_endpoint(original_endpoint, new_endpoint, use_new_scheme=True):
    new_endpoint_components = urlsplit(new_endpoint)
    original_endpoint_components = urlsplit(original_endpoint)
    scheme = original_endpoint_components.scheme
    if use_new_scheme:
        scheme = new_endpoint_components.scheme
    final_endpoint_components = (
        scheme,
        new_endpoint_components.netloc,
        original_endpoint_components.path,
        original_endpoint_components.query,
        ''
    )
    final_endpoint = urlunsplit(final_endpoint_components)
    logger.debug('Updating URI from %s to %s' % (
        original_endpoint, final_endpoint))
    return final_endpoint


def deep_merge(base, extra):
    """Deeply merge two dictionaries, overriding existing keys in the base.

    :param base: The base dictionary which will be merged into.
    :param extra: The dictionary to merge into the base. Keys from this
        dictionary will take precedence.
    """
    for key in extra:
        # If the key represents a dict on both given dicts, merge the
        # sub-dicts.
        if key in base and isinstance(base[key], dict) \
                and isinstance(extra[key], dict):
            deep_merge(base[key], extra[key])
            continue
        # Otherwise, set the key on the base to be the value of the extra.
        base[key] = extra[key]


def hyphenize_service_id(service_id):
    """Translate the form used for event emitters.

    :param service_id: The service_id to convert.
    """
    return service_id.replace(' ', '-').lower()
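
# Illustrative usage sketch (not part of botocore itself): instance_cache
# requires the instance to provide an ``_instance_cache`` dict; this toy
# class and its lookup method are made up for illustration.
class _ExampleCachedClient(object):
    def __init__(self):
        self._instance_cache = {}
        self.call_count = 0

    @instance_cache
    def lookup(self, key):
        self.call_count += 1
        return key.upper()


def _example_instance_cache():
    client = _ExampleCachedClient()
    assert client.lookup('region') == 'REGION'
    assert client.lookup('region') == 'REGION'
    assert client.call_count == 1  # second call served from the cache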
-)r7r')Z service_idr)r)r*hyphenize_service_idnsrsc@sHeZdZdddZdddZddZdd Zd d Zd d ZddZ dS)S3RegionRedirectorNcCs,||_||_|jdkri|_t||_dSr`)_endpoint_resolver_cacheweakrefproxy_client)rdZendpoint_bridgeclientcacher)r)r*rews  zS3RegionRedirector.__init__cCs<|p |jjj}|d|j|d|j|d|jdS)Nzneeds-retry.s3zbefore-call.s3before-parameter-build.s3)rymetar#registerredirect_from_errorset_request_urlredirect_from_cache)rd event_emitterZemitterr)r)r*r~s zS3RegionRedirector.registercKs|dkr dS||dir,tddS|didrLtddS|ddi}|d}|dd i}|d ko|jd k}|d ko|jd kod |dik} |dkod|k} |ddk o|djdk} |dk} t|| | | | gsdS|ddd} |dd}|| |}|dkr.+)$rbNcCs||_|dkrt|_dSr`) _arn_parserr)rdZ arn_parserr)r)r*re3szS3ArnParamHandler.__init__cCs|d|jdS)Nr|)r~ handle_arnrdrr)r)r*r~8szS3ArnParamHandler.registercKsD|j|jkrdS||}|dkr&dS|ddkr@||||dS)N resource_type accesspoint)r:_BLACKLISTED_OPERATIONS"_get_arn_details_from_bucket_param_store_accesspoint)rdrmodelr?rA arn_detailsr)r)r*r;s   zS3ArnParamHandler.handle_arncCsHd|krDz&|d}|j|}||||WStk rBYnXdS)Nr)rr_add_resource_type_and_namer)rdrrrr)r)r*rDs  z4S3ArnParamHandler._get_arn_details_from_bucket_paramcCs:|j|d}|r,d|d<|d|d<n t|ddS)Nrrr resource_name)r)_ACCESSPOINT_RESOURCE_REGEXr3groupr)rdrrr3r)r)r*rOs z-S3ArnParamHandler._add_resource_type_and_namecCs2|d|d<|d|d|d|dd|d<dS)Nrrrrr)r:rrrrr))rdrr?rr)r)r*rWs  z$S3ArnParamHandler._store_accesspoint)N) r[r\r]r8r1rrrer~rrrrr)r)r)r*r+s   rc@seZdZdZdZd ddZddZdd Zd d Zd d Z ddZ ddZ ddZ ddZ ddZddZeddZeddZeddZdS)!S3EndpointSetterZawsraNcCs@||_||_||_|dkr i|_||_||_|dkr<|j|_dSr`)ru_region _s3_config _endpoint_url _partition_DEFAULT_PARTITION)rdZendpoint_resolverrZ s3_configr4rr)r)r*relszS3EndpointSetter.__init__cCs|d|jdS)Nzbefore-sign.s3)r~ set_endpointrr)r)r*r~xszS3EndpointSetter.registercKsd||r.||||}|||dS|jrFtfd|i||jr`|jfd|i|dS)Nrb)_use_accesspoint_endpoint_validate_accesspoint_supported(_resolve_region_for_accesspoint_endpoint_switch_to_accesspoint_endpoint_use_accelerate_endpointrf_s3_addressing_handler)rdrbrArCr)r)r*r{s   zS3EndpointSetter.set_endpointcCs d|jkSrr?rcr)r)r*rsz*S3EndpointSetter._use_accesspoint_endpointcCsP|jrtdd|jr tdd|jdd}||jkrLtd|j|fddS)NzNClient cannot use a custom "endpoint_url" when specifying an access-point ARN.)msgz[Client does not support s3 accelerate configuration when and access-point ARN is specified.rrzClient is configured for "%s" partition, but access-point ARN provided is for "%s" partition. 
class S3EndpointSetter(object):
    _DEFAULT_PARTITION = 'aws'
    _DEFAULT_DNS_SUFFIX = 'amazonaws.com'

    def __init__(self, endpoint_resolver, region=None,
                 s3_config=None, endpoint_url=None, partition=None):
        self._endpoint_resolver = endpoint_resolver
        self._region = region
        self._s3_config = s3_config
        if s3_config is None:
            self._s3_config = {}
        self._endpoint_url = endpoint_url
        self._partition = partition
        if partition is None:
            self._partition = self._DEFAULT_PARTITION

    def register(self, event_emitter):
        event_emitter.register('before-sign.s3', self.set_endpoint)

    def set_endpoint(self, request, **kwargs):
        if self._use_accesspoint_endpoint(request):
            self._validate_accesspoint_supported(request)
            region_name = self._resolve_region_for_accesspoint_endpoint(
                request)
            self._switch_to_accesspoint_endpoint(request, region_name)
            return
        if self._use_accelerate_endpoint:
            switch_host_s3_accelerate(request=request, **kwargs)
        if self._s3_addressing_handler:
            self._s3_addressing_handler(request=request, **kwargs)

    def _use_accesspoint_endpoint(self, request):
        return 's3_accesspoint' in request.context

    def _validate_accesspoint_supported(self, request):
        if self._endpoint_url:
            raise UnsupportedS3AccesspointConfigurationError(
                msg=(
                    'Client cannot use a custom "endpoint_url" when '
                    'specifying an access-point ARN.'
                )
            )
        if self._use_accelerate_endpoint:
            raise UnsupportedS3AccesspointConfigurationError(
                msg=(
                    'Client does not support s3 accelerate configuration '
                    'when an access-point ARN is specified.'
                )
            )
        request_partition = request.context['s3_accesspoint']['partition']
        if request_partition != self._partition:
            raise UnsupportedS3AccesspointConfigurationError(
                msg=(
                    'Client is configured for "%s" partition, but '
                    'access-point ARN provided is for "%s" partition. The '
                    'client and access-point partition must be the same.' % (
                        self._partition, request_partition)
                )
            )

    def _resolve_region_for_accesspoint_endpoint(self, request):
        if self._s3_config.get('use_arn_region', True):
            accesspoint_region = request.context['s3_accesspoint']['region']
            # If we are using the region from the access point, we will also
            # want to set it as the signing region.
            self._override_signing_region(request, accesspoint_region)
            return accesspoint_region
        return self._region

    def _switch_to_accesspoint_endpoint(self, request, region_name):
        original_components = urlsplit(request.url)
        accesspoint_endpoint = urlunsplit((
            original_components.scheme,
            self._get_accesspoint_netloc(request.context, region_name),
            self._get_accesspoint_path(
                original_components.path, request.context),
            original_components.query,
            ''
        ))
        logger.debug(
            'Updating URI from %s to %s' % (request.url, accesspoint_endpoint))
        request.url = accesspoint_endpoint

    def _get_accesspoint_netloc(self, request_context, region_name):
        s3_accesspoint = request_context['s3_accesspoint']
        accesspoint_netloc_components = [
            '%s-%s' % (s3_accesspoint['name'], s3_accesspoint['account']),
            's3-accesspoint'
        ]
        if self._s3_config.get('use_dualstack_endpoint'):
            accesspoint_netloc_components.append('dualstack')
        accesspoint_netloc_components.extend(
            [
                region_name,
                self._get_dns_suffix(region_name)
            ]
        )
        return '.'.join(accesspoint_netloc_components)

    def _get_accesspoint_path(self, original_path, request_context):
        # The access-point name was substituted for the Bucket parameter, so
        # strip it back out of the path. All S3 operations require at least
        # a '/' in their path.
        name = request_context['s3_accesspoint']['name']
        return original_path.replace('/' + name, '', 1) or '/'

    def _get_dns_suffix(self, region_name):
        resolved = self._endpoint_resolver.construct_endpoint(
            's3', region_name)
        dns_suffix = self._DEFAULT_DNS_SUFFIX
        if resolved and 'dnsSuffix' in resolved:
            dns_suffix = resolved['dnsSuffix']
        return dns_suffix

    def _override_signing_region(self, request, region_name):
        signing_context = {
            'region': region_name,
        }
        # S3SigV4Auth will use the context['signing']['region'] value to
        # sign with if present.
        request.context['signing'] = signing_context

    @CachedProperty
    def _use_accelerate_endpoint(self):
        # Accelerate has been explicitly configured.
        if self._s3_config.get('use_accelerate_endpoint'):
            return True

        # Accelerate mode is turned on automatically if an endpoint url is
        # provided that matches the accelerate scheme.
        if self._endpoint_url is None:
            return False

        # Accelerate is only valid for Amazon endpoints.
        netloc = urlsplit(self._endpoint_url).netloc
        if not netloc.endswith('amazonaws.com'):
            return False

        # The first part of the url should always be s3-accelerate.
        parts = netloc.split('.')
        if parts[0] != 's3-accelerate':
            return False

        # Url parts between 's3-accelerate' and 'amazonaws.com' represent
        # different url features; there should be no duplicates and all of
        # them must be in the whitelist.
        feature_parts = parts[1:-2]
        if len(feature_parts) != len(set(feature_parts)):
            return False
        return all(p in S3_ACCELERATE_WHITELIST for p in feature_parts)

    @CachedProperty
    def _addressing_style(self):
        # Use virtual host style addressing if accelerate is enabled or if
        # the given endpoint url is an accelerate endpoint.
        if self._use_accelerate_endpoint:
            return 'virtual'

        # If a particular addressing style is configured, use it.
        configured_addressing_style = self._s3_config.get('addressing_style')
        if configured_addressing_style:
            return configured_addressing_style

    @CachedProperty
    def _s3_addressing_handler(self):
        # If virtual host style was configured, use it regardless of whether
        # or not the bucket looks dns compatible.
        if self._addressing_style == 'virtual':
            logger.debug("Using S3 virtual host style addressing.")
            return switch_to_virtual_host_style

        # If path style is configured, no additional steps are needed. If
        # an endpoint_url was specified, don't default to virtual either.
        if self._addressing_style == 'path' or self._endpoint_url is not None:
            logger.debug("Using S3 path style addressing.")
            return None

        logger.debug("Defaulting to S3 virtual host style addressing with "
                     "path style addressing fallback.")
        # By default, try to use virtual style with path fallback.
        return fix_s3_host
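
# Illustrative usage sketch (not part of botocore itself): the access-point
# hostname the setter builds; the stub resolver, account id, and names are
# placeholders, and private methods are exercised directly for illustration.
def _example_accesspoint_netloc():
    class _StubResolver(object):
        def construct_endpoint(self, service, region):
            return {'dnsSuffix': 'amazonaws.com'}

    setter = S3EndpointSetter(_StubResolver(), region='us-west-2')
    context = {'s3_accesspoint': {'name': 'my-ap', 'account': '123456789012',
                                  'region': 'us-west-2', 'partition': 'aws'}}
    netloc = setter._get_accesspoint_netloc(context, 'us-west-2')
    assert netloc == 'my-ap-123456789012.s3-accesspoint.us-west-2.amazonaws.com'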
class ContainerMetadataFetcher(object):

    TIMEOUT_SECONDS = 2
    RETRY_ATTEMPTS = 3
    SLEEP_TIME = 1
    IP_ADDRESS = '169.254.170.2'
    _ALLOWED_HOSTS = [IP_ADDRESS, 'localhost', '127.0.0.1']

    def __init__(self, session=None, sleep=time.sleep):
        if session is None:
            session = botocore.httpsession.URLLib3Session(
                timeout=self.TIMEOUT_SECONDS)
        self._session = session
        self._sleep = sleep

    def retrieve_full_uri(self, full_url, headers=None):
        """Retrieve JSON metadata from container metadata.

        :type full_url: str
        :param full_url: The full URL of the metadata service.
            This should include the scheme as well, e.g
            "http://localhost:123/foo"
        """
        self._validate_allowed_url(full_url)
        return self._retrieve_credentials(full_url, headers)

    def _validate_allowed_url(self, full_url):
        parsed = botocore.compat.urlparse(full_url)
        is_whitelisted_host = self._check_if_whitelisted_host(
            parsed.hostname)
        if not is_whitelisted_host:
            raise ValueError(
                "Unsupported host '%s'.  Can only "
                "retrieve metadata from these hosts: %s" %
                (parsed.hostname, ', '.join(self._ALLOWED_HOSTS)))

    def _check_if_whitelisted_host(self, host):
        if host in self._ALLOWED_HOSTS:
            return True
        return False

    def retrieve_uri(self, relative_uri):
        """Retrieve JSON metadata from ECS metadata.

        :type relative_uri: str
        :param relative_uri: A relative URI, e.g "/foo/bar?id=123"

        :return: The parsed JSON response.
        """
        full_url = self.full_url(relative_uri)
        return self._retrieve_credentials(full_url)

    def _retrieve_credentials(self, full_url, extra_headers=None):
        headers = {'Accept': 'application/json'}
        if extra_headers is not None:
            headers.update(extra_headers)
        attempts = 0
        while True:
            try:
                return self._get_response(
                    full_url, headers, self.TIMEOUT_SECONDS)
            except MetadataRetrievalError as e:
                logger.debug("Received error when attempting to retrieve "
                             "container metadata: %s", e, exc_info=True)
                self._sleep(self.SLEEP_TIME)
                attempts += 1
                if attempts >= self.RETRY_ATTEMPTS:
                    raise

    def _get_response(self, full_url, headers, timeout):
        try:
            AWSRequest = botocore.awsrequest.AWSRequest
            request = AWSRequest(method='GET', url=full_url, headers=headers)
            response = self._session.send(request.prepare())
            response_text = response.content.decode('utf-8')
            if response.status_code != 200:
                raise MetadataRetrievalError(
                    error_msg=(
                        "Received non 200 response (%s) from ECS metadata: %s"
                    ) % (response.status_code, response_text))
            try:
                return json.loads(response_text)
            except ValueError:
                error_msg = (
                    "Unable to parse JSON returned from ECS metadata services"
                )
                logger.debug('%s:%s', error_msg, response_text)
                raise MetadataRetrievalError(error_msg=error_msg)
        except RETRYABLE_HTTP_ERRORS as e:
            error_msg = ("Received error when attempting to retrieve "
                         "ECS metadata: %s" % e)
            raise MetadataRetrievalError(error_msg=error_msg)

    def full_url(self, relative_uri):
        return 'http://%s%s' % (self.IP_ADDRESS, relative_uri)


def get_environ_proxies(url):
    if should_bypass_proxies(url):
        return {}
    else:
        return getproxies()


def should_bypass_proxies(url):
    """
    Returns whether we should bypass proxies or not.
    """
    # The proxy_bypass call can fail in some environments, so only the
    # specific exceptions we've seen are caught here; other failures can
    # reveal legitimate problems.
    try:
        if proxy_bypass(urlparse(url).netloc):
            return True
    except (TypeError, socket.gaierror):
        pass

    return False


def get_encoding_from_headers(headers, default='ISO-8859-1'):
    """Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    :param default: default encoding if the content-type is text
    """
    content_type = headers.get('content-type')

    if not content_type:
        return None

    content_type, params = cgi.parse_header(content_type)

    if 'charset' in params:
        return params['charset'].strip("'\"")

    if 'text' in content_type:
        return default


def calculate_md5(body, **kwargs):
    if isinstance(body, (bytes, bytearray)):
        binary_md5 = _calculate_md5_from_bytes(body)
    else:
        binary_md5 = _calculate_md5_from_file(body)
    return base64.b64encode(binary_md5).decode('ascii')


def _calculate_md5_from_bytes(body_bytes):
    md5 = get_md5(body_bytes)
    return md5.digest()


def _calculate_md5_from_file(fileobj):
    start_position = fileobj.tell()
    md5 = get_md5()
    for chunk in iter(lambda: fileobj.read(1024 * 1024), b''):
        md5.update(chunk)
    fileobj.seek(start_position)
    return md5.digest()


def conditionally_calculate_md5(params, **kwargs):
    """Only add a Content-MD5 if the system supports it."""
    headers = params['headers']
    body = params['body']
    if MD5_AVAILABLE and body is not None and 'Content-MD5' not in headers:
        md5_digest = calculate_md5(body, **kwargs)
        params['headers']['Content-MD5'] = md5_digest


class FileWebIdentityTokenLoader(object):
    def __init__(self, web_identity_token_path, _open=open):
        self._web_identity_token_path = web_identity_token_path
        self._open = _open

    def __call__(self):
        with self._open(self._web_identity_token_path) as token_file:
            return token_file.read()


class SSOTokenLoader(object):
    def __init__(self, cache=None):
        if cache is None:
            cache = {}
        self._cache = cache

    def _generate_cache_key(self, start_url):
        return hashlib.sha1(start_url.encode('utf-8')).hexdigest()

    def __call__(self, start_url):
        cache_key = self._generate_cache_key(start_url)
        try:
            token = self._cache[cache_key]
            return token['accessToken']
        except KeyError:
            logger.debug('Failed to load SSO token:', exc_info=True)
            error_msg = (
                'The SSO access token has either expired or is otherwise '
                'invalid.'
            )
            raise SSOTokenLoadError(error_msg=error_msg)
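
# Illustrative usage sketch (not part of botocore itself): SSOTokenLoader
# reads a cached token keyed by the sha1 of the SSO start URL; the start URL
# and token below are made up.
def _example_sso_token_loader():
    start_url = 'https://example.awsapps.com/start'
    cache_key = hashlib.sha1(start_url.encode('utf-8')).hexdigest()
    loader = SSOTokenLoader(cache={cache_key: {'accessToken': 'token123'}})
    assert loader(start_url) == 'token123'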