import boto3
import botocore
import os
import sys

def lambda_handler(event, context):
    print(event)
    bucket_name = event['Records'][0]['s3']['bucket']['name']
    print(bucket_name)
    key_name = event['Records'][0]['s3']['object']['key']
    print(key_name)

    # S3 URL-encodes keys in event notifications; restore the ':' that
    # AppFlow timestamps put in object names
    key_object_name = os.path.basename(key_name).replace('%3A', ':')
    mod_key_name = os.path.join(os.path.dirname(key_name), key_object_name)
    copy_source = os.path.join(bucket_name, mod_key_name)
    print(copy_source)

    # Change new_bucket_name to the bucket you created as the data lake stage
    new_bucket_name = 'appflow-sfdev-data-lake'
    new_key_name = 'account/account.json'
    s3_resource = boto3.resource('s3')

    # Copy to the data lake stage
    try:
        s3_resource.Object(new_bucket_name, new_key_name).copy_from(CopySource=copy_source)
    except botocore.exceptions.ClientError as e:
        print('{}: {}'.format(e.response['Error']['Code'], e.response['Error']['Message']))
        sys.exit(1)

    # Delete the source object to prevent redundancy
    try:
        s3_resource.Object(bucket_name, mod_key_name).delete()
    except botocore.exceptions.ClientError as e:
        print('{}: {}'.format(e.response['Error']['Code'], e.response['Error']['Message']))
        sys.exit(2)
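For reference, a minimal sketch of the S3 put-event shape the handler consumes, which you could append to the same file to exercise it locally. The bucket name and key below are illustrative placeholders (not from the original walkthrough), and a real run assumes valid AWS credentials plus an existing source object.

# Hedged local-test sketch: 'appflow-sfdev-raw' and the key are hypothetical
# placeholders; the copy/delete calls will hit real S3 if credentials exist.
sample_event = {
    'Records': [
        {
            's3': {
                'bucket': {'name': 'appflow-sfdev-raw'},
                'object': {'key': 'account/2020-01-01T00%3A00%3A00'}
            }
        }
    ]
}

if __name__ == '__main__':
    # The handler never touches context, so None stands in for a dry run
    lambda_handler(sample_event, None)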