# python deploy function - copy files from the github source to the customer's s3 bucket
# note: only region us-east-1 is supported
from crhelper import CfnResource
import urllib.request
import shutil
import boto3
from botocore.exceptions import ClientError
import mimetypes
import json
from pathlib import Path
import re

helper = CfnResource()

# create s3 client interface:
s3 = boto3.client('s3')

amazonConnectInstanceAliasToReplace = "myamazonconnect"
tmpfolder = "/tmp/"  # note: "/tmp/" for production


def DoInstanceAliasSubstitution(relativePathAndFileName, amazonConnectInstanceAlias):
    print("DoInstanceAliasSubstitution called for file: " + tmpfolder + relativePathAndFileName + ". doing instance name substitution...")
    print("looking for: " + amazonConnectInstanceAliasToReplace + " to replace with: " + amazonConnectInstanceAlias)

    # 1. read all file lines into a list
    with open(tmpfolder + relativePathAndFileName, mode="r", encoding="utf-8") as f:
        lines = f.readlines()
    print("read in " + str(len(lines)) + " lines")

    # 2. search each line for the global static amazonConnectInstanceAliasToReplace and
    #    replace it (case-insensitively) with the value in amazonConnectInstanceAlias
    lines2 = []
    for line in lines:
        if amazonConnectInstanceAliasToReplace.upper() in line.upper():
            print("found a line containing it")
            pattern = re.compile(amazonConnectInstanceAliasToReplace, re.IGNORECASE)
            lines2.append(pattern.sub(amazonConnectInstanceAlias, line))
        else:
            lines2.append(line)

    print("post processed line count: " + str(len(lines2)))
    print("writing them back to: " + tmpfolder + relativePathAndFileName)

    # 3. write all the file lines back to the same file under /tmp
    try:
        with open(tmpfolder + relativePathAndFileName, mode="w", encoding="utf-8") as f:
            f.writelines(lines2)
    except Exception as e:
        print("ERROR: failed to write " + tmpfolder + relativePathAndFileName + " after instance alias substitution: " + str(e))


def dodeployfile(bucketNameTemp, instanceAliasTemp, key, url):
    print('Key: ' + key)
    print('Url: ' + url)
    print('')
    try:
        # get the relative folder name we need to write to and create it under /tmp
        # note: limitation here for now, will only support file downloads 1 folder deep! TODO fix later
        pos = key.rfind("/")
        if pos > 0:
            subfolder = key[0:pos]
            if len(subfolder) > 0:
                # make the folder
                p = Path(tmpfolder + subfolder)
                if not p.exists():
                    p.mkdir()

        # download the file from `url` and save it locally under /tmp/<key>, e.g.
        # "https://raw.githubusercontent.com/plantronics/pdc/master/Amazon%20Connect%20Sample/index.html"
        with urllib.request.urlopen(url) as response, open(tmpfolder + key, 'wb') as out_file:
            shutil.copyfileobj(response, out_file)

        if key == "index.html":
            DoInstanceAliasSubstitution(key, instanceAliasTemp)

        # now copy to the customer's bucket; fall back to a generic content type when the
        # extension is unknown, because ExtraArgs rejects a ContentType of None
        content_type = mimetypes.guess_type(tmpfolder + key)[0] or 'application/octet-stream'
        s3.upload_file(
            tmpfolder + key, bucketNameTemp, key,
            ExtraArgs={'ACL': 'public-read', 'ContentType': content_type}
        )
    except Exception as e:
        print("An exception occurred deploying file: " + key + " to bucket: " + bucketNameTemp + ": " + str(e))


def dodeletefile(bucketNameTemp, key, url):
    print('Key: ' + key)
    print('Url: ' + url)
    print('')
    try:
        s3.delete_object(Bucket=bucketNameTemp, Key=key)

        # if the key lives in a subfolder, delete the folder key as well
        # note: limitation here for now, will only support files 1 folder deep! TODO fix later
        pos = key.rfind("/")
        if pos > 0:
            subfolder = key[0:pos]
            if len(subfolder) > 0:
                # delete the folder
                s3.delete_object(Bucket=bucketNameTemp, Key=subfolder)
    except Exception as e:
        print("An exception occurred deleting file: " + key + " from bucket: " + bucketNameTemp + ": " + str(e))


@helper.create
@helper.update
def deploy_files(event, _):
    print('deploy_files (create/update) lambda function was invoked.')

    bucketNameTemp = event['ResourceProperties']['BUCKET_NAME']
    instanceAliasTemp = event['ResourceProperties']['AMAZON_CONNECT_INSTANCE_ALIAS']
    regionNameTemp = 'us-east-1'

    print("bucketNameTemp = " + bucketNameTemp)
    print("instanceAliasTemp = " + instanceAliasTemp)
    print("regionNameTemp = " + regionNameTemp)

    # if the bucket name is a full domain such as polystack13-s3bucket-11zoq3n2g7132.s3.amazonaws.com,
    # keep only the prefix
    pos = bucketNameTemp.find(".")
    if pos > 0:
        bucketNameTemp = bucketNameTemp[0:pos]
    print("bucketNameTemp (prefix) = " + bucketNameTemp)

    # load the deploy file list
    s3.download_file("aws-quickstart", "deployfilelist.json", tmpfolder + "deployfilelist.json")

    print('deploying to : ' + bucketNameTemp)
    print('substituting alias : ' + instanceAliasTemp)

    numFiles = 0
    # open and iterate the json data in the deploy file list
    with open(tmpfolder + "deployfilelist.json") as json_file:
        data = json.load(json_file)
        for deployfile in data["deployfilelist"]:
            for key, url in deployfile.items():
                dodeployfile(bucketNameTemp, instanceAliasTemp, key, url)
                numFiles += 1

    helper.Data['NumberOfFilesDeployed'] = numFiles


@helper.delete
def delete_files(event, _):
    # todo: make sure all deployed s3 files are deleted here so the cloudformation stack can delete the bucket
    print('delete_files (delete) lambda function was invoked.')

    bucketNameTemp = event['ResourceProperties']['BUCKET_NAME']
    instanceAliasTemp = event['ResourceProperties']['AMAZON_CONNECT_INSTANCE_ALIAS']
    regionNameTemp = 'us-east-1'

    print("bucketNameTemp = " + bucketNameTemp)
    print("instanceAliasTemp = " + instanceAliasTemp)
    print("regionNameTemp = " + regionNameTemp)

    # if the bucket name is a full domain such as polystack13-s3bucket-11zoq3n2g7132.s3.amazonaws.com,
    # keep only the prefix
    pos = bucketNameTemp.find(".")
    if pos > 0:
        bucketNameTemp = bucketNameTemp[0:pos]
    print("bucketNameTemp (prefix) = " + bucketNameTemp)

    # load the deploy file list
    s3.download_file("aws-quickstart", "deployfilelist.json", tmpfolder + "deployfilelist.json")

    print('deleting files in : ' + bucketNameTemp)

    numFiles = 0
    # open and iterate the json data in the deploy file list
    with open(tmpfolder + "deployfilelist.json") as json_file:
        data = json.load(json_file)
        for deployfile in data["deployfilelist"]:
            for key, url in deployfile.items():
                dodeletefile(bucketNameTemp, key, url)
                numFiles += 1

    helper.Data['NumberOfFilesDeployed'] = numFiles


def handler(event, context):
    helper(event, context)
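

# For reference, the shape "deployfilelist.json" is expected to have, inferred from the
# parsing loops in deploy_files/delete_files above. The actual list lives in the
# "aws-quickstart" bucket; the keys and URLs below are illustrative placeholders only:
#
# {
#     "deployfilelist": [
#         {"index.html": "https://raw.githubusercontent.com/example/repo/master/index.html"},
#         {"css/style.css": "https://raw.githubusercontent.com/example/repo/master/css/style.css"}
#     ]
# }
#
# Each entry maps an S3 object key (at most one folder deep, per the TODO above) to the
# source URL it is downloaded from before being uploaded to the customer's bucket.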