{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "QuickSight Batch Deployment\n", "\n", "Author: Ying Wang (Sr.Data Visualization Engineer in ProServe GSP)\n", "Creation Date: April 16 2020\n", "\n", "Author: Vamsi Bhadriraju (Data Architect in ProServe) \n", "Revision Date : January 12 2021\n", "Revision Date : May 27 2021\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "!pip install --upgrade pip\n", "!pip install --upgrade boto3\n", "get_ipython().system('pip install --upgrade ipynb')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import boto3\n", "import json\n", "import time\n", "from IPython.display import JSON\n", "import sys\n", "import ipynb.fs \n", "import logging\n", "from typing import Any, Dict, List, Optional\n", "from datetime import datetime\n", "\n", "# current date and time\n", "now = str(datetime.now().strftime(\"%m-%d-%Y_%H_%M\"))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from ipynb.fs.defs.Functions import data_sources\n", "from ipynb.fs.defs.Functions import describe_source \n", "from ipynb.fs.defs.Functions import delete_source\n", "from ipynb.fs.defs.Functions import create_data_source\n", "from ipynb.fs.defs.Functions import get_datasource_name\n", "from ipynb.fs.defs.Functions import get_datasource_ids\n", "from ipynb.fs.defs.Functions import update_data_source_permissions\n", "\n", "from ipynb.fs.defs.Functions import get_dataset_name\n", "from ipynb.fs.defs.Functions import data_sets\n", "from ipynb.fs.defs.Functions import describe_dataset\n", "from ipynb.fs.defs.Functions import get_dataset_ids\n", "from ipynb.fs.defs.Functions import delete_dataset \n", "from ipynb.fs.defs.Functions import create_dataset\n", "from ipynb.fs.defs.Functions import update_dataset\n", "from ipynb.fs.defs.Functions import update_data_set_permissions\n", "\n", "from ipynb.fs.defs.Functions import get_target\n", "\n", "from ipynb.fs.defs.Functions import templates\n", "from ipynb.fs.defs.Functions import delete_template\n", "from ipynb.fs.defs.Functions import update_template_permission \n", "from ipynb.fs.defs.Functions import copy_template\n", "from ipynb.fs.defs.Functions import describe_template\n", "from ipynb.fs.defs.Functions import create_template \n", "\n", "from ipynb.fs.defs.Functions import dashboards\n", "from ipynb.fs.defs.Functions import describe_dashboard\n", "from ipynb.fs.defs.Functions import create_dashboard \n", "from ipynb.fs.defs.Functions import delete_dashboard\n", "from ipynb.fs.defs.Functions import update_dashboard \n", "from ipynb.fs.defs.Functions import get_dashboard_ids\n", "from ipynb.fs.defs.Functions import get_dashboard_name\n", "\n", "from ipynb.fs.defs.Functions import themes\n", "from ipynb.fs.defs.Functions import describe_theme\n", "from ipynb.fs.defs.Functions import delete_theme\n", "from ipynb.fs.defs.Functions import create_theme\n", "from ipynb.fs.defs.Functions import update_theme\n", "from ipynb.fs.defs.Functions import describe_theme_permissions\n", "from ipynb.fs.defs.Functions import update_theme_permissions\n", "\n", "\n", "from ipynb.fs.defs.Functions import analysis\n", "from ipynb.fs.defs.Functions import describe_analysis\n", "from ipynb.fs.defs.Functions import create_analysis\n", "from ipynb.fs.defs.Functions import delete_analysis\n", "from ipynb.fs.defs.Functions import update_analysis\n", "from ipynb.fs.defs.Functions import get_analysis_ids\n", "from ipynb.fs.defs.Functions import describe_analysis_permissions\n", "\n", "\n", "\n", "#supportive functions\n", "from ipynb.fs.defs.Functions import data_sets_ls_of_dashboard\n", "from ipynb.fs.defs.Functions import data_sources_ls_of_dashboard\n", "from ipynb.fs.defs.Functions import get_data_source_deployment_list\n", "from ipynb.fs.defs.Functions import data_sources_ls_of_analysis\n", "from ipynb.fs.defs.Functions import data_sets_ls_of_analysis\n", "from ipynb.fs.defs.Functions import get_user_arn\n", "from ipynb.fs.defs.Functions import _assume_role\n", "\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Static Profile\n", "\n", "You can configure AWS profile from terminal and call the profile in below cell" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "sourceprofile=''\n", "targetprofile=''\n", "aws_region='us-east-1'\n", "sourcesession = boto3.Session(profile_name=sourceprofile, region_name=aws_region)\n", "targetsession = boto3.Session(profile_name=targetprofile, region_name=aws_region)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Assume Role\n", "\n", "You can also assume an IAM role and create session based on the role permissions" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#source account\n", "sourceaccountid=\n", "role_name=\n", "aws_region='us-east-1'\n", "sourcesession = _assume_role(sourceaccountid, role_name, aws_region)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#target account\n", "targetaccountid=\"\"\n", "role_name=\"\"\n", "aws_region='us-east-1'\n", "targetsession = _assume_role(targetaccountid, role_name, aws_region)\n", "#targetsession = boto3.Session(\n", "# aws_access_key_id=\"\",\n", "# aws_secret_access_key=\"\",\n", "# aws_session_token=\"\",\n", "# region_name=aws_region\n", "# )" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Set root and admin users\n", "\n", "root user is for the template. \n", "By default, we assign full permissions of objects to admin." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def get_user_arn (session, username, region='us-east-1', namespace='default'): \n", " sts_client = session.client(\"sts\")\n", " account_id = sts_client.get_caller_identity()[\"Account\"]\n", " if username=='root':\n", " arn='arn:aws:iam::'+account_id+':'+username\n", " else:\n", " arn=\"arn:aws:quicksight:\"+region+\":\"+account_id+\":user/\"+namespace+\"/\"+username\n", " \n", " return arn\n", " \n", "sourceroot=get_user_arn (sourcesession, 'root')\n", "sourceadmin=get_user_arn (sourcesession, 'Administrator/wangzyn-Isengard')\n", "#sourceversion='1'\n", "\n", "targetroot=get_user_arn (targetsession, 'root')\n", "targetadmin=get_user_arn (targetsession, 'Admin/wangzyn-Isengard')\n", "#targetvpc='arn:aws:quicksight:us-east-1:889399602426:vpcConnection/sg-40b7521a'" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Please define your input parameters in below cell" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "rds='wangzynrds.adrcdferg.us-east-1.rds.amazonaws.com'\n", "redshift={\n", " \"ClusterId\": 'wangzyncluster1',\n", " \"Host\": 'wangzyncluster1.coprq8ycemvc.us-east-1.redshift.amazonaws.com',\n", " \"Database\": 'dev'}\n", "\n", "s3Bucket='spaceneedle-samplefiles.prod.us-east-1'\n", "s3Key='sales/manifest.json'\n", "vpc='sg-40b7521a'\n", "tag=[\n", " {\n", " 'Key': 'test',\n", " 'Value': 'true'\n", " }\n", " ]\n", "owner=targetadmin\n", "rdscredential={\n", " 'CredentialPair': {\n", " 'Username': \"\",\n", " 'Password': \"\"}}\n", "redshiftcredential={\n", " 'CredentialPair': {\n", " 'Username': \"\",\n", " 'Password': \"\"}}\n", "region='us-east-1'\n", "namespace='default'\n", "version='1' \n", "\n", "target=get_target(targetsession, rds,redshift,s3Bucket,s3Key,vpc,tag,owner,rdscredential,redshiftcredential)\n", "\n", "#JSON(target)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#results output location\n", "successlocation = \"Deployment_Results/Successful/\"\n", "faillocation = \"Deployment_Results/Fail/\"\n", "\n", "import os\n", "try:\n", " os.makedirs(successlocation)\n", "except OSError:\n", " print (\"Creation of the directory %s failed\" % successlocation)\n", "else:\n", " print (\"Successfully created the directory %s\" % successlocation)\n", "\n", "try:\n", " os.makedirs(faillocation)\n", "except OSError:\n", " print (\"Creation of the directory %s failed\" % faillocation)\n", "else:\n", " print (\"Successfully created the directory %s\" % faillocation)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Import functions from Functions.ipynb" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "New Account Set Up Sample" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Deploy data sources" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#data_sources function will return all the data sources of a specific AWS Account\n", "#data_sources ('Account ID', profile)\n", "\n", "#new account set up sample:\n", "datasources=data_sources(sourcesession)\n", "\n", "#get data sources which already deployed\n", "targetsources=data_sources(targetsession)\n", "\n", "#already_deployed record the data source ids of target account\n", "already_deployed=[]\n", "for tsource in targetsources:\n", " already_deployed.append(tsource['DataSourceId'])\n", " \n", "newsourceslist=[]\n", "faillist=[]\n", "for i in datasources:\n", " if i['DataSourceId'] not in already_deployed and 'DataSourceParameters' in i:\n", " newdsource=create_data_source (i, targetsession, target)\n", " if 'Error' in newdsource:\n", " faillist.append(newdsource)\n", " \n", " else: newsourceslist.append(newdsource)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "with open(faillocation+now+'_Datasource_Creation_Error.json', \"w\") as f:\n", " json.dump(faillist, f, indent=4, sort_keys=True, default=str)\n", "\n", "faillist2=[]\n", "successfulls=[]\n", "for news in newsourceslist:\n", " datasource=describe_source(targetsession, news['DataSourceId'])\n", "\n", " if datasource['DataSource']['Status']==\"CREATION_FAILED\":\n", " delete_source (targetsession, news['DataSourceId'])\n", " faillist2.append(news['DataSourceId'])\n", " \n", " if datasource['DataSource']['Status']==\"CREATION_SUCCESSFUL\":\n", " successfulls.append(datasource['DataSource'])\n", " \n", " while datasource['DataSource']['Status']==\"CREATION_IN_PROGRESS\":\n", " time.sleep(5)\n", " datasource=describe_source(targetsession, news['DataSourceId'])\n", " if datasource['DataSource']['Status']==\"CREATION_SUCCESSFUL\":\n", " successfulls.append(datasource['DataSource'])\n", " break\n", " elif datasource['DataSource']['Status']==\"CREATION_FAILED\":\n", " delete_source (targetsession, news['DataSourceId'])\n", " faillist2.append(news['DataSourceId'])\n", " \n", "with open(faillocation+now+'_Datasource_Creation_Fail.json', \"w\") as f:\n", " json.dump(faillist2, f, indent=4, sort_keys=True, default=str)\n", "\n", "with open(successlocation+now+'_Datasource_Creation_Success.json', \"w\") as f:\n", " json.dump(successfulls, f, indent=4, sort_keys=True, default=str)\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Delete objects" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "# THIS WILL DELETE ALL TARGET DATASETS\n", "\n", "delete = \"datasource\"\n", "\n", "if delete == \"datasource\": \n", " for datasource in data_sources(targetsession):\n", " #if datasource['Type'] == \"REDSHIFT\":\n", " try:\n", " delete_source (targetsession, datasource['DataSourceId'])\n", " except Exception: pass \n", "elif delete == \"dataset\":\n", " for dataset in data_sets(targetsession):\n", " delete_dataset (targetsession, dataset['DataSetId'])\n", "elif delete == \"template\": \n", " for template in templates(targetsession):\n", " delete_template(targetsession, template['TemplateId'])\n", "elif delete == \"analysis\":\n", " for analysis in analysis(targetsession): delete_analysis(targetsession, analysis['AnalysisId'])\n", " \n", "elif delete == \"dashboard\": \n", " for dashboard in dashboards(targetsession):\n", " delete_dashboard(targetsession, dashboard['DashboardId'])\n", "delete =\"don't delete anything\"" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Get datasets list:" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "datasets=data_sets(sourcesession)\n", "#JSON(datasets)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Deploy Datasets" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#get datasets which already deployed\n", "targetds=data_sets(targetsession)\n", "#already_deployed record the datasets ids of target account\n", "already_deployed=[]\n", "for ds in targetds:\n", " already_deployed.append(ds['DataSetId'])\n", "#already_deployed" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "newsetslist=[]\n", "faillist=[]\n", "sts_client = targetsession.client(\"sts\")\n", "account_id = sts_client.get_caller_identity()[\"Account\"]\n", "for i in datasets:\n", " if i['DataSetId'] not in already_deployed:\n", " try:\n", " res=describe_dataset(sourcesession, i['DataSetId'])\n", " except Exception:\n", " faillist.append({\"Dataset\": i, \"Error\": str(Exception)})\n", " continue\n", " #if 's3PhysicalTable' in res['DataSet']['PhysicalTableMap']:\n", " #continue\n", " #else:\n", " #pass\n", "\n", " \n", " name=i['Name'].replace(\" \", \"_\")\n", " datasetid=i['DataSetId']\n", "\n", " PT=res['DataSet']['PhysicalTableMap']\n", " # print(PT)\n", " for key, value in PT.items():\n", " #print(value)\n", " for i,j in value.items():\n", " #print(j)\n", " dsid = j['DataSourceArn'].split(\"/\")[1]\n", " j['DataSourceArn']='arn:aws:quicksight:us-east-1:'+account_id+':datasource/'+dsid\n", "\n", " LT=res['DataSet']['LogicalTableMap']\n", " \n", " try: \n", " newdataset=create_dataset(targetsession, datasetid, name, PT, LT, res['DataSet']['ImportMode'], target['datasetpermission'])\n", " #print(\"new dataset: \", newdataset)\n", " newsetslist.append(newdataset)\n", " except Exception as e:\n", " #print('failed: '+str(e))\n", " faillist.append({\"DataSetId\": datasetid, \"Name\": name, \"Error\": str(e)})\n", " \n", " continue" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#print fail informations\n", "with open(faillocation+now+'Dataset_Creation_Error.json', \"w\") as f:\n", " json.dump(faillist, f, indent=4, sort_keys=True, default=str)\n", "\n", "successfulls=[]\n", "for news in newsetslist:\n", " dataset=describe_dataset(targetsession, news['DataSetId'])\n", " successfulls.append(dataset['DataSet'])\n", " \n", "with open(successlocation+now+'Datasets_Creation_Success.json', \"w\") as f:\n", " json.dump(successfulls, f, indent=4, sort_keys=True, default=str)\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Get Themes List" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "themes_list =themes(sourcesession)\n", "#JSON(datasets)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Deploy Themes" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#get themes which already deployed\n", "targetthemes=themes(targetsession)\n", "#already_deployed record the datasets ids of target account\n", "already_deployed=[]\n", "for th in targetthemes:\n", " already_deployed.append(th['ThemeId'])\n", "#already_deployed" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "newthemeslist=[]\n", "faillist=[]\n", "sts_client = targetsession.client(\"sts\")\n", "account_id = sts_client.get_caller_identity()[\"Account\"]\n", "for i in themes_list:\n", " if i['ThemeId'] not in already_deployed:\n", " try:\n", " res=describe_theme(sourcesession, i['ThemeId'])\n", " except Exception:\n", " faillist.append({\"Theme\": i, \"Error\": str(Exception)})\n", " continue\n", " THEMEID=res['Theme']['ThemeId']\n", " Name=res['Theme']['Name']\n", " BaseThemeId=res['Theme']['Version']['BaseThemeId']\n", " Configuration=res['Theme']['Version']['Configuration']\n", " try: \n", " newtheme=create_theme (targetsession,THEMEID, Name,BaseThemeId,Configuration)\n", " newthemeslist.append(newtheme)\n", " except Exception as e:\n", " #print('failed: '+str(e))\n", " faillist.append({\"ThemeID\": THEMEID, \"Name\": Name, \"Error\": str(e)})\n", " continue\n", " try:\n", " update_theme_permissions(targetsession, THEMEID, targetadmin)\n", " except Exception as e:\n", " #print('failed: '+str(e))\n", " faillist.append({\"ThemeID\": THEMEID, \"Name\": Name, \"Error\": str(e)})\n", " continue" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ " \n", "#print fail informations\n", "with open(faillocation+now+'Themes_Creation_Error.json', \"w\") as f:\n", " json.dump(faillist, f, indent=4, sort_keys=True, default=str)\n", "\n", "successfulls=[]\n", "for news in newthemeslist:\n", " theme=describe_theme(targetsession, news['ThemeId'])\n", " successfulls.append(theme['Theme']['ThemeId'])\n", " \n", "with open(successlocation+now+'Themes_Creation_Success.json', \"w\") as f:\n", " json.dump(successfulls, f, indent=4, sort_keys=True, default=str)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Get Analysis list" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "sourceanalysis_list=analysis(sourcesession)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Deploy Analysis" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "sourceanalysis_all=[]\n", "for i in sourceanalysis_list:\n", " if i['Status']!= 'DELETED':\n", " sourceanalysis_all.append(i)\n", "\n", "success=[]\n", "faillist=[]\n", "sts_client = targetsession.client(\"sts\")\n", "account_id = sts_client.get_caller_identity()[\"Account\"]\n", "for i in sourceanalysis_all:\n", " sourceanalysis=describe_analysis(sourcesession, i['AnalysisId'])\n", " sourceanalysisid=sourceanalysis['Analysis']['AnalysisId']\n", " sourceanalysisArn=sourceanalysis['Analysis']['Arn']\n", " sourceanalysisname=sourceanalysis['Analysis']['Name']\n", " DataSetArns=sourceanalysis['Analysis']['DataSetArns']\n", " sourcetid=sourceanalysisid\n", " sourcetname=sourceanalysisname\n", " targettid=sourcetid\n", " targettname=sourceanalysisname\n", " \n", " TargetThemeArn=''\n", " if 'ThemeArn' in sourceanalysis['Analysis'].keys():\n", " SourceThemeArn=sourceanalysis['Analysis']['ThemeArn']\n", " TargetThemeArn = 'arn:aws:quicksight:'+region+':'+account_id+':theme/'+sourceanalysis['Analysis']['ThemeArn'].split(\"/\")[1]\n", "\n", " sourcedsref = []\n", " for i in DataSetArns:\n", " missing=False\n", " did = i.split(\"/\")[1]\n", " try:\n", " dname=get_dataset_name(did, sourcesession)\n", " except Exception as e:\n", " faillist.append({\"Error Type\": \"Dataset: \"+did+\" is missing!\",\"sourceanalysisid\": sourcetid, \"Name\": sourcetname, \"Error\": str(e)})\n", " missing=True\n", " break\n", " \n", " sourcedsref.append({'DataSetPlaceholder': dname,\n", " 'DataSetArn': i})\n", " if missing: continue\n", " try:\n", " sourcetemplate = create_template(sourcesession, sourcetid, sourcetname, sourcedsref, sourceanalysisArn, '1')\n", " sourcetemplate=describe_template(sourcesession,sourcetid)\n", " except Exception as e:\n", " faillist.append({\"Error Type\": \"Create Source Template Error\",\"sourceanalysisid\": sourcetid, \"Name\": sourcetname, \"Error\": str(e)})\n", "\n", " continue\n", " \n", " while sourcetemplate['Template']['Version']['Status']==\"CREATION_IN_PROGRESS\":\n", " time.sleep(5)\n", " sourcetemplate=describe_template(sourcesession,sourcetid)\n", " if sourcetemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n", " try:\n", " updateres=update_template_permission(sourcesession, sourcetid, targetroot)\n", " except Exception as e:\n", " delete_template(sourcesession, sourcetid)\n", " faillist.append({\"Error Type\": \"Update Source Template Permission Error\",\n", " \"sourceanalysisid\": sourcetid, \n", " \"Name\": sourcetname, \n", " \"Error\": str(e)})\n", " else: \n", " if sourcetemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n", " try:\n", " updateres=update_template_permission(sourcesession, sourcetid, targetroot)\n", " except Exception as e:\n", " delete_template(sourcesession, sourcetid)\n", " faillist.append({\"Error Type\": \"Update Source Template Permission Error\",\n", " \"sourceanalysisid\": sourcetid, \n", " \"Name\": sourcetname, \n", " \"Error\": str(e)})\n", " continue \n", "\n", " ds=data_sets (targetsession)\n", " Template=sourcetemplate['Template']\n", " dsref=[]\n", " \n", " missing=False\n", " for i in Template['Version']['DataSetConfigurations']:\n", " #print(i)\n", " n=Template['Version']['DataSetConfigurations'].index(i)\n", " #print(n)\n", " for j in ds:\n", " if i['Placeholder']==j['Name']:\n", " dsref.append({\n", " 'DataSetPlaceholder': i['Placeholder'],\n", " 'DataSetArn': j['Arn']\n", " })\n", " if n>len(dsref): \n", " e=\"Dataset \"+i['Placeholder']+\"is missing!\"\n", " faillist.append({\"Error Type\": \"Datasets in target env are missing for this analysis\",\n", " \"sourceanalysisid\": sourcetid, \n", " \"Name\": sourcetname, \n", " \"Error\": str(e)})\n", " missing=True\n", " break\n", " if missing: break\n", " if missing: continue\n", " \n", "##working\n", " SourceEntity={\n", " 'SourceTemplate': {\n", " 'DataSetReferences': dsref,\n", " 'Arn': Template['Arn']\n", " }\n", " }\n", " \n", " #print(SourceEntity)\n", " analysis=describe_analysis(targetsession, targettid)\n", " if 'Faild to describe analysis:' in analysis or analysis['Analysis']['Status']=='DELETED':\n", " if 'analysis/'+targettid+' is not found' in analysis or analysis['Analysis']['Status']=='DELETED':\n", " print(\"Create new anlaysis now:\")\n", " try:\n", " newanalysis=create_analysis(targetsession, targettid, targettname,targetadmin,SourceEntity,TargetThemeArn)\n", " except Exception as e:\n", " delete_template(sourcesession, targettid)\n", " faillist.append({\"Error Type\": \"Create New Analysis Error\",\n", " \"AnalysisID\": targettid, \n", " \"Name\": targettname, \n", " \"Error\": str(e)}) \n", " continue\n", " else:\n", " faillist.append({\"Error Type\": \"Describe Target Analysis Error\",\n", " \"AnalysisID\": targettid, \n", " \"Name\": targettname, \n", " \"Error\": str(analysis)}) \n", " continue\n", " elif analysis['Analysis']['Status']==\"CREATION_FAILED\":\n", " res=delete_analysis(sourcesession, targettid)\n", " try:\n", " newanalysis=create_analysis(targetsession, targettid, targettname,targetadmin, SourceEntity,TargetThemeArn)\n", " except Exception as e:\n", " delete_template(sourcesession, targettid)\n", " faillist.append({\"Error Type\": \"Create Analysis Error\",\n", " \"AnalysisID\": targettid, \n", " \"Name\": targettname, \n", " \"Error\": str(e)})\n", " continue\n", " \n", " else:\n", " print(\"analysis is existing. update it now.\")\n", " try:\n", " newanalysis=update_analysis(targetsession, targettid, targettname, SourceEntity,TargetThemeArn)\n", " except Exception as e:\n", " delete_template(sourcesession, targettid)\n", " faillist.append({\"Error Type\": \"Update Analysis Error\",\n", " \"AnalysisID\": targettid, \n", " \"Name\": targettname, \n", " \"Error\": str(e)})\n", " continue\n", " time.sleep(20)\n", " res=describe_analysis(targetsession,newanalysis['AnalysisId'])\n", " if res['Status']==200:\n", " status=res['Analysis']['Status']\n", " if status=='CREATION_SUCCESSFUL' or status=='UPDATE_SUCCESSFUL':\n", " success.append(res['Analysis'])\n", " #filename=\"Deployment_Results/Successful/Analysis_\"+res['Analysis']['Name']+\".json\"\n", " else:\n", " faillist.append({\"Error Type\": \"Analysis Creation Status is not Successful\", \"Analysis\": res['Analysis']})\n", " #filename=\"Deployment_Results/Fail/Analysis_\"+res['Analysis']['Name']+\".json\"" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "with open(faillocation+now+'Analysis_Error.json', \"w\") as f:\n", " json.dump(faillist, f, indent=4, sort_keys=True, default=str)\n", "\n", "with open(successlocation+now+'Analysis_Success.json', \"w\") as f:\n", " json.dump(success, f, indent=4, sort_keys=True, default=str)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Get dashboards list" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "sourcedashboards=dashboards(sourcesession)\n", "#dashboards" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Deploy dashboards" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "success=[]\n", "faillist=[]\n", "for i in sourcedashboards:\n", " sourcedashboard=describe_dashboard(sourcesession, i['DashboardId'])\n", " SourceEntityArn=sourcedashboard['Dashboard']['Version']['SourceEntityArn']\n", " if SourceEntityArn.split(\"/\")[0].split(\":\")[-1]==\"analysis\":\n", " sourceanalysis=sourcedashboard['Dashboard']['Version']['SourceEntityArn']\n", " else: \n", " faillist.append({\"Error Type\": \"Source Analysis is missing!\",\"DashboardId\": sourcetid, \"Name\": sourcetname, \"Error\": \"Source Analysis is missing!\"})\n", " continue\n", " sourceversion=sourcedashboard['Dashboard']['Version']['VersionNumber']\n", " sourcedid=sourcedashboard['Dashboard']['DashboardId']\n", " sourcedname=sourcedashboard['Dashboard']['Name']\n", " sourcetid=sourcedid\n", " sourcetname=sourcedname\n", " targettid=sourcetid\n", " targettname=sourcedname\n", " DataSetArns=sourcedashboard['Dashboard']['Version']['DataSetArns']\n", " TargetThemeArn=''\n", " if 'ThemeArn' in sourcedashboard['Dashboard']['Version'].keys():\n", " SourceThemearn=sourcedashboard['Dashboard']['Version']['ThemeArn']\n", " TargetThemeArn = 'arn:aws:quicksight:'+region+':'+account_id+':theme/'+SourceThemearn.split(\"/\")[1]\n", " sourcedsref = []\n", " #print(sourcedname)\n", " for i in DataSetArns:\n", " missing=False\n", " did = i.split(\"/\")[1]\n", " #print(did)\n", " try:\n", " dname=get_dataset_name(did, sourcesession)\n", " except Exception as e:\n", " faillist.append({\"Error Type\": \"Dataset: \"+did+\" is missing!\",\"DashboardId\": sourcetid, \"Name\": sourcetname, \"Error\": str(e)})\n", " missing=True\n", " break\n", " \n", " sourcedsref.append({'DataSetPlaceholder': dname,\n", " 'DataSetArn': i})\n", " if missing: continue\n", " try:\n", " sourcetemplate = create_template(sourcesession, sourcetid, sourcetname, sourcedsref, sourceanalysis, '1')\n", " sourcetemplate=describe_template(sourcesession,sourcetid)\n", " except Exception as e:\n", " faillist.append({\"Error Type\": \"Create Source Template Error\",\"DashboardId\": sourcetid, \"Name\": sourcetname, \"Error\": str(e)})\n", " continue\n", " \n", " while sourcetemplate['Template']['Version']['Status']==\"CREATION_IN_PROGRESS\":\n", " time.sleep(5)\n", " sourcetemplate=describe_template(sourcesession,sourcetid)\n", " if sourcetemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n", " try:\n", " updateres=update_template_permission(sourcesession, sourcetid, targetroot)\n", " except Exception as e:\n", " delete_template(sourcesession, sourcetid)\n", " faillist.append({\"Error Type\": \"Update Source Template Permission Error\",\n", " \"DashboardId\": sourcetid, \n", " \"Name\": sourcetname, \n", " \"Error\": str(e)})\n", " else: \n", " if sourcetemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n", " try:\n", " updateres=update_template_permission(sourcesession, sourcetid, targetroot)\n", " except Exception as e:\n", " delete_template(sourcesession, sourcetid)\n", " faillist.append({\"Error Type\": \"Update Source Template Permission Error\",\n", " \"DashboardId\": sourcetid, \n", " \"Name\": sourcetname, \n", " \"Error\": str(e)})\n", " continue\n", "\n", " if updateres['Status']==200:\n", " try:\n", " targettemplate=copy_template(targetsession, targettid, targettname, updateres['TemplateArn'])\n", " except Exception as e:\n", " delete_template(sourcesession, sourcetid)\n", " faillist.append({\"Error Type\": \"Copy Template Error\",\n", " \"DashboardId\": sourcetid, \n", " \"Name\": sourcetname, \n", " \"Error\": str(e)}) \n", " continue\n", " else: \n", " delete_template(sourcesession, sourcetid)\n", " faillist.append({\"Error Type\": \"Update Source Template Permission Error\",\n", " \"DashboardId\": sourcetid, \n", " \"Name\": sourcetname, \n", " \"Error\": str(e)})\n", " continue\n", " \n", " targettemplate=describe_template(targetsession,targettid)\n", " \n", " while targettemplate['Template']['Version']['Status']==\"CREATION_IN_PROGRESS\":\n", " time.sleep(5)\n", " targettemplate=describe_template(targetsession,targettid)\n", " if targettemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n", " break\n", " else: \n", " if targettemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n", " print(\"Template is successful copied!\")\n", " else: \n", " delete_template(targetsession, targettid)\n", " faillist.append({\"Error Type\": \"Copy Template Error\",\n", " \"DashboardId\": sourcetid, \n", " \"Name\": sourcetname, \n", " \"Error\": str(e)})\n", " continue\n", " \n", " ds=data_sets (targetsession)\n", " Template=targettemplate['Template']\n", " dsref=[]\n", " \n", " missing=False\n", " for i in Template['Version']['DataSetConfigurations']:\n", " #print(i)\n", " n=Template['Version']['DataSetConfigurations'].index(i)\n", " #print(n)\n", " for j in ds:\n", " if i['Placeholder']==j['Name']:\n", " dsref.append({\n", " 'DataSetPlaceholder': i['Placeholder'],\n", " 'DataSetArn': j['Arn']\n", " })\n", " if n>len(dsref): \n", " e=\"Dataset \"+i['Placeholder']+\"is missing!\"\n", " faillist.append({\"Error Type\": \"Datasets in target env are missing for this dashboard\",\n", " \"DashboardId\": sourcetid, \n", " \"Name\": sourcetname, \n", " \"Error\": str(e)})\n", " missing=True\n", " break\n", " if missing: break\n", " if missing: continue\n", " \n", " SourceEntity={\n", " 'SourceTemplate': {\n", " 'DataSetReferences': dsref,\n", " 'Arn': Template['Arn']\n", " }\n", " }\n", " #print(SourceEntity)\n", " dashboard=describe_dashboard(targetsession, targettid)\n", "\n", " if 'Faild to describe dashboard:' in dashboard:\n", " if 'dashboard/'+targettid+' is not found' in dashboard:\n", " print(\"Create new dashboard now:\")\n", " try:\n", " newdashboard=create_dashboard(targetsession, targettid, targettname,targetadmin, SourceEntity, '1',TargetThemeArn,filter='ENABLED',csv='ENABLED', sheetcontrol='COLLAPSED')\n", " except Exception as e:\n", " delete_template(targetsession, targettid)\n", " faillist.append({\"Error Type\": \"Create New Dashboard Error\",\n", " \"DashboardId\": targettid, \n", " \"Name\": targettname, \n", " \"Error\": str(e)}) \n", " continue\n", " else: \n", " faillist.append({\"Error Type\": \"Describe Target Dashboard Error\",\n", " \"DashboardId\": targettid, \n", " \"Name\": targettname, \n", " \"Error\": str(dashboard)}) \n", " continue\n", " elif dashboard['Dashboard']['Version']['Status']==\"CREATION_FAILED\":\n", " res=delete_dashboard(targetsession, targettid)\n", " try:\n", " newdashboard=create_dashboard(targetsession, targettid, targettname,targetadmin, SourceEntity, '1',TargetThemeArn,filter='ENABLED',csv='ENABLED', sheetcontrol='COLLAPSED')\n", " except Exception as e:\n", " delete_template(targetsession, targettid)\n", " faillist.append({\"Error Type\": \"Create Dashboard Error\",\n", " \"DashboardId\": targettid, \n", " \"Name\": targettname, \n", " \"Error\": str(e)})\n", " continue\n", " \n", " else:\n", " print(\"dashboard is existing. Delete and recreate it now.\")\n", " try:\n", " res=delete_dashboard(targetsession, targettid)\n", " newdashboard=create_dashboard(targetsession, targettid, targettname,targetadmin, SourceEntity, '1',TargetThemeArn,filter='ENABLED',csv='ENABLED', sheetcontrol='COLLAPSED')\n", " except Exception as e:\n", " delete_template(targetsession, targettid)\n", " faillist.append({\"Error Type\": \"Create Dashboard Error\",\n", " \"DashboardId\": targettid, \n", " \"Name\": targettname, \n", " \"Error\": str(e)})\n", " continue\n", "\n", " res=describe_dashboard(targetsession,newdashboard['DashboardId'])\n", " \n", " if res['Status']==200:\n", " status=res['Dashboard']['Version']['Status']\n", " if status=='CREATION_SUCCESSFUL' or status=='UPDATE_SUCCESSFUL':\n", " success.append(res['Dashboard'])\n", " #filename=\"Deployment_Results/Successful/Dashboard_\"+res['Dashboard']['Name']+\".json\"\n", " else:\n", " faillist.append({\"Error Type\": \"Dashboard Creation Status is not Successful\", \"Dashboard\": res['Dashboard']})\n", "\n", " #filename=\"Deployment_Results/Fail/Dashboard_\"+res['Dashboard']['Name']+\".json\"" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "with open(faillocation+now+'Dashboard_Error.json', \"w\") as f:\n", " json.dump(faillist, f, indent=4, sort_keys=True, default=str)\n", "\n", "with open(successlocation+now+'Dashboard_Success.json', \"w\") as f:\n", " json.dump(success, f, indent=4, sort_keys=True, default=str)\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Schedule notebooks to execute" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "https://aws.amazon.com/blogs/machine-learning/scheduling-jupyter-notebooks-on-sagemaker-ephemeral-instances/" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.4" }, "toc-showcode": false, "toc-showmarkdowntxt": true }, "nbformat": 4, "nbformat_minor": 4 }