{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"QuickSight Incremental Deployment\n",
"\n",
"Author: Ying Wang (Sr.Data Visualization Engineer in ProServe GSP)\n",
"Date: July 15 2020\n",
"\n",
"Author: Vamsi Bhadriraju (Data Architect in ProServe) \n",
"Revision Date : January 12 2021\n",
"Revision Date : May 27 2021\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!pip install --upgrade pip\n",
"!pip install --upgrade boto3\n",
"get_ipython().system('pip install --upgrade ipynb')"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import boto3\n",
"import json\n",
"import time\n",
"from IPython.display import JSON\n",
"import sys\n",
"import ipynb.fs \n",
"import logging\n",
"from typing import Any, Dict, List, Optional\n",
"from datetime import datetime\n",
"\n",
"# current date and time\n",
"now = str(datetime.now().strftime(\"%m-%d-%Y_%H_%M\"))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Import functions from functions notebook"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"from ipynb.fs.defs.Functions import data_sources\n",
"from ipynb.fs.defs.Functions import describe_source \n",
"from ipynb.fs.defs.Functions import delete_source\n",
"from ipynb.fs.defs.Functions import create_data_source\n",
"from ipynb.fs.defs.Functions import get_datasource_name\n",
"from ipynb.fs.defs.Functions import get_datasource_ids\n",
"from ipynb.fs.defs.Functions import update_data_source_permissions\n",
"\n",
"from ipynb.fs.defs.Functions import get_dataset_name\n",
"from ipynb.fs.defs.Functions import data_sets\n",
"from ipynb.fs.defs.Functions import describe_dataset\n",
"from ipynb.fs.defs.Functions import get_dataset_ids\n",
"from ipynb.fs.defs.Functions import delete_dataset \n",
"from ipynb.fs.defs.Functions import create_dataset\n",
"from ipynb.fs.defs.Functions import update_dataset\n",
"from ipynb.fs.defs.Functions import update_data_set_permissions\n",
"\n",
"from ipynb.fs.defs.Functions import get_target\n",
"\n",
"from ipynb.fs.defs.Functions import templates\n",
"from ipynb.fs.defs.Functions import delete_template\n",
"from ipynb.fs.defs.Functions import update_template_permission \n",
"from ipynb.fs.defs.Functions import copy_template\n",
"from ipynb.fs.defs.Functions import describe_template\n",
"from ipynb.fs.defs.Functions import create_template \n",
"\n",
"from ipynb.fs.defs.Functions import dashboards\n",
"from ipynb.fs.defs.Functions import describe_dashboard\n",
"from ipynb.fs.defs.Functions import create_dashboard \n",
"from ipynb.fs.defs.Functions import delete_dashboard\n",
"from ipynb.fs.defs.Functions import update_dashboard \n",
"from ipynb.fs.defs.Functions import get_dashboard_ids\n",
"from ipynb.fs.defs.Functions import get_dashboard_name\n",
"\n",
"from ipynb.fs.defs.Functions import themes\n",
"from ipynb.fs.defs.Functions import describe_theme\n",
"from ipynb.fs.defs.Functions import delete_theme\n",
"from ipynb.fs.defs.Functions import create_theme\n",
"from ipynb.fs.defs.Functions import update_theme\n",
"from ipynb.fs.defs.Functions import describe_theme_permissions\n",
"from ipynb.fs.defs.Functions import update_theme_permissions\n",
"\n",
"from ipynb.fs.defs.Functions import analysis\n",
"from ipynb.fs.defs.Functions import describe_analysis\n",
"from ipynb.fs.defs.Functions import create_analysis\n",
"from ipynb.fs.defs.Functions import delete_analysis\n",
"from ipynb.fs.defs.Functions import update_analysis\n",
"from ipynb.fs.defs.Functions import get_analysis_ids\n",
"from ipynb.fs.defs.Functions import describe_analysis_permissions\n",
"\n",
"\n",
"\n",
"#supportive functions\n",
"from ipynb.fs.defs.Functions import data_sets_ls_of_dashboard\n",
"from ipynb.fs.defs.Functions import data_sources_ls_of_dashboard\n",
"from ipynb.fs.defs.Functions import get_data_source_deployment_list\n",
"from ipynb.fs.defs.Functions import data_sources_ls_of_analysis\n",
"from ipynb.fs.defs.Functions import data_sets_ls_of_analysis\n",
"from ipynb.fs.defs.Functions import get_user_arn\n",
"from ipynb.fs.defs.Functions import _assume_role"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Static Profile\n",
"\n",
"You can configure AWS profile from terminal and call the profile in below cell"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"sourceprofile=''\n",
"targetprofile=''\n",
"aws_region='us-east-1'\n",
"sourcesession = boto3.Session(profile_name=sourceprofile, region_name=aws_region)\n",
"targetsession = boto3.Session(profile_name=targetprofile, region_name=aws_region)\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Assume Role\n",
"\n",
"You can also assume an IAM role and create session based on the role permissions"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#source account\n",
"sourceaccountid=\n",
"role_name=\n",
"aws_region='us-east-1'\n",
"sourcesession = _assume_role(sourceaccountid, role_name, aws_region)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#target account\n",
"targetaccountid=\"\"\n",
"role_name=\"\"\n",
"aws_region='us-east-1'\n",
"targetsession = _assume_role(targetaccountid, role_name, aws_region)\n",
"#targetsession = boto3.Session(\n",
"# aws_access_key_id=\"\",\n",
"# aws_secret_access_key=\"\",\n",
"# aws_session_token=\"\",\n",
"# region_name=aws_region\n",
"# )"
]
},
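{
"cell_type": "markdown",
"metadata": {},
"source": [
"For reference, below is a minimal sketch of what an assume-role helper such as _assume_role might look like. The actual implementation lives in the Functions notebook; the session name and exact signature here are assumptions."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#Hypothetical sketch only; the real _assume_role is imported from the\n",
"#Functions notebook. It presumably wraps sts.assume_role and builds a\n",
"#boto3 Session from the temporary credentials.\n",
"def assume_role_sketch(account_id, role_name, aws_region):\n",
"    sts = boto3.client('sts')\n",
"    creds = sts.assume_role(\n",
"        RoleArn='arn:aws:iam::' + account_id + ':role/' + role_name,\n",
"        RoleSessionName='quicksight-deployment'  #assumed session name\n",
"    )['Credentials']\n",
"    return boto3.Session(\n",
"        aws_access_key_id=creds['AccessKeyId'],\n",
"        aws_secret_access_key=creds['SecretAccessKey'],\n",
"        aws_session_token=creds['SessionToken'],\n",
"        region_name=aws_region)"
]
},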
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Set root and admin users\n",
"\n",
"root user is for the template. \n",
"By default, we assign full permissions of objects to admin."
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"sourceroot=get_user_arn (sourcesession, 'root')\n",
"sourceadmin=get_user_arn (sourcesession, 'Administrator/wangzyn-Isengard')\n",
"#sourceversion='1'\n",
"\n",
"targetroot=get_user_arn (targetsession, 'root')\n",
"targetadmin=get_user_arn (targetsession, 'Admin/wangzyn-Isengard')\n",
"#targetvpc='arn:aws:quicksight:us-east-1:889399602426:vpcConnection/sg-40b7521a'"
]
},
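{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch of how a helper like get_user_arn might resolve these ARNs, assuming it special-cases 'root' to the account root ARN and otherwise calls QuickSight describe_user. The real implementation is in the Functions notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#Hypothetical sketch; the real get_user_arn is imported from the\n",
"#Functions notebook.\n",
"def get_user_arn_sketch(session, username, namespace='default'):\n",
"    account_id = session.client('sts').get_caller_identity()['Account']\n",
"    if username == 'root':\n",
"        #account root ARN, used when sharing templates across accounts\n",
"        return 'arn:aws:iam::' + account_id + ':root'\n",
"    qs = session.client('quicksight')\n",
"    return qs.describe_user(UserName=username,\n",
"                            AwsAccountId=account_id,\n",
"                            Namespace=namespace)['User']['Arn']"
]
},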
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Please define your input parameters in below cell"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"application/json": {
"credential": {
"rdscredential": {
"CredentialPair": {
"Password": "",
"Username": ""
}
},
"redshiftcredential": {
"CredentialPair": {
"Password": "Ro_user1234",
"Username": "ro_user"
}
}
},
"datasetpermission": [
{
"Actions": [
"quicksight:UpdateDataSetPermissions",
"quicksight:DescribeDataSet",
"quicksight:DescribeDataSetPermissions",
"quicksight:PassDataSet",
"quicksight:DescribeIngestion",
"quicksight:ListIngestions",
"quicksight:UpdateDataSet",
"quicksight:DeleteDataSet",
"quicksight:CreateIngestion",
"quicksight:CancelIngestion"
],
"Principal": "arn:aws:quicksight:us-east-1:387046087588:user/default/Admin/wangzyn-Isengard"
}
],
"datasourcepermission": [
{
"Actions": [
"quicksight:DescribeDataSource",
"quicksight:DescribeDataSourcePermissions",
"quicksight:PassDataSource",
"quicksight:UpdateDataSource",
"quicksight:DeleteDataSource",
"quicksight:UpdateDataSourcePermissions"
],
"Principal": "arn:aws:quicksight:us-east-1:387046087588:user/default/Admin/wangzyn-Isengard"
}
],
"rds": {
"rdsinstanceid": "mssql"
},
"redshift": {
"ClusterId": "wangzyncluster1",
"Database": "dev",
"Host": "wangzyncluster1.coprq8ycemvc.us-east-1.redshift.amazonaws.com"
},
"s3": {
"manifestBucket": "spaceneedle-samplefiles.prod.us-east-1",
"manifestkey": "sales/manifest.json"
},
"tag": [
{
"Key": "covid-19-dashboard-migration",
"Value": "true"
}
],
"version": "1",
"vpc": "arn:aws:quicksight:us-east-1:387046087588:vpcConnection/sg-40b7521a"
},
"text/plain": [
""
]
},
"execution_count": 5,
"metadata": {
"application/json": {
"expanded": false,
"root": "root"
}
},
"output_type": "execute_result"
}
],
"source": [
"rds='mssql'\n",
"redshift={\n",
" \"ClusterId\": 'wangzyncluster1',\n",
" \"Host\": 'wangzyncluster1.coprq8ycemvc.us-east-1.redshift.amazonaws.com',\n",
" \"Database\": 'dev'}\n",
"\n",
"s3Bucket='spaceneedle-samplefiles.prod.us-east-1'\n",
"s3Key='sales/manifest.json'\n",
"vpc='sg-40b7521a'\n",
"tag=[\n",
" {\n",
" 'Key': 'test',\n",
" 'Value': 'true'\n",
" }\n",
" ]\n",
"owner=targetadmin\n",
"rdscredential={\n",
" 'CredentialPair': {\n",
" 'Username': \"\",\n",
" 'Password': \"\"}}\n",
"redshiftcredential={\n",
" 'CredentialPair': {\n",
" 'Username': \"ro_user\",\n",
" 'Password': \"Ro_user1234\"}}\n",
"region='us-east-1'\n",
"namespace='default'\n",
"version='1' \n",
"\n",
"target=get_target(targetsession, rds,redshift,s3Bucket,s3Key,vpc,tag,owner,rdscredential,redshiftcredential)\n",
"\n",
"JSON(target)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Deployment List"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"deploy_p = 'dashboard' \n",
"\"\"\"\"\n",
"\"all\" will deploy data source, dataset, theme, analysis and dashboard;\n",
"\"source\" means data sources only; \n",
"\"dataset\" means datasets only; \n",
"\"theme\" means theme only;\n",
"\"analysis\" means analysis only;\n",
"\"dashboard\" means dashboard only\n",
"\"\"\" \n",
"\n",
"source_deploy_list = [\"redshift-auto\", \"mssql\", \"athena_1\",\"redshift_manual\"]\n",
"dataset_deploy_list = [\"patient_info\"]\n",
"theme_deploy_list= [\"orange\"]\n",
"analysis_deploy_list= [\"QuickSight_Access_Last_24_H_Analysis\",\"Marketing Analysis\"]\n",
"dashboard_deploy_list = [\"QuickSight_Access_Last_24_H\", \"Marketing Dashboard\"]"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"QuickSight_Access_Last_24_H\n",
"['athena_1']\n",
"['quicksight_access_last_24h']\n",
"Marketing Dashboard\n",
"['redshift_manual']\n",
"['patient_info']\n"
]
}
],
"source": [
"if deploy_p in ['dashboard']:\n",
" source_deploy_list=[]\n",
" dataset_deploy_list=[]\n",
" for dashboard in dashboard_deploy_list:\n",
" print(dashboard)\n",
" datasources=data_sources_ls_of_dashboard(dashboard, sourcesession)\n",
" print(datasources)\n",
" for datasource in datasources:\n",
" source_deploy_list.append(datasource)\n",
" datasets=data_sets_ls_of_dashboard(dashboard, sourcesession)\n",
" print(datasets)\n",
" for dataset in datasets:\n",
" dataset_deploy_list.append(dataset)\n",
" \n",
"if deploy_p in ['analysis']:\n",
" source_deploy_list=[]\n",
" dataset_deploy_list=[]\n",
" for analysis_name in analysis_deploy_list:\n",
" print(analysis_name)\n",
" datasources=data_sources_ls_of_analysis(analysis_name, sourcesession)\n",
" print(datasources)\n",
" for datasource in datasources:\n",
" source_deploy_list.append(datasource)\n",
" datasets=data_sets_ls_of_analysis(analysis_name, sourcesession)\n",
" print(datasets)\n",
" for dataset in datasets:\n",
" dataset_deploy_list.append(dataset)\n",
" \n",
"if deploy_p in ['all']:\n",
" for dashboard in dashboard_deploy_list:\n",
" datasources=data_sources_ls_of_dashboard(dashboard, sourcesession)\n",
" for datasource in datasources:\n",
" source_deploy_list.append(datasource)\n",
" datasets=data_sets_ls_of_dashboard(dashboard, sourcesession)\n",
" for dataset in datasets:\n",
" dataset_deploy_list.append(dataset)\n",
" \n",
" for analysis_name in analysis_deploy_list:\n",
" datasources=data_sources_ls_of_analysis(analysis_name, sourcesession)\n",
" for datasource in datasources:\n",
" source_deploy_list.append(datasource)\n",
" datasets=data_sets_ls_of_analysis(analysis_name, sourcesession)\n",
" for dataset in datasets:\n",
" dataset_deploy_list.append(dataset)"
]
},
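{
"cell_type": "markdown",
"metadata": {},
"source": [
"The dependency resolution above leans on helpers such as data_sets_ls_of_dashboard. Below is a sketch of how such a helper might be built from the other imported functions; this is an assumption about the Functions notebook, not its actual code."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#Hypothetical sketch; the real helper is imported from the Functions notebook\n",
"def data_sets_ls_of_dashboard_sketch(dashboard, session):\n",
"    #Resolve a dashboard name to the names of the datasets it uses\n",
"    dashboardid = get_dashboard_ids(dashboard, session)[0]\n",
"    res = describe_dashboard(session, dashboardid)\n",
"    datasets = []\n",
"    for arn in res['Dashboard']['Version']['DataSetArns']:\n",
"        datasets.append(get_dataset_name(arn.split('/')[1], session))\n",
"    return datasets"
]
},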
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['QuickSight_Access_Last_24_H', 'Marketing Dashboard']"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"dashboard_deploy_list"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['quicksight_access_last_24h', 'patient_info']"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"dataset_deploy_list"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['athena_1', 'redshift_manual']"
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"source_deploy_list"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['orange']"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"theme_deploy_list"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['QuickSight_Access_Last_24_H_Analysis', 'Marketing Analysis']"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"analysis_deploy_list"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Results Output Location"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Creation of the directory Migration_Results/Successful/ failed\n",
"Creation of the directory Migration_Results/Fail/ failed\n"
]
}
],
"source": [
"#\n",
"successlocation = \"Deployment_Results/Successful/\"\n",
"faillocation = \"Deployment_Results/Fail/\"\n",
"\n",
"import os\n",
"try:\n",
" os.makedirs(successlocation)\n",
"except OSError:\n",
" print (\"Creation of the directory %s failed\" % successlocation)\n",
"else:\n",
" print (\"Successfully created the directory %s\" % successlocation)\n",
"\n",
"try:\n",
" os.makedirs(faillocation)\n",
"except OSError:\n",
" print (\"Creation of the directory %s failed\" % faillocation)\n",
"else:\n",
" print (\"Successfully created the directory %s\" % faillocation)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"deploy data sources"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
"deployment_list=get_data_source_deployment_list(sourcesession,source_deploy_list)"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['18e39ce0-57a8-483b-af1f-cfa43c2bd34b']\n"
]
}
],
"source": [
"#get data sources which already deployed\n",
"targetsources=data_sources(targetsession)\n",
"\n",
"#already_deployed record the data source ids of target account\n",
"already_deployed=[]\n",
"for tsource in targetsources:\n",
" already_deployed.append(tsource['DataSourceId'])\n",
"\n",
"print(already_deployed)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Create data sources in target account"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"73ebc3dd-d3b1-403f-8c99-d00b652edbf9\n",
"f1123c10-8a6a-4316-9475-a3efd163f539\n"
]
}
],
"source": [
"faillist=[]\n",
"newsourceslist=[]\n",
"for i in deployment_list:\n",
" if i['DataSourceId'] not in already_deployed and 'DataSourceParameters' in i:\n",
" print(i['DataSourceId'])\n",
" #if 'DataSourceParameters' in i:\n",
" newdsource=create_data_source (i, targetsession, target)\n",
" if 'Error' in newdsource:\n",
" faillist.append(newdsource)\n",
"\n",
" else: newsourceslist.append(newdsource)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Write deployment results:"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
"# Datasource_Creation_Error.json records the failure on create_data_source API call step\n",
"with open(faillocation+now+'_Datasource_Creation_Error.json', \"w\") as f:\n",
" json.dump(faillist, f, indent=4, sort_keys=True, default=str)\n",
"\n",
"#Datasource_Creation_Fail.json records the creation_failed status data sources\n",
"#Datasource_Creation_Success.json records the successful created data sources\n",
"faillist2=[]\n",
"successfulls=[]\n",
"for news in newsourceslist:\n",
" datasource=describe_source(targetsession, news['DataSourceId'])\n",
"\n",
" if datasource['DataSource']['Status']==\"CREATION_FAILED\":\n",
" delete_source (targetsession, news['DataSourceId'])\n",
" faillist2.append(news['DataSourceId'])\n",
"\n",
" if datasource['DataSource']['Status']==\"CREATION_SUCCESSFUL\":\n",
" successfulls.append(datasource['DataSource'])\n",
"\n",
" while datasource['DataSource']['Status']==\"CREATION_IN_PROGRESS\":\n",
" time.sleep(5)\n",
" datasource=describe_source(targetsession, news['DataSourceId'])\n",
" if datasource['DataSource']['Status']==\"CREATION_SUCCESSFUL\":\n",
" successfulls.append(datasource['DataSource'])\n",
" break\n",
" elif datasource['DataSource']['Status']==\"CREATION_FAILED\":\n",
" delete_source (targetsession, news['DataSourceId'])\n",
" faillist2.append(news['DataSourceId'])\n",
"\n",
"with open(faillocation+now+'_Datasource_Creation_Fail.json', \"w\") as f:\n",
" json.dump(faillist2, f, indent=4, sort_keys=True, default=str)\n",
"\n",
"with open(successlocation+now+'_Datasource_Creation_Success.json', \"w\") as f:\n",
" json.dump(successfulls, f, indent=4, sort_keys=True, default=str)\n"
]
},
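{
"cell_type": "markdown",
"metadata": {},
"source": [
"The CREATION_IN_PROGRESS polling above can be factored into a small helper. A sketch follows; wait_for_data_source is our name for illustration, not a Functions notebook import."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def wait_for_data_source(session, datasource_id, interval=5):\n",
"    #Poll until the data source leaves CREATION_IN_PROGRESS, then return\n",
"    #its final status (CREATION_SUCCESSFUL or CREATION_FAILED)\n",
"    res = describe_source(session, datasource_id)\n",
"    while res['DataSource']['Status'] == 'CREATION_IN_PROGRESS':\n",
"        time.sleep(interval)\n",
"        res = describe_source(session, datasource_id)\n",
"    return res['DataSource']['Status']"
]
},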
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[{'Arn': 'arn:aws:quicksight:us-east-1:387046087588:datasource/73ebc3dd-d3b1-403f-8c99-d00b652edbf9',\n",
" 'DataSourceId': '73ebc3dd-d3b1-403f-8c99-d00b652edbf9',\n",
" 'Name': 'athena_1',\n",
" 'Type': 'ATHENA',\n",
" 'Status': 'CREATION_SUCCESSFUL',\n",
" 'CreatedTime': datetime.datetime(2020, 8, 12, 14, 37, 53, 530000, tzinfo=tzlocal()),\n",
" 'LastUpdatedTime': datetime.datetime(2020, 8, 12, 14, 37, 56, 305000, tzinfo=tzlocal()),\n",
" 'DataSourceParameters': {'AthenaParameters': {'WorkGroup': 'primary'}},\n",
" 'SslProperties': {'DisableSsl': False}},\n",
" {'Arn': 'arn:aws:quicksight:us-east-1:387046087588:datasource/f1123c10-8a6a-4316-9475-a3efd163f539',\n",
" 'DataSourceId': 'f1123c10-8a6a-4316-9475-a3efd163f539',\n",
" 'Name': 'redshift_manual',\n",
" 'Type': 'REDSHIFT',\n",
" 'Status': 'CREATION_SUCCESSFUL',\n",
" 'CreatedTime': datetime.datetime(2020, 8, 12, 16, 38, 44, 224000, tzinfo=tzlocal()),\n",
" 'LastUpdatedTime': datetime.datetime(2020, 8, 12, 16, 38, 45, 159000, tzinfo=tzlocal()),\n",
" 'DataSourceParameters': {'RedshiftParameters': {'Host': 'wangzyncluster1.coprq8ycemvc.us-east-1.redshift.amazonaws.com',\n",
" 'Port': 8192,\n",
" 'Database': 'dev'}},\n",
" 'SslProperties': {'DisableSsl': False}},\n",
" {'Arn': 'arn:aws:quicksight:us-east-1:387046087588:datasource/18e39ce0-57a8-483b-af1f-cfa43c2bd34b',\n",
" 'DataSourceId': '18e39ce0-57a8-483b-af1f-cfa43c2bd34b',\n",
" 'Name': 'Patient-Info.csv',\n",
" 'Type': 'S3',\n",
" 'CreatedTime': datetime.datetime(2020, 2, 14, 17, 42, 3, 798000, tzinfo=tzlocal()),\n",
" 'LastUpdatedTime': datetime.datetime(2020, 2, 14, 17, 42, 5, 556000, tzinfo=tzlocal())}]"
]
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"data_sources(targetsession)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Get datasets list:"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#source account\n",
"sourceaccountid=\"\"\n",
"role_name=\"\"\n",
"aws_region=''\n",
"sourcesession = _assume_role(sourceaccountid, role_name, aws_region)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#target account\n",
"targetaccountid=\"\"\n",
"role_name=\"\"\n",
"aws_region='us-east-1'\n",
"targetsession = _assume_role(targetaccountid, role_name, aws_region)"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.code.notebook.error": {
"message": "o.endsWith is not a function",
"name": "TypeError",
"stack": "TypeError: o.endsWith is not a function\n\tat s (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:142510)\n\tat u (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:143160)\n\tat f (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:143903)\n\tat p (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:144653)\n\tat Array.map ()\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:145839\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:146165\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:146172\n\tat Array.map ()\n\tat e.jupyterNotebookModelToNotebookData (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:145397)\n\tat e.NotebookSerializer.deserializeNotebook (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:147692)\n\tat _.$dataToNotebook (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:85:151702)\n\tat i._doInvokeHandler (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:89:13826)\n\tat i._invokeHandler (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:89:13510)\n\tat i._receiveRequest (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:89:12119)\n\tat i._receiveOneMessage (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:89:11044)\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:89:8947\n\tat u.invoke (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:145)\n\tat v.fire (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:1856)\n\tat d.fire (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:19034)\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:104:34275\n\tat u.invoke (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:145)\n\tat v.fire (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:1856)\n\tat d.fire (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:19034)\n\tat o._receiveMessage (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:23615)\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:21149\n\tat u.invoke (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:145)\n\tat v.fire (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:1856)\n\tat v.acceptChunk (/Applications/Visual Studio 
Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:15865)\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:14995\n\tat Socket.P (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:104:13797)\n\tat Socket.emit (node:events:390:28)\n\tat Socket.emit (node:domain:475:12)\n\tat addChunk (node:internal/streams/readable:315:12)\n\tat readableAddChunk (node:internal/streams/readable:289:9)\n\tat Socket.Readable.push (node:internal/streams/readable:228:10)\n\tat Pipe.onStreamRead (node:internal/stream_base_commons:199:23)"
},
"text/plain": [
""
]
},
"execution_count": 17,
"metadata": {
"application/json": {
"expanded": false,
"root": "root"
}
},
"output_type": "execute_result"
}
],
"source": [
"datasets=data_sets(sourcesession)\n",
"\n",
"deployment_list=[]\n",
"for newset in dataset_deploy_list:\n",
" ids = get_dataset_ids(newset, sourcesession) #Get id of datasets deployment list\n",
" for dataset in datasets:\n",
" if ids[0] == dataset[\"DataSetId\"]:\n",
" deployment_list.append(dataset)\n",
"\n",
"JSON(deployment_list)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Get already deployd datasets list"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [],
"source": [
"#get datasets which already deployed\n",
"targetds=data_sets(targetsession)\n",
"#already_deployed record the datasets ids of target account\n",
"already_deployed=[]\n",
"for ds in targetds:\n",
" already_deployed.append(ds['DataSetId'])\n",
"#already_deployed"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"deploy Datasets"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [],
"source": [
"newsetslist=[]\n",
"faillist=[]\n",
"sts_client = targetsession.client(\"sts\")\n",
"account_id = sts_client.get_caller_identity()[\"Account\"]\n",
"for mds in deployment_list:\n",
" if mds['DataSetId'] not in already_deployed:\n",
" try:\n",
" res=describe_dataset(sourcesession, mds['DataSetId'])\n",
" except Exception:\n",
" faillist.append({\"Dataset\": mds, \"Error\": str(Exception)})\n",
" continue\n",
" \n",
" name=mds['Name']\n",
" datasetid=mds['DataSetId']\n",
"\n",
" PT=res['DataSet']['PhysicalTableMap']\n",
" for key, value in PT.items():\n",
" for k,v in value.items():\n",
" dsid = v['DataSourceArn'].split(\"/\")[1]\n",
" v['DataSourceArn']='arn:aws:quicksight:us-east-1:'+account_id+':datasource/'+dsid\n",
"\n",
" LT=res['DataSet']['LogicalTableMap']\n",
" if 'ColumnGroups' in res['DataSet']:\n",
" ColumnGroups=res['DataSet']['ColumnGroups']\n",
" else: ColumnGroups=None\n",
" try: \n",
" newdataset=create_dataset(targetsession, datasetid, name, PT, LT, res['DataSet']['ImportMode'], target['datasetpermission'],ColumnGroups)\n",
" #print(\"new dataset: \", newdataset)\n",
" newsetslist.append(newdataset)\n",
" except Exception as e:\n",
" #print('failed: '+str(e))\n",
" faillist.append({\"DataSetId\": datasetid, \"Name\": name, \"Error\": str(e)})\n",
"\n",
" continue\n",
" \n",
" if mds['DataSetId'] in already_deployed:\n",
" try:\n",
" res=describe_dataset(sourcesession, mds['DataSetId'])\n",
" except Exception:\n",
" faillist.append({\"Dataset\": mds, \"Error\": str(Exception)})\n",
" continue\n",
" \n",
" name=mds['Name']\n",
" datasetid=mds['DataSetId']\n",
"\n",
" PT=res['DataSet']['PhysicalTableMap']\n",
" for key, value in PT.items():\n",
" for k,v in value.items():\n",
" dsid = v['DataSourceArn'].split(\"/\")[1]\n",
" v['DataSourceArn']='arn:aws:quicksight:us-east-1:'+account_id+':datasource/'+dsid\n",
"\n",
" LT=res['DataSet']['LogicalTableMap']\n",
" if 'ColumnGroups' in res['DataSet']:\n",
" ColumnGroups=res['DataSet']['ColumnGroups']\n",
" else: ColumnGroups=None\n",
" try: \n",
" newdataset=update_dataset(targetsession, datasetid, name, PT, LT, res['DataSet']['ImportMode'],ColumnGroups)\n",
" #print(\"new dataset: \", newdataset)\n",
" newsetslist.append(newdataset)\n",
" except Exception as e:\n",
" #print('failed: '+str(e))\n",
" faillist.append({\"DataSetId\": datasetid, \"Name\": name, \"Error\": str(e)})\n",
"\n",
" continue\n",
" "
]
},
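{
"cell_type": "markdown",
"metadata": {},
"source": [
"The PhysicalTableMap rewrite above can also be expressed as a standalone helper. A sketch, with a function name of our own choosing:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def remap_datasource_arns(physical_table_map, account_id, region='us-east-1'):\n",
"    #Point every physical table at the matching data source id in the\n",
"    #target account by rewriting the region/account portion of the ARN\n",
"    for table in physical_table_map.values():\n",
"        for props in table.values():\n",
"            dsid = props['DataSourceArn'].split('/')[1]\n",
"            props['DataSourceArn'] = ('arn:aws:quicksight:' + region + ':'\n",
"                                      + account_id + ':datasource/' + dsid)\n",
"    return physical_table_map"
]
},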
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [],
"source": [
"#print fail informations\n",
"with open(faillocation+now+'Dataset_Creation_Error.json', \"w\") as f:\n",
" json.dump(faillist, f, indent=4, sort_keys=True, default=str)\n",
"\n",
"successfulls=[]\n",
"for news in newsetslist:\n",
" dataset=describe_dataset(targetsession, news['DataSetId'])\n",
" successfulls.append(dataset['DataSet'])\n",
" \n",
"with open(successlocation+now+'Datasets_Creation_Success.json', \"w\") as f:\n",
" json.dump(successfulls, f, indent=4, sort_keys=True, default=str)\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Get themes list"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"themes_list_complete =themes(sourcesession)\n",
"themes_list=[]\n",
"#JSON(datasets)\n",
"for th in themes_list_complete:\n",
" if th[\"Name\"] in theme_deploy_list:\n",
" themes_list.append(th)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"deploy Themes"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#get themes which already deployed\n",
"targetthemes=themes(targetsession)\n",
"#already_deployed record the datasets ids of target account\n",
"already_deployed=[]\n",
"for th in targetthemes:\n",
" already_deployed.append(th['ThemeId'])\n",
"#already_deployed"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"newthemeslist=[]\n",
"faillist=[]\n",
"sts_client = targetsession.client(\"sts\")\n",
"account_id = sts_client.get_caller_identity()[\"Account\"]\n",
"for i in themes_list:\n",
" if i['ThemeId'] not in already_deployed:\n",
" try:\n",
" res=describe_theme(sourcesession, i['ThemeId'])\n",
" except Exception:\n",
" faillist.append({\"Theme\": i, \"Error\": str(Exception)})\n",
" continue\n",
" THEMEID=res['Theme']['ThemeId']\n",
" Name=res['Theme']['Name']\n",
" BaseThemeId=res['Theme']['Version']['BaseThemeId']\n",
" Configuration=res['Theme']['Version']['Configuration']\n",
" try: \n",
" newtheme=create_theme (targetsession,THEMEID, Name,BaseThemeId,Configuration)\n",
" newthemeslist.append(newtheme)\n",
" except Exception as e:\n",
" #print('failed: '+str(e))\n",
" faillist.append({\"ThemeID\": THEMEID, \"Name\": Name, \"Error\": str(e)})\n",
" continue\n",
" try:\n",
" update_theme_permissions(targetsession, THEMEID, targetadmin)\n",
" except Exception as e:\n",
" #print('failed: '+str(e))\n",
" faillist.append({\"ThemeID\": THEMEID, \"Name\": Name, \"Error\": str(e)})\n",
" continue"
]
},
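{
"cell_type": "markdown",
"metadata": {},
"source": [
"A sketch of how a wrapper like create_theme might call the QuickSight API; the parameter mapping below is an assumption, and the real wrapper lives in the Functions notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#Hypothetical sketch; the real create_theme is imported from Functions\n",
"def create_theme_sketch(session, theme_id, name, base_theme_id, configuration):\n",
"    qs = session.client('quicksight')\n",
"    account_id = session.client('sts').get_caller_identity()['Account']\n",
"    return qs.create_theme(\n",
"        AwsAccountId=account_id,\n",
"        ThemeId=theme_id,\n",
"        Name=name,\n",
"        BaseThemeId=base_theme_id,\n",
"        Configuration=configuration)"
]
},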
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#print fail informations\n",
"with open(faillocation+now+'Themes_Creation_Error.json', \"w\") as f:\n",
" json.dump(faillist, f, indent=4, sort_keys=True, default=str)\n",
"\n",
"successfulls=[]\n",
"for news in newthemeslist:\n",
" theme=describe_theme(targetsession, news['ThemeId'])\n",
" successfulls.append(theme['Theme']['ThemeId'])\n",
" \n",
"with open(successlocation+now+'Themes_Creation_Success.json', \"w\") as f:\n",
" json.dump(successfulls, f, indent=4, sort_keys=True, default=str)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Get analysis"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"sourceanalysis_list_complete=analysis(sourcesession)\n",
"sourceanalysis_list=[]\n",
"for a in sourceanalysis_list_complete:\n",
" if a[\"Name\"] in analysis_deploy_list:\n",
" sourceanalysis_list.append(a)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Deploy Analysis"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"sourceanalysis_all=[]\n",
"for i in sourceanalysis_list:\n",
" if i['Status']!= 'DELETED':\n",
" sourceanalysis_all.append(i)\n",
"\n",
"success=[]\n",
"faillist=[]\n",
"sts_client = targetsession.client(\"sts\")\n",
"account_id = sts_client.get_caller_identity()[\"Account\"]\n",
"for i in sourceanalysis_all:\n",
" sourceanalysis=describe_analysis(sourcesession, i['AnalysisId'])\n",
" sourceanalysisid=sourceanalysis['Analysis']['AnalysisId']\n",
" sourceanalysisArn=sourceanalysis['Analysis']['Arn']\n",
" sourceanalysisname=sourceanalysis['Analysis']['Name']\n",
" DataSetArns=sourceanalysis['Analysis']['DataSetArns']\n",
" sourcetid=sourceanalysisid\n",
" sourcetname=sourceanalysisname\n",
" targettid=sourcetid\n",
" targettname=sourceanalysisname\n",
" \n",
" TargetThemeArn=''\n",
" if 'ThemeArn' in sourceanalysis['Analysis'].keys():\n",
" SourceThemeArn=sourceanalysis['Analysis']['ThemeArn']\n",
" TargetThemeArn = 'arn:aws:quicksight:'+region+':'+account_id+':theme/'+sourceanalysis['Analysis']['ThemeArn'].split(\"/\")[1]\n",
"\n",
" sourcedsref = []\n",
" for i in DataSetArns:\n",
" missing=False\n",
" did = i.split(\"/\")[1]\n",
" try:\n",
" dname=get_dataset_name(did, sourcesession)\n",
" except Exception as e:\n",
" faillist.append({\"Error Type\": \"Dataset: \"+did+\" is missing!\",\"sourceanalysisid\": sourcetid, \"Name\": sourcetname, \"Error\": str(e)})\n",
" missing=True\n",
" break\n",
" \n",
" sourcedsref.append({'DataSetPlaceholder': dname,\n",
" 'DataSetArn': i})\n",
" if missing: continue\n",
" try:\n",
" sourcetemplate = create_template(sourcesession, sourcetid, sourcetname, sourcedsref, sourceanalysisArn, '1')\n",
" sourcetemplate=describe_template(sourcesession,sourcetid)\n",
" except Exception as e:\n",
" faillist.append({\"Error Type\": \"Create Source Template Error\",\"sourceanalysisid\": sourcetid, \"Name\": sourcetname, \"Error\": str(e)})\n",
"\n",
" continue\n",
" \n",
" while sourcetemplate['Template']['Version']['Status']==\"CREATION_IN_PROGRESS\":\n",
" time.sleep(5)\n",
" sourcetemplate=describe_template(sourcesession,sourcetid)\n",
" if sourcetemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n",
" try:\n",
" updateres=update_template_permission(sourcesession, sourcetid, targetroot)\n",
" except Exception as e:\n",
" delete_template(sourcesession, sourcetid)\n",
" faillist.append({\"Error Type\": \"Update Source Template Permission Error\",\n",
" \"sourceanalysisid\": sourcetid, \n",
" \"Name\": sourcetname, \n",
" \"Error\": str(e)})\n",
" else: \n",
" if sourcetemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n",
" try:\n",
" updateres=update_template_permission(sourcesession, sourcetid, targetroot)\n",
" except Exception as e:\n",
" delete_template(sourcesession, sourcetid)\n",
" faillist.append({\"Error Type\": \"Update Source Template Permission Error\",\n",
" \"sourceanalysisid\": sourcetid, \n",
" \"Name\": sourcetname, \n",
" \"Error\": str(e)})\n",
" continue \n",
"\n",
" ds=data_sets (targetsession)\n",
" Template=sourcetemplate['Template']\n",
" dsref=[]\n",
" \n",
" missing=False\n",
" for i in Template['Version']['DataSetConfigurations']:\n",
" #print(i)\n",
" n=Template['Version']['DataSetConfigurations'].index(i)\n",
" #print(n)\n",
" for j in ds:\n",
" if i['Placeholder']==j['Name']:\n",
" dsref.append({\n",
" 'DataSetPlaceholder': i['Placeholder'],\n",
" 'DataSetArn': j['Arn']\n",
" })\n",
" if n>len(dsref): \n",
" e=\"Dataset \"+i['Placeholder']+\"is missing!\"\n",
" faillist.append({\"Error Type\": \"Datasets in target env are missing for this analysis\",\n",
" \"sourceanalysisid\": sourcetid, \n",
" \"Name\": sourcetname, \n",
" \"Error\": str(e)})\n",
" missing=True\n",
" break\n",
" if missing: break\n",
" if missing: continue\n",
" \n",
"##working\n",
" SourceEntity={\n",
" 'SourceTemplate': {\n",
" 'DataSetReferences': dsref,\n",
" 'Arn': Template['Arn']\n",
" }\n",
" }\n",
" \n",
" #print(SourceEntity)\n",
" analysis=describe_analysis(targetsession, targettid)\n",
" if 'Faild to describe analysis:' in analysis or analysis['Analysis']['Status']=='DELETED':\n",
" if 'analysis/'+targettid+' is not found' in analysis or analysis['Analysis']['Status']=='DELETED':\n",
" print(\"Create new anlaysis now:\")\n",
" try:\n",
" newanalysis=create_analysis(targetsession, targettid, targettname,targetadmin,SourceEntity,TargetThemeArn)\n",
" except Exception as e:\n",
" delete_template(sourcesession, targettid)\n",
" faillist.append({\"Error Type\": \"Create New Analysis Error\",\n",
" \"AnalysisID\": targettid, \n",
" \"Name\": targettname, \n",
" \"Error\": str(e)}) \n",
" continue\n",
" else:\n",
" faillist.append({\"Error Type\": \"Describe Target Analysis Error\",\n",
" \"AnalysisID\": targettid, \n",
" \"Name\": targettname, \n",
" \"Error\": str(analysis)}) \n",
" continue\n",
" elif analysis['Analysis']['Status']==\"CREATION_FAILED\":\n",
" res=delete_analysis(sourcesession, targettid)\n",
" try:\n",
" newanalysis=create_analysis(targetsession, targettid, targettname,targetadmin, SourceEntity,TargetThemeArn)\n",
" except Exception as e:\n",
" delete_template(sourcesession, targettid)\n",
" faillist.append({\"Error Type\": \"Create Analysis Error\",\n",
" \"AnalysisID\": targettid, \n",
" \"Name\": targettname, \n",
" \"Error\": str(e)})\n",
" continue\n",
" \n",
" else:\n",
" print(\"analysis is existing. update it now.\")\n",
" try:\n",
" newanalysis=update_analysis(targetsession, targettid, targettname, SourceEntity,TargetThemeArn)\n",
" except Exception as e:\n",
" delete_template(sourcesession, targettid)\n",
" faillist.append({\"Error Type\": \"Update Analysis Error\",\n",
" \"AnalysisID\": targettid, \n",
" \"Name\": targettname, \n",
" \"Error\": str(e)})\n",
" continue\n",
" time.sleep(20)\n",
" res=describe_analysis(targetsession,newanalysis['AnalysisId'])\n",
" if res['Status']==200:\n",
" status=res['Analysis']['Status']\n",
" if status=='CREATION_SUCCESSFUL' or status=='UPDATE_SUCCESSFUL':\n",
" success.append(res['Analysis'])\n",
" #filename=\"Deployment_Results/Successful/Analysis_\"+res['Analysis']['Name']+\".json\"\n",
" else:\n",
" faillist.append({\"Error Type\": \"Analysis Creation Status is not Successful\", \"Analysis\": res['Analysis']})\n",
" #filename=\"Deployment_Results/Fail/Analysis_\"+res['Analysis']['Name']+\".json\""
]
},
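{
"cell_type": "markdown",
"metadata": {},
"source": [
"Analysis deployment hinges on templates: create_template snapshots the source analysis, and update_template_permission shares that template with the target account root. Sketches of what these wrappers might look like around the QuickSight API follow; the parameter mapping is our assumption, not the Functions notebook code."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#Hypothetical sketches; the real wrappers are imported from Functions\n",
"def create_template_sketch(session, template_id, name, dsref, source_arn, version):\n",
"    qs = session.client('quicksight')\n",
"    account_id = session.client('sts').get_caller_identity()['Account']\n",
"    return qs.create_template(\n",
"        AwsAccountId=account_id,\n",
"        TemplateId=template_id,\n",
"        Name=name,\n",
"        SourceEntity={'SourceAnalysis': {'Arn': source_arn,\n",
"                                         'DataSetReferences': dsref}},\n",
"        VersionDescription=version)\n",
"\n",
"def update_template_permission_sketch(session, template_id, principal_arn):\n",
"    qs = session.client('quicksight')\n",
"    account_id = session.client('sts').get_caller_identity()['Account']\n",
"    #quicksight:DescribeTemplate is the action a cross-account copy needs\n",
"    return qs.update_template_permissions(\n",
"        AwsAccountId=account_id,\n",
"        TemplateId=template_id,\n",
"        GrantPermissions=[{'Principal': principal_arn,\n",
"                           'Actions': ['quicksight:DescribeTemplate']}])"
]
},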
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"with open(faillocation+now+'Analysis_Error.json', \"w\") as f:\n",
" json.dump(faillist, f, indent=4, sort_keys=True, default=str)\n",
"\n",
"with open(successlocation+now+'Analysis_Success.json', \"w\") as f:\n",
" json.dump(success, f, indent=4, sort_keys=True, default=str)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Get dashboards list"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#source account\n",
"sourceaccountid=\"\"\n",
"role_name=\"\"\n",
"aws_region='us-east-1'\n",
"sourcesession = _assume_role(sourceaccountid, role_name, aws_region)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#target account\n",
"targetaccountid=\"\"\n",
"role_name=\"\"\n",
"aws_region='us-east-1'\n",
"targetsession = _assume_role(targetaccountid, role_name, aws_region)"
]
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.code.notebook.error": {
"message": "o.endsWith is not a function",
"name": "TypeError",
"stack": "TypeError: o.endsWith is not a function\n\tat s (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:142510)\n\tat u (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:143160)\n\tat f (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:143903)\n\tat p (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:144653)\n\tat Array.map ()\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:145839\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:146165\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:146172\n\tat Array.map ()\n\tat e.jupyterNotebookModelToNotebookData (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:145397)\n\tat e.NotebookSerializer.deserializeNotebook (/Applications/Visual Studio Code.app/Contents/Resources/app/extensions/ipynb/dist/ipynbMain.js:1:147692)\n\tat _.$dataToNotebook (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:85:151702)\n\tat i._doInvokeHandler (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:89:13826)\n\tat i._invokeHandler (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:89:13510)\n\tat i._receiveRequest (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:89:12119)\n\tat i._receiveOneMessage (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:89:11044)\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:89:8947\n\tat u.invoke (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:145)\n\tat v.fire (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:1856)\n\tat d.fire (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:19034)\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:104:34275\n\tat u.invoke (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:145)\n\tat v.fire (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:1856)\n\tat d.fire (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:19034)\n\tat o._receiveMessage (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:23615)\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:21149\n\tat u.invoke (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:145)\n\tat v.fire (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:58:1856)\n\tat v.acceptChunk (/Applications/Visual Studio 
Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:15865)\n\tat /Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:66:14995\n\tat Socket.P (/Applications/Visual Studio Code.app/Contents/Resources/app/out/vs/workbench/api/node/extensionHostProcess.js:104:13797)\n\tat Socket.emit (node:events:390:28)\n\tat Socket.emit (node:domain:475:12)\n\tat addChunk (node:internal/streams/readable:315:12)\n\tat readableAddChunk (node:internal/streams/readable:289:9)\n\tat Socket.Readable.push (node:internal/streams/readable:228:10)\n\tat Pipe.onStreamRead (node:internal/stream_base_commons:199:23)"
},
"text/plain": [
""
]
},
"execution_count": 24,
"metadata": {
"application/json": {
"expanded": false,
"root": "root"
}
},
"output_type": "execute_result"
}
],
"source": [
"sourcedashboards=dashboards(sourcesession)\n",
"\n",
"#Get id of datasets deployment list\n",
"deployment_list=[]\n",
"for newset in dashboard_deploy_list:\n",
" ids = get_dashboard_ids(newset, sourcesession)\n",
" for dashboard in sourcedashboards:\n",
" if ids[0] == dashboard[\"DashboardId\"]:\n",
" deployment_list.append(dashboard)\n",
"\n",
"JSON(deployment_list)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Deploy dashboards"
]
},
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Template is successful copied!\n",
"quicksight_access_last_24h\n",
"Create new dashboard now:\n",
"Template is successful copied!\n",
"patient_info\n",
"Create new dashboard now:\n"
]
}
],
"source": [
"success=[]\n",
"faillist=[]\n",
"for dashboard in deployment_list:\n",
" sourcedashboard=describe_dashboard(sourcesession, dashboard['DashboardId'])\n",
" SourceEntityArn=sourcedashboard['Dashboard']['Version']['SourceEntityArn']\n",
" if SourceEntityArn.split(\"/\")[0].split(\":\")[-1]==\"analysis\":\n",
" sourceanalysis=sourcedashboard['Dashboard']['Version']['SourceEntityArn']\n",
" else: \n",
" faillist.append({\"Error Type\": \"Source Analysis is missing!\",\"DashboardId\": sourcetid, \"Name\": sourcetname, \"Error\": \"Source Analysis is missing!\"})\n",
" continue\n",
" sourceversion=sourcedashboard['Dashboard']['Version']['VersionNumber']\n",
" sourcedid=sourcedashboard['Dashboard']['DashboardId']\n",
" sourcedname=sourcedashboard['Dashboard']['Name']\n",
" sourcetid=sourcedid\n",
" sourcetname=sourcedname\n",
" targettid=sourcetid\n",
" targettname=sourcedname\n",
" DataSetArns=sourcedashboard['Dashboard']['Version']['DataSetArns']\n",
" TargetThemeArn=''\n",
" if 'ThemeArn' in sourcedashboard['Dashboard']['Version'].keys():\n",
" SourceThemearn=sourcedashboard['Dashboard']['Version']['ThemeArn']\n",
" TargetThemeArn = 'arn:aws:quicksight:'+region+':'+account_id+':theme/'+SourceThemearn.split(\"/\")[1]\n",
" sourcedsref = []\n",
" #print(sourcedname)\n",
" for i in DataSetArns:\n",
" missing=False\n",
" did = i.split(\"/\")[1]\n",
" try:\n",
" dname=get_dataset_name(did, sourcesession)\n",
" except Exception as e:\n",
" faillist.append({\"Error Type\": \"Dataset: \"+did+\" is missing!\",\"DashboardId\": sourcetid, \"Name\": sourcetname, \"Error\": str(e)})\n",
" missing=True\n",
" break\n",
" \n",
" sourcedsref.append({'DataSetPlaceholder': dname,\n",
" 'DataSetArn': i})\n",
" if missing: continue\n",
" try:\n",
" sourcetemplate = create_template(sourcesession, sourcetid, sourcetname, sourcedsref, sourceanalysis, '1')\n",
" sourcetemplate=describe_template(sourcesession,sourcetid)\n",
" except Exception as e:\n",
" faillist.append({\"Error Type\": \"Create Source Template Error\",\"DashboardId\": sourcetid, \"Name\": sourcetname, \"Error\": str(e)})\n",
" continue\n",
" \n",
" while sourcetemplate['Template']['Version']['Status']==\"CREATION_IN_PROGRESS\":\n",
" time.sleep(5)\n",
" sourcetemplate=describe_template(sourcesession,sourcetid)\n",
" if sourcetemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n",
" try:\n",
" updateres=update_template_permission(sourcesession, sourcetid, targetroot)\n",
" except Exception as e:\n",
" delete_template(sourcesession, sourcetid)\n",
" faillist.append({\"Error Type\": \"Update Source Template Permission Error\",\n",
" \"DashboardId\": sourcetid, \n",
" \"Name\": sourcetname, \n",
" \"Error\": str(e)})\n",
" else: \n",
" if sourcetemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n",
" try:\n",
" updateres=update_template_permission(sourcesession, sourcetid, targetroot)\n",
" except Exception as e:\n",
" delete_template(sourcesession, sourcetid)\n",
" faillist.append({\"Error Type\": \"Update Source Template Permission Error\",\n",
" \"DashboardId\": sourcetid, \n",
" \"Name\": sourcetname, \n",
" \"Error\": str(e)})\n",
" continue\n",
"\n",
" if updateres['Status']==200:\n",
" try:\n",
" targettemplate=copy_template(targetsession, targettid, targettname, updateres['TemplateArn'])\n",
" except Exception as e:\n",
" delete_template(sourcesession, sourcetid)\n",
" faillist.append({\"Error Type\": \"Copy Template Error\",\n",
" \"DashboardId\": sourcetid, \n",
" \"Name\": sourcetname, \n",
" \"Error\": str(e)}) \n",
" continue\n",
" else: \n",
" delete_template(sourcesession, sourcetid)\n",
" faillist.append({\"Error Type\": \"Update Source Template Permission Error\",\n",
" \"DashboardId\": sourcetid, \n",
" \"Name\": sourcetname, \n",
" \"Error\": str(e)})\n",
" continue\n",
" \n",
" targettemplate=describe_template(targetsession,targettid)\n",
" \n",
" while targettemplate['Template']['Version']['Status']==\"CREATION_IN_PROGRESS\":\n",
" time.sleep(5)\n",
" targettemplate=describe_template(targetsession,targettid)\n",
" if targettemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n",
" break\n",
" else: \n",
" if targettemplate['Template']['Version']['Status']==\"CREATION_SUCCESSFUL\":\n",
" print(\"Template is successful copied!\")\n",
" else: \n",
" delete_template(targetsession, targettid)\n",
" faillist.append({\"Error Type\": \"Copy Template Error\",\n",
" \"DashboardId\": sourcetid, \n",
" \"Name\": sourcetname, \n",
" \"Error\": str(e)})\n",
" continue\n",
" \n",
" ds=data_sets (targetsession)\n",
" Template=targettemplate['Template']\n",
" dsref=[]\n",
" #print(Template['Version']['DataSetConfigurations'])\n",
" missing=False\n",
" for i in Template['Version']['DataSetConfigurations']:\n",
" #print(\"i is \"+str(i))\n",
" n=Template['Version']['DataSetConfigurations'].index(i)\n",
" #print(\"n is \"+str(n))\n",
" for j in ds:\n",
" if i['Placeholder']==j['Name']:\n",
" print(j['Name'])\n",
" dsref.append({\n",
" 'DataSetPlaceholder': i['Placeholder'],\n",
" 'DataSetArn': j['Arn']\n",
" })\n",
" break\n",
" print(\"len of dsref is \"+str(len(dsref)))\n",
" print(dsref)\n",
" if (n+1)>len(dsref): \n",
" e=\"Dataset \"+i['Placeholder']+\" is missing!\"\n",
" faillist.append({\"Error Type\": \"Datasets in target env are missing for this dashboard\",\n",
" \"DashboardId\": sourcetid, \n",
" \"Name\": sourcetname, \n",
" \"Error\": str(e)})\n",
" missing=True\n",
" break\n",
" if missing: break\n",
" if missing: continue\n",
" #print(\"len of dsref is \"+str(len(dsref)))\n",
" #print(dsref) \n",
" SourceEntity={\n",
" 'SourceTemplate': {\n",
" 'DataSetReferences': dsref,\n",
" 'Arn': Template['Arn']\n",
" }\n",
" }\n",
" #print(SourceEntity)\n",
" dashboard=describe_dashboard(targetsession, targettid)\n",
"\n",
" if 'Faild to describe dashboard:' in dashboard:\n",
" if 'dashboard/'+targettid+' is not found' in dashboard:\n",
" print(\"Create new dashboard now:\")\n",
" try:\n",
" newdashboard=create_dashboard(targetsession, targettid, targettname,targetadmin, SourceEntity, '1',TargetThemeArn, filter='ENABLED',csv='ENABLED', sheetcontrol='COLLAPSED')\n",
" except Exception as e:\n",
" delete_template(targetsession, targettid)\n",
" faillist.append({\"Error Type\": \"Create New Dashboard Error\",\n",
" \"DashboardId\": targettid, \n",
" \"Name\": targettname, \n",
" \"Error\": str(e)}) \n",
" continue\n",
" else: \n",
" faillist.append({\"Error Type\": \"Describe Target Dashboard Error\",\n",
" \"DashboardId\": targettid, \n",
" \"Name\": targettname, \n",
" \"Error\": str(dashboard)}) \n",
" continue\n",
" elif dashboard['Dashboard']['Version']['Status']==\"CREATION_FAILED\":\n",
" res=delete_dashboard(targetsession, targettid)\n",
" try:\n",
" newdashboard=create_dashboard(targetsession, targettid, targettname,targetadmin, SourceEntity, '1',TargetThemeArn, filter='ENABLED',csv='ENABLED', sheetcontrol='COLLAPSED')\n",
" except Exception as e:\n",
" delete_template(targetsession, targettid)\n",
" faillist.append({\"Error Type\": \"Create Dashboard Error\",\n",
" \"DashboardId\": targettid, \n",
" \"Name\": targettname, \n",
" \"Error\": str(e)})\n",
" continue\n",
" \n",
" else:\n",
" print(\"dashboard is existing. update it now.\")\n",
" try:\n",
" res=delete_dashboard(targetsession, targettid)\n",
" newdashboard=create_dashboard(targetsession, targettid, targettname,targetadmin, SourceEntity, '1',TargetThemeArn, filter='ENABLED',csv='ENABLED', sheetcontrol='COLLAPSED')\n",
" except Exception as e:\n",
" delete_template(targetsession, targettid)\n",
" faillist.append({\"Error Type\": \"Create Dashboard Error\",\n",
" \"DashboardId\": targettid, \n",
" \"Name\": targettname, \n",
" \"Error\": str(e)})\n",
" continue\n",
"\n",
" res=describe_dashboard(targetsession,newdashboard['DashboardId'])\n",
" \n",
" if res['Status']==200:\n",
" status=res['Dashboard']['Version']['Status']\n",
" if status=='CREATION_SUCCESSFUL' or status=='UPDATE_SUCCESSFUL':\n",
" success.append(res['Dashboard'])\n",
" #filename=\"Deployment_Results/Successful/Dashboard_\"+res['Dashboard']['Name']+\".json\"\n",
" else:\n",
" faillist.append({\"Error Type\": \"Dashboard Creation Status is not Successful\", \"Dashboard\": res['Dashboard']})\n",
"\n",
" #filename=\"Deployment_Results/Fail/Dashboard_\"+res['Dashboard']['Name']+\".json\""
]
},
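{
"cell_type": "markdown",
"metadata": {},
"source": [
"The cross-account copy step creates a template in the target account from the shared source template ARN. A sketch of what copy_template might wrap; again an assumption about the Functions notebook:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#Hypothetical sketch; the real copy_template is imported from Functions\n",
"def copy_template_sketch(session, template_id, name, source_template_arn):\n",
"    qs = session.client('quicksight')\n",
"    account_id = session.client('sts').get_caller_identity()['Account']\n",
"    return qs.create_template(\n",
"        AwsAccountId=account_id,\n",
"        TemplateId=template_id,\n",
"        Name=name,\n",
"        SourceEntity={'SourceTemplate': {'Arn': source_template_arn}})"
]
},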
{
"cell_type": "code",
"execution_count": 26,
"metadata": {},
"outputs": [],
"source": [
"with open(faillocation+now+'Dashboard_Error.json', \"w\") as f:\n",
" json.dump(faillist, f, indent=4, sort_keys=True, default=str)\n",
"\n",
"with open(successlocation+now+'Dashboard_Success.json', \"w\") as f:\n",
" json.dump(success, f, indent=4, sort_keys=True, default=str)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Delete objects"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# THIS WILL DELETE ALL TARGET DATASETS\n",
"\n",
"delete = \"template\"\n",
"\n",
"if delete == \"datasource\": \n",
" for datasource in data_sources(targetsession):\n",
" #if datasource['Type'] == \"REDSHIFT\":\n",
" try:\n",
" delete_source (targetsession, datasource['DataSourceId'])\n",
" except Exception: pass \n",
"elif delete == \"dataset\":\n",
" for dataset in data_sets(targetsession):\n",
" delete_dataset (targetsession, dataset['DataSetId'])\n",
"elif delete == \"template\": \n",
" for template in templates(targetsession):\n",
" delete_template(targetsession, template['TemplateId'])\n",
"elif delete == \"analysis\":\n",
" for analysis in analysis(targetsession): delete_analysis(targetsession, analysis['AnalysisId'])\n",
" \n",
"elif delete == \"dashboard\": \n",
" for dashboard in dashboards(targetsession):\n",
" delete_dashboard(targetsession, dashboard['DashboardId'])\n",
"delete =\"don't delete anything\""
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Schedule notebooks to execute"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"https://aws.amazon.com/blogs/machine-learning/scheduling-jupyter-notebooks-on-sagemaker-ephemeral-instances/"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.0"
},
"toc-showcode": false,
"toc-showmarkdowntxt": true
},
"nbformat": 4,
"nbformat_minor": 4
}