data "aws_region" "current" {} data "aws_caller_identity" "current" {} locals { arn_prefix = "arn:aws:kms:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:key" } resource "aws_iam_role" "glue" { name = "${local.name}-glue-service-role-${var.env}" assume_role_policy = < database} name = "${local.name}-glue-job-${each.value.index}-${var.env}" role_arn = aws_iam_role.glue.arn description = "Glue Job to transfer incremental Data from SQL Database to S3" command { script_location = "s3://${var.assets_bucket}/glue/gbq_incremental.py" } glue_version = var.glue_version number_of_workers = var.glue_number_of_workers worker_type = var.glue_worker_type timeout = var.glue_job_timeout default_arguments = { "--job-bookmark-option": "job-bookmark-enable", "--region_name" = data.aws_region.current.name, "--s3_path" = "s3://${var.s3_landing_bucket}/input/${var.s3_prefix_source}", "--parent_project" = each.value.parent_project "--dataset" = each.value.dataset "--secret_name" = aws_secretsmanager_secret.secrets_json.name "--additional-python-modules" = "google-cloud-bigquery==3.3.5" "--s3_bucket" = var.assets_bucket "--tables_file" = each.value.tables_file "--extra-py-files": "s3://${var.assets_bucket}/glue/gbq_incremental_lib.py" } connections = [aws_glue_connection.gbq_instance.name] } resource "aws_glue_connection" "gbq_instance" { name = "${local.name}-glue-connection-${var.env}" connection_type = "MARKETPLACE" connection_properties = { CONNECTOR_CLASS_NAME = var.connector_class CONNECTOR_URL = var.connector_url SECRET_ID = aws_secretsmanager_secret.secrets.id CONNECTOR_TYPE = "Spark" } }