# Firehose Settings
deliveryStreamName = mydeliveryStream
firehoseRegion = us-east-1

# Provide the details of the Redshift destination.
clusterJDBCUrl =
username =
password =
dataTableName =
copyOptions = json 'auto'
# Optional data columns field
#dataTableColumns =

# Please fill in the name of the Amazon S3 bucket you'd like to use before running the sample.
# If you already created the bucket manually using the AWS Management Console, leave createS3Bucket
# as false; otherwise set it to true so that the sample creates the bucket. For an Amazon Redshift
# destination, your streaming data is delivered to your S3 bucket first. Firehose then issues an
# Amazon Redshift COPY command to load the data from your S3 bucket into your Amazon Redshift cluster.
createS3Bucket = false
s3BucketName =
s3RegionName = us-east-1
s3ObjectPrefix = mydeliverystream/

# IAM role name required for Firehose create/update. If the role already exists, make sure the role
# policy is updated with the specified bucket name, and also with the S3 prefix in the KMS condition
# for data delivery (applicable only if a KMS key ARN is provided).
iamRegion = us-east-1
iamRoleName = firehose_delivery

# Please fill in the account ID that will be used in the 'sts:ExternalId' condition of the role's
# trust policy document.
customerAccountId =

# Batching parameters: batch sizes can range from 1 MB to 128 MB, and intervals can range from
# 60 seconds to 900 seconds.
destinationSizeInMBs = 1
destinationIntervalInSeconds = 60

# Options used in the Update Destination operation. After updating the destination, records will be
# delivered under the new prefix location.
# If you would like to execute the updateDestination operation, set updateDestination to true.
updateDestination = false
updatedCopyOptions =
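
# Illustrative examples (commented out; not read by the sample).
#
# A hypothetical set of filled-in destination values. The cluster endpoint, database, user, and
# table names below are made-up placeholders, not values shipped with the sample:
#   clusterJDBCUrl = jdbc:redshift://mycluster.abc123xyz789.us-east-1.redshift.amazonaws.com:5439/mydb
#   username = firehose_user
#   dataTableName = firehose_test_table
#
# A rough sketch of the COPY command Firehose issues against the cluster, assuming the example
# values above. The actual S3 object path and the credentials clause are supplied by Firehose at
# delivery time; copyOptions is appended to the end of the command:
#   COPY firehose_test_table
#   FROM 's3://<s3BucketName>/<s3ObjectPrefix>...'
#   CREDENTIALS '<supplied by Firehose from the IAM role>'
#   json 'auto';
#
# A rough sketch of where customerAccountId ends up in the role's trust policy document:
#   "Condition": { "StringEquals": { "sts:ExternalId": "<customerAccountId>" } }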