/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
/**
 * <p>Provides information that describes an Apache Kafka endpoint. This
 * information includes the output format of records applied to the endpoint and
 * details of transaction and control table data information.</p><p><h3>See
 * Also:</h3> AWS API Reference</p>
 */
/**
 * <p>A comma-separated list of one or more broker locations in your Kafka cluster
 * that host your Kafka instance. Specify each broker location in the form
 * <code>broker-hostname-or-ip:port</code>. For example,
 * <code>"ec2-12-345-678-901.compute-1.amazonaws.com:2345"</code>. For more
 * information and examples of specifying a list of broker locations, see Using
 * Apache Kafka as a target for Database Migration Service in the Database
 * Migration Service User Guide.</p>
 */
/**
 * <p>The topic to which you migrate the data. If you don't specify a topic, DMS
 * specifies <code>"kafka-default-topic"</code> as the migration topic.</p>
 */
/**
 * <p>The output format for the records created on the endpoint. The message format
 * is <code>JSON</code> (default) or <code>JSON_UNFORMATTED</code> (a single line
 * with no tab).</p>
 */
/**
 * <p>Provides detailed transaction information from the source database. This
 * information includes a commit timestamp, a log position, and values for
 * <code>transaction_id</code>, previous <code>transaction_id</code>, and
 * <code>transaction_record_id</code> (the record offset within a transaction). The
 * default is <code>false</code>.</p>
 */
/**
 * <p>Shows the partition value within the Kafka message output unless the
 * partition type is <code>schema-table-type</code>. The default is
 * <code>false</code>.</p>
 */
/**
 * <p>Prefixes schema and table names to partition values, when the partition type
 * is <code>primary-key-type</code>. Doing this increases data distribution among
 * Kafka partitions. For example, suppose that a SysBench schema has thousands of
 * tables and each table has only limited range for a primary key. In this case,
 * the same primary key is sent from thousands of tables to the same partition,
 * which causes throttling. The default is <code>false</code>.</p>
 */
/**
 * <p>Includes any data definition language (DDL) operations that change the table
 * in the control data, such as <code>rename-table</code>, <code>drop-table</code>,
 * <code>add-column</code>, <code>drop-column</code>, and
 * <code>rename-column</code>. The default is <code>false</code>.</p>
 */
/**
 * <p>Shows detailed control information for table definition, column definition,
 * and table and column changes in the Kafka message output. The default is
 * <code>false</code>.</p>
 */
The maximum size in bytes for records created on the endpoint The default is * 1,000,000.
*/ inline int GetMessageMaxBytes() const{ return m_messageMaxBytes; } /** *The maximum size in bytes for records created on the endpoint The default is * 1,000,000.
*/ inline bool MessageMaxBytesHasBeenSet() const { return m_messageMaxBytesHasBeenSet; } /** *The maximum size in bytes for records created on the endpoint The default is * 1,000,000.
*/ inline void SetMessageMaxBytes(int value) { m_messageMaxBytesHasBeenSet = true; m_messageMaxBytes = value; } /** *The maximum size in bytes for records created on the endpoint The default is * 1,000,000.
*/ inline KafkaSettings& WithMessageMaxBytes(int value) { SetMessageMaxBytes(value); return *this;} /** *Include NULL and empty columns for records migrated to the endpoint. The
/**
 * <p>Include NULL and empty columns for records migrated to the endpoint. The
 * default is <code>false</code>.</p>
 */
/**
 * <p>Set secure connection to a Kafka target endpoint using Transport Layer
 * Security (TLS). Options include <code>ssl-encryption</code>,
 * <code>ssl-authentication</code>, and <code>sasl-ssl</code>.
 * <code>sasl-ssl</code> requires <code>SaslUsername</code> and
 * <code>SaslPassword</code>.</p>
 */
/**
 * <p>The Amazon Resource Name (ARN) of the client certificate used to securely
 * connect to a Kafka target endpoint.</p>
 */
inline const Aws::String& GetSslClientCertificateArn() const { return m_sslClientCertificateArn; }
inline bool SslClientCertificateArnHasBeenSet() const { return m_sslClientCertificateArnHasBeenSet; }
inline void SetSslClientCertificateArn(const Aws::String& value) { m_sslClientCertificateArn = value; m_sslClientCertificateArnHasBeenSet = true; }
inline void SetSslClientCertificateArn(Aws::String&& value) { m_sslClientCertificateArn = std::move(value); m_sslClientCertificateArnHasBeenSet = true; }
inline void SetSslClientCertificateArn(const char* value) { m_sslClientCertificateArn.assign(value); m_sslClientCertificateArnHasBeenSet = true; }
inline KafkaSettings& WithSslClientCertificateArn(const Aws::String& value) { SetSslClientCertificateArn(value); return *this; }
inline KafkaSettings& WithSslClientCertificateArn(Aws::String&& value) { SetSslClientCertificateArn(std::move(value)); return *this; }
inline KafkaSettings& WithSslClientCertificateArn(const char* value) { SetSslClientCertificateArn(value); return *this; }
/**
 * <p>The Amazon Resource Name (ARN) for the client private key used to securely
 * connect to a Kafka target endpoint.</p>
 */
inline const Aws::String& GetSslClientKeyArn() const { return m_sslClientKeyArn; }
inline bool SslClientKeyArnHasBeenSet() const { return m_sslClientKeyArnHasBeenSet; }
inline void SetSslClientKeyArn(const Aws::String& value) { m_sslClientKeyArn = value; m_sslClientKeyArnHasBeenSet = true; }
inline void SetSslClientKeyArn(Aws::String&& value) { m_sslClientKeyArn = std::move(value); m_sslClientKeyArnHasBeenSet = true; }
inline void SetSslClientKeyArn(const char* value) { m_sslClientKeyArn.assign(value); m_sslClientKeyArnHasBeenSet = true; }
inline KafkaSettings& WithSslClientKeyArn(const Aws::String& value) { SetSslClientKeyArn(value); return *this; }
inline KafkaSettings& WithSslClientKeyArn(Aws::String&& value) { SetSslClientKeyArn(std::move(value)); return *this; }
inline KafkaSettings& WithSslClientKeyArn(const char* value) { SetSslClientKeyArn(value); return *this; }
/**
 * <p>The password for the client private key used to securely connect to a Kafka
 * target endpoint.</p>
 */
inline const Aws::String& GetSslClientKeyPassword() const { return m_sslClientKeyPassword; }
inline bool SslClientKeyPasswordHasBeenSet() const { return m_sslClientKeyPasswordHasBeenSet; }
inline void SetSslClientKeyPassword(const Aws::String& value) { m_sslClientKeyPassword = value; m_sslClientKeyPasswordHasBeenSet = true; }
inline void SetSslClientKeyPassword(Aws::String&& value) { m_sslClientKeyPassword = std::move(value); m_sslClientKeyPasswordHasBeenSet = true; }
inline void SetSslClientKeyPassword(const char* value) { m_sslClientKeyPassword.assign(value); m_sslClientKeyPasswordHasBeenSet = true; }
inline KafkaSettings& WithSslClientKeyPassword(const Aws::String& value) { SetSslClientKeyPassword(value); return *this; }
inline KafkaSettings& WithSslClientKeyPassword(Aws::String&& value) { SetSslClientKeyPassword(std::move(value)); return *this; }
inline KafkaSettings& WithSslClientKeyPassword(const char* value) { SetSslClientKeyPassword(value); return *this; }
/**
 * <p>The Amazon Resource Name (ARN) for the private certificate authority (CA)
 * cert that DMS uses to securely connect to your Kafka target endpoint.</p>
 */
inline const Aws::String& GetSslCaCertificateArn() const { return m_sslCaCertificateArn; }
inline bool SslCaCertificateArnHasBeenSet() const { return m_sslCaCertificateArnHasBeenSet; }
inline void SetSslCaCertificateArn(const Aws::String& value) { m_sslCaCertificateArn = value; m_sslCaCertificateArnHasBeenSet = true; }
inline void SetSslCaCertificateArn(Aws::String&& value) { m_sslCaCertificateArn = std::move(value); m_sslCaCertificateArnHasBeenSet = true; }
inline void SetSslCaCertificateArn(const char* value) { m_sslCaCertificateArn.assign(value); m_sslCaCertificateArnHasBeenSet = true; }
inline KafkaSettings& WithSslCaCertificateArn(const Aws::String& value) { SetSslCaCertificateArn(value); return *this; }
inline KafkaSettings& WithSslCaCertificateArn(Aws::String&& value) { SetSslCaCertificateArn(std::move(value)); return *this; }
inline KafkaSettings& WithSslCaCertificateArn(const char* value) { SetSslCaCertificateArn(value); return *this; }
/**
 * <p>The secure user name you created when you first set up your MSK cluster to
 * validate a client identity and make an encrypted connection between server and
 * client using SASL-SSL authentication.</p>
 */
inline const Aws::String& GetSaslUsername() const { return m_saslUsername; }
inline bool SaslUsernameHasBeenSet() const { return m_saslUsernameHasBeenSet; }
inline void SetSaslUsername(const Aws::String& value) { m_saslUsername = value; m_saslUsernameHasBeenSet = true; }
inline void SetSaslUsername(Aws::String&& value) { m_saslUsername = std::move(value); m_saslUsernameHasBeenSet = true; }
inline void SetSaslUsername(const char* value) { m_saslUsername.assign(value); m_saslUsernameHasBeenSet = true; }
inline KafkaSettings& WithSaslUsername(const Aws::String& value) { SetSaslUsername(value); return *this; }
inline KafkaSettings& WithSaslUsername(Aws::String&& value) { SetSaslUsername(std::move(value)); return *this; }
inline KafkaSettings& WithSaslUsername(const char* value) { SetSaslUsername(value); return *this; }
/**
 * <p>The secure password you created when you first set up your MSK cluster to
 * validate a client identity and make an encrypted connection between server and
 * client using SASL-SSL authentication.</p>
 */
inline const Aws::String& GetSaslPassword() const { return m_saslPassword; }
inline bool SaslPasswordHasBeenSet() const { return m_saslPasswordHasBeenSet; }
inline void SetSaslPassword(const Aws::String& value) { m_saslPassword = value; m_saslPasswordHasBeenSet = true; }
inline void SetSaslPassword(Aws::String&& value) { m_saslPassword = std::move(value); m_saslPasswordHasBeenSet = true; }
inline void SetSaslPassword(const char* value) { m_saslPassword.assign(value); m_saslPasswordHasBeenSet = true; }
inline KafkaSettings& WithSaslPassword(const Aws::String& value) { SetSaslPassword(value); return *this; }
inline KafkaSettings& WithSaslPassword(Aws::String&& value) { SetSaslPassword(std::move(value)); return *this; }
inline KafkaSettings& WithSaslPassword(const char* value) { SetSaslPassword(value); return *this; }
/**
 * <p>Set this optional parameter to <code>true</code> to avoid adding a '0x'
 * prefix to raw data in hexadecimal format. For example, by default, DMS adds a
 * '0x' prefix to the LOB column type in hexadecimal format moving from an Oracle
 * source to a Kafka target. Use the <code>NoHexPrefix</code> endpoint setting to
 * enable migration of RAW data type columns without adding the '0x' prefix.</p>
 */
/**
 * <p>For SASL/SSL authentication, DMS supports the <code>SCRAM-SHA-512</code>
 * mechanism by default. DMS versions 3.5.0 and later also support the
 * <code>PLAIN</code> mechanism. To use the <code>PLAIN</code> mechanism, set this
 * parameter to <code>PLAIN.</code></p>
 */
Sets hostname verification for the certificate. This setting is supported in * DMS version 3.5.1 and later.
*/ inline const KafkaSslEndpointIdentificationAlgorithm& GetSslEndpointIdentificationAlgorithm() const{ return m_sslEndpointIdentificationAlgorithm; } /** *Sets hostname verification for the certificate. This setting is supported in * DMS version 3.5.1 and later.
*/ inline bool SslEndpointIdentificationAlgorithmHasBeenSet() const { return m_sslEndpointIdentificationAlgorithmHasBeenSet; } /** *Sets hostname verification for the certificate. This setting is supported in * DMS version 3.5.1 and later.
*/ inline void SetSslEndpointIdentificationAlgorithm(const KafkaSslEndpointIdentificationAlgorithm& value) { m_sslEndpointIdentificationAlgorithmHasBeenSet = true; m_sslEndpointIdentificationAlgorithm = value; } /** *Sets hostname verification for the certificate. This setting is supported in * DMS version 3.5.1 and later.
*/ inline void SetSslEndpointIdentificationAlgorithm(KafkaSslEndpointIdentificationAlgorithm&& value) { m_sslEndpointIdentificationAlgorithmHasBeenSet = true; m_sslEndpointIdentificationAlgorithm = std::move(value); } /** *Sets hostname verification for the certificate. This setting is supported in * DMS version 3.5.1 and later.
*/ inline KafkaSettings& WithSslEndpointIdentificationAlgorithm(const KafkaSslEndpointIdentificationAlgorithm& value) { SetSslEndpointIdentificationAlgorithm(value); return *this;} /** *Sets hostname verification for the certificate. This setting is supported in * DMS version 3.5.1 and later.
*/ inline KafkaSettings& WithSslEndpointIdentificationAlgorithm(KafkaSslEndpointIdentificationAlgorithm&& value) { SetSslEndpointIdentificationAlgorithm(std::move(value)); return *this;} private: Aws::String m_broker; bool m_brokerHasBeenSet = false; Aws::String m_topic; bool m_topicHasBeenSet = false; MessageFormatValue m_messageFormat; bool m_messageFormatHasBeenSet = false; bool m_includeTransactionDetails; bool m_includeTransactionDetailsHasBeenSet = false; bool m_includePartitionValue; bool m_includePartitionValueHasBeenSet = false; bool m_partitionIncludeSchemaTable; bool m_partitionIncludeSchemaTableHasBeenSet = false; bool m_includeTableAlterOperations; bool m_includeTableAlterOperationsHasBeenSet = false; bool m_includeControlDetails; bool m_includeControlDetailsHasBeenSet = false; int m_messageMaxBytes; bool m_messageMaxBytesHasBeenSet = false; bool m_includeNullAndEmpty; bool m_includeNullAndEmptyHasBeenSet = false; KafkaSecurityProtocol m_securityProtocol; bool m_securityProtocolHasBeenSet = false; Aws::String m_sslClientCertificateArn; bool m_sslClientCertificateArnHasBeenSet = false; Aws::String m_sslClientKeyArn; bool m_sslClientKeyArnHasBeenSet = false; Aws::String m_sslClientKeyPassword; bool m_sslClientKeyPasswordHasBeenSet = false; Aws::String m_sslCaCertificateArn; bool m_sslCaCertificateArnHasBeenSet = false; Aws::String m_saslUsername; bool m_saslUsernameHasBeenSet = false; Aws::String m_saslPassword; bool m_saslPasswordHasBeenSet = false; bool m_noHexPrefix; bool m_noHexPrefixHasBeenSet = false; KafkaSaslMechanism m_saslMechanism; bool m_saslMechanismHasBeenSet = false; KafkaSslEndpointIdentificationAlgorithm m_sslEndpointIdentificationAlgorithm; bool m_sslEndpointIdentificationAlgorithmHasBeenSet = false; }; } // namespace Model } // namespace DatabaseMigrationService } // namespace Aws