/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

#pragma once

#include <aws/discovery/ApplicationDiscoveryService_EXPORTS.h>
#include <aws/discovery/model/ContinuousExportStatus.h>
#include <aws/discovery/model/DataSource.h>
#include <aws/core/utils/DateTime.h>
#include <aws/core/utils/memory/stl/AWSMap.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <utility>

namespace Aws
{
namespace Utils
{
namespace Json
{
  class JsonValue;
  class JsonView;
} // namespace Json
} // namespace Utils
namespace ApplicationDiscoveryService
{
namespace Model
{

  /**

   * <p>A list of continuous export descriptions.</p>
   *
   * <p><b>See Also:</b></p>
   * <a href="http://docs.aws.amazon.com/goto/WebAPI/discovery-2015-11-01/ContinuousExportDescription">AWS
   * API Reference</a>

*/ class ContinuousExportDescription { public: AWS_APPLICATIONDISCOVERYSERVICE_API ContinuousExportDescription(); AWS_APPLICATIONDISCOVERYSERVICE_API ContinuousExportDescription(Aws::Utils::Json::JsonView jsonValue); AWS_APPLICATIONDISCOVERYSERVICE_API ContinuousExportDescription& operator=(Aws::Utils::Json::JsonView jsonValue); AWS_APPLICATIONDISCOVERYSERVICE_API Aws::Utils::Json::JsonValue Jsonize() const; /** *

The unique ID assigned to this export.

*/ inline const Aws::String& GetExportId() const{ return m_exportId; } /** *

The unique ID assigned to this export.

*/ inline bool ExportIdHasBeenSet() const { return m_exportIdHasBeenSet; } /** *

The unique ID assigned to this export.

*/ inline void SetExportId(const Aws::String& value) { m_exportIdHasBeenSet = true; m_exportId = value; } /** *

The unique ID assigned to this export.

*/ inline void SetExportId(Aws::String&& value) { m_exportIdHasBeenSet = true; m_exportId = std::move(value); } /** *

The unique ID assigned to this export.

*/ inline void SetExportId(const char* value) { m_exportIdHasBeenSet = true; m_exportId.assign(value); } /** *

The unique ID assigned to this export.

*/ inline ContinuousExportDescription& WithExportId(const Aws::String& value) { SetExportId(value); return *this;} /** *

The unique ID assigned to this export.

*/ inline ContinuousExportDescription& WithExportId(Aws::String&& value) { SetExportId(std::move(value)); return *this;} /** *

The unique ID assigned to this export.

*/ inline ContinuousExportDescription& WithExportId(const char* value) { SetExportId(value); return *this;} /** *

Describes the status of the export. Can be one of the following values:

*
  • START_IN_PROGRESS - setting up resources to start continuous * export.

  • START_FAILED - an error occurred setting up continuous * export. To recover, call start-continuous-export again.

  • ACTIVE * - data is being exported to the customer bucket.

  • ERROR - an * error occurred during export. To fix the issue, call stop-continuous-export and * start-continuous-export.

  • STOP_IN_PROGRESS - stopping the * export.

  • STOP_FAILED - an error occurred stopping the export. * To recover, call stop-continuous-export again.

  • INACTIVE - the * continuous export has been stopped. Data is no longer being exported to the * customer bucket.

*/ inline const ContinuousExportStatus& GetStatus() const{ return m_status; } /** *

Describes the status of the export. Can be one of the following values:

*
  • START_IN_PROGRESS - setting up resources to start continuous * export.

  • START_FAILED - an error occurred setting up continuous * export. To recover, call start-continuous-export again.

  • ACTIVE * - data is being exported to the customer bucket.

  • ERROR - an * error occurred during export. To fix the issue, call stop-continuous-export and * start-continuous-export.

  • STOP_IN_PROGRESS - stopping the * export.

  • STOP_FAILED - an error occurred stopping the export. * To recover, call stop-continuous-export again.

  • INACTIVE - the * continuous export has been stopped. Data is no longer being exported to the * customer bucket.

*/ inline bool StatusHasBeenSet() const { return m_statusHasBeenSet; } /** *

Describes the status of the export. Can be one of the following values:

*
  • START_IN_PROGRESS - setting up resources to start continuous * export.

  • START_FAILED - an error occurred setting up continuous * export. To recover, call start-continuous-export again.

  • ACTIVE * - data is being exported to the customer bucket.

  • ERROR - an * error occurred during export. To fix the issue, call stop-continuous-export and * start-continuous-export.

  • STOP_IN_PROGRESS - stopping the * export.

  • STOP_FAILED - an error occurred stopping the export. * To recover, call stop-continuous-export again.

  • INACTIVE - the * continuous export has been stopped. Data is no longer being exported to the * customer bucket.

*/ inline void SetStatus(const ContinuousExportStatus& value) { m_statusHasBeenSet = true; m_status = value; } /** *

Describes the status of the export. Can be one of the following values:

*
  • START_IN_PROGRESS - setting up resources to start continuous * export.

  • START_FAILED - an error occurred setting up continuous * export. To recover, call start-continuous-export again.

  • ACTIVE * - data is being exported to the customer bucket.

  • ERROR - an * error occurred during export. To fix the issue, call stop-continuous-export and * start-continuous-export.

  • STOP_IN_PROGRESS - stopping the * export.

  • STOP_FAILED - an error occurred stopping the export. * To recover, call stop-continuous-export again.

  • INACTIVE - the * continuous export has been stopped. Data is no longer being exported to the * customer bucket.

*/ inline void SetStatus(ContinuousExportStatus&& value) { m_statusHasBeenSet = true; m_status = std::move(value); } /** *

Describes the status of the export. Can be one of the following values:

*
  • START_IN_PROGRESS - setting up resources to start continuous * export.

  • START_FAILED - an error occurred setting up continuous * export. To recover, call start-continuous-export again.

  • ACTIVE * - data is being exported to the customer bucket.

  • ERROR - an * error occurred during export. To fix the issue, call stop-continuous-export and * start-continuous-export.

  • STOP_IN_PROGRESS - stopping the * export.

  • STOP_FAILED - an error occurred stopping the export. * To recover, call stop-continuous-export again.

  • INACTIVE - the * continuous export has been stopped. Data is no longer being exported to the * customer bucket.

*/ inline ContinuousExportDescription& WithStatus(const ContinuousExportStatus& value) { SetStatus(value); return *this;} /** *

Describes the status of the export. Can be one of the following values:

*
  • START_IN_PROGRESS - setting up resources to start continuous * export.

  • START_FAILED - an error occurred setting up continuous * export. To recover, call start-continuous-export again.

  • ACTIVE * - data is being exported to the customer bucket.

  • ERROR - an * error occurred during export. To fix the issue, call stop-continuous-export and * start-continuous-export.

  • STOP_IN_PROGRESS - stopping the * export.

  • STOP_FAILED - an error occurred stopping the export. * To recover, call stop-continuous-export again.

  • INACTIVE - the * continuous export has been stopped. Data is no longer being exported to the * customer bucket.

*/ inline ContinuousExportDescription& WithStatus(ContinuousExportStatus&& value) { SetStatus(std::move(value)); return *this;} /** *

Contains information about any errors that have occurred. This data type can * have the following values:

  • ACCESS_DENIED - You don’t have * permission to start Data Exploration in Amazon Athena. Contact your Amazon Web * Services administrator for help. For more information, see Setting * Up Amazon Web Services Application Discovery Service in the Application * Discovery Service User Guide.

  • DELIVERY_STREAM_LIMIT_FAILURE - * You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce * the number of streams or request a limit increase and try again. For more * information, see Kinesis * Data Streams Limits in the Amazon Kinesis Data Streams Developer Guide.

    *
  • FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an * error state because your user is missing the Amazon Web * ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in * Amazon Athena and try again. For more information, see Creating * the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the * Application Discovery Service User Guide.

  • *

    FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error * state because your user is missing one or more of the Kinesis data delivery * streams.

  • INTERNAL_FAILURE - The Data Exploration feature is in * an error state because of an internal failure. Try again later. If this problem * persists, contact Amazon Web Services Support.

  • *

    LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation * permissions to start continuous export. For more information, see * Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services * Lake Formation Model in the Amazon Web Services Lake Formation Developer * Guide.

    You can use one of the following two ways to resolve this * issue.

    1. If you don’t want to use the Lake Formation permission * model, you can change the default Data Catalog settings to use only Amazon Web * Services Identity and Access Management (IAM) access control for new databases. * For more information, see Change * Data Catalog Settings in the Lake Formation Developer Guide.

      *
    2. You can give the service-linked IAM roles * AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and * AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. * For more information, see * Granting Database Permissions in the Lake Formation Developer Guide. *

      1. AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - * Grant database creator permissions, which gives the role database creation * ability and implicit permissions for any created tables. For more information, * see * Implicit Lake Formation Permissions in the Lake Formation Developer * Guide.

      2. AWSApplicationDiscoveryServiceFirehose - Grant * describe permissions for all tables in the database.

    *
  • S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 * buckets. Reduce the number of S3 buckets or request a limit increase and try * again. For more information, see Bucket * Restrictions and Limitations in the Amazon Simple Storage Service Developer * Guide.

  • S3_NOT_SIGNED_UP - Your account is not signed up for * the Amazon S3 service. You must sign up before you can use Amazon S3. You can * sign up at the following URL: https://aws.amazon.com/s3.

*/ inline const Aws::String& GetStatusDetail() const{ return m_statusDetail; } /** *

Contains information about any errors that have occurred. This data type can * have the following values:

  • ACCESS_DENIED - You don’t have * permission to start Data Exploration in Amazon Athena. Contact your Amazon Web * Services administrator for help. For more information, see Setting * Up Amazon Web Services Application Discovery Service in the Application * Discovery Service User Guide.

  • DELIVERY_STREAM_LIMIT_FAILURE - * You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce * the number of streams or request a limit increase and try again. For more * information, see Kinesis * Data Streams Limits in the Amazon Kinesis Data Streams Developer Guide.

    *
  • FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an * error state because your user is missing the Amazon Web * ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in * Amazon Athena and try again. For more information, see Creating * the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the * Application Discovery Service User Guide.

  • *

    FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error * state because your user is missing one or more of the Kinesis data delivery * streams.

  • INTERNAL_FAILURE - The Data Exploration feature is in * an error state because of an internal failure. Try again later. If this problem * persists, contact Amazon Web Services Support.

  • *

    LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation * permissions to start continuous export. For more information, see * Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services * Lake Formation Model in the Amazon Web Services Lake Formation Developer * Guide.

    You can use one of the following two ways to resolve this * issue.

    1. If you don’t want to use the Lake Formation permission * model, you can change the default Data Catalog settings to use only Amazon Web * Services Identity and Access Management (IAM) access control for new databases. * For more information, see Change * Data Catalog Settings in the Lake Formation Developer Guide.

      *
    2. You can give the service-linked IAM roles * AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and * AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. * For more information, see * Granting Database Permissions in the Lake Formation Developer Guide. *

      1. AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - * Grant database creator permissions, which gives the role database creation * ability and implicit permissions for any created tables. For more information, * see * Implicit Lake Formation Permissions in the Lake Formation Developer * Guide.

      2. AWSApplicationDiscoveryServiceFirehose - Grant * describe permissions for all tables in the database.

    *
  • S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 * buckets. Reduce the number of S3 buckets or request a limit increase and try * again. For more information, see Bucket * Restrictions and Limitations in the Amazon Simple Storage Service Developer * Guide.

  • S3_NOT_SIGNED_UP - Your account is not signed up for * the Amazon S3 service. You must sign up before you can use Amazon S3. You can * sign up at the following URL: https://aws.amazon.com/s3.

*/ inline bool StatusDetailHasBeenSet() const { return m_statusDetailHasBeenSet; } /** *

Contains information about any errors that have occurred. This data type can * have the following values:

  • ACCESS_DENIED - You don’t have * permission to start Data Exploration in Amazon Athena. Contact your Amazon Web * Services administrator for help. For more information, see Setting * Up Amazon Web Services Application Discovery Service in the Application * Discovery Service User Guide.

  • DELIVERY_STREAM_LIMIT_FAILURE - * You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce * the number of streams or request a limit increase and try again. For more * information, see Kinesis * Data Streams Limits in the Amazon Kinesis Data Streams Developer Guide.

    *
  • FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an * error state because your user is missing the Amazon Web * ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in * Amazon Athena and try again. For more information, see Creating * the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the * Application Discovery Service User Guide.

  • *

    FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error * state because your user is missing one or more of the Kinesis data delivery * streams.

  • INTERNAL_FAILURE - The Data Exploration feature is in * an error state because of an internal failure. Try again later. If this problem * persists, contact Amazon Web Services Support.

  • *

    LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation * permissions to start continuous export. For more information, see * Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services * Lake Formation Model in the Amazon Web Services Lake Formation Developer * Guide.

    You can use one of the following two ways to resolve this * issue.

    1. If you don’t want to use the Lake Formation permission * model, you can change the default Data Catalog settings to use only Amazon Web * Services Identity and Access Management (IAM) access control for new databases. * For more information, see Change * Data Catalog Settings in the Lake Formation Developer Guide.

      *
    2. You can give the service-linked IAM roles * AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and * AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. * For more information, see * Granting Database Permissions in the Lake Formation Developer Guide. *

      1. AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - * Grant database creator permissions, which gives the role database creation * ability and implicit permissions for any created tables. For more information, * see * Implicit Lake Formation Permissions in the Lake Formation Developer * Guide.

      2. AWSApplicationDiscoveryServiceFirehose - Grant * describe permissions for all tables in the database.

    *
  • S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 * buckets. Reduce the number of S3 buckets or request a limit increase and try * again. For more information, see Bucket * Restrictions and Limitations in the Amazon Simple Storage Service Developer * Guide.

  • S3_NOT_SIGNED_UP - Your account is not signed up for * the Amazon S3 service. You must sign up before you can use Amazon S3. You can * sign up at the following URL: https://aws.amazon.com/s3.

*/ inline void SetStatusDetail(const Aws::String& value) { m_statusDetailHasBeenSet = true; m_statusDetail = value; } /** *

Contains information about any errors that have occurred. This data type can * have the following values:

  • ACCESS_DENIED - You don’t have * permission to start Data Exploration in Amazon Athena. Contact your Amazon Web * Services administrator for help. For more information, see Setting * Up Amazon Web Services Application Discovery Service in the Application * Discovery Service User Guide.

  • DELIVERY_STREAM_LIMIT_FAILURE - * You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce * the number of streams or request a limit increase and try again. For more * information, see Kinesis * Data Streams Limits in the Amazon Kinesis Data Streams Developer Guide.

    *
  • FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an * error state because your user is missing the Amazon Web * ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in * Amazon Athena and try again. For more information, see Creating * the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the * Application Discovery Service User Guide.

  • *

    FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error * state because your user is missing one or more of the Kinesis data delivery * streams.

  • INTERNAL_FAILURE - The Data Exploration feature is in * an error state because of an internal failure. Try again later. If this problem * persists, contact Amazon Web Services Support.

  • *

    LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation * permissions to start continuous export. For more information, see * Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services * Lake Formation Model in the Amazon Web Services Lake Formation Developer * Guide.

    You can use one of the following two ways to resolve this * issue.

    1. If you don’t want to use the Lake Formation permission * model, you can change the default Data Catalog settings to use only Amazon Web * Services Identity and Access Management (IAM) access control for new databases. * For more information, see Change * Data Catalog Settings in the Lake Formation Developer Guide.

      *
    2. You can give the service-linked IAM roles * AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and * AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. * For more information, see * Granting Database Permissions in the Lake Formation Developer Guide. *

      1. AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - * Grant database creator permissions, which gives the role database creation * ability and implicit permissions for any created tables. For more information, * see * Implicit Lake Formation Permissions in the Lake Formation Developer * Guide.

      2. AWSApplicationDiscoveryServiceFirehose - Grant * describe permissions for all tables in the database.

    *
  • S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 * buckets. Reduce the number of S3 buckets or request a limit increase and try * again. For more information, see Bucket * Restrictions and Limitations in the Amazon Simple Storage Service Developer * Guide.

  • S3_NOT_SIGNED_UP - Your account is not signed up for * the Amazon S3 service. You must sign up before you can use Amazon S3. You can * sign up at the following URL: https://aws.amazon.com/s3.

*/ inline void SetStatusDetail(Aws::String&& value) { m_statusDetailHasBeenSet = true; m_statusDetail = std::move(value); } /** *

Contains information about any errors that have occurred. This data type can * have the following values:

  • ACCESS_DENIED - You don’t have * permission to start Data Exploration in Amazon Athena. Contact your Amazon Web * Services administrator for help. For more information, see Setting * Up Amazon Web Services Application Discovery Service in the Application * Discovery Service User Guide.

  • DELIVERY_STREAM_LIMIT_FAILURE - * You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce * the number of streams or request a limit increase and try again. For more * information, see Kinesis * Data Streams Limits in the Amazon Kinesis Data Streams Developer Guide.

    *
  • FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an * error state because your user is missing the Amazon Web * ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in * Amazon Athena and try again. For more information, see Creating * the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the * Application Discovery Service User Guide.

  • *

    FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error * state because your user is missing one or more of the Kinesis data delivery * streams.

  • INTERNAL_FAILURE - The Data Exploration feature is in * an error state because of an internal failure. Try again later. If this problem * persists, contact Amazon Web Services Support.

  • *

    LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation * permissions to start continuous export. For more information, see * Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services * Lake Formation Model in the Amazon Web Services Lake Formation Developer * Guide.

    You can use one of the following two ways to resolve this * issue.

    1. If you don’t want to use the Lake Formation permission * model, you can change the default Data Catalog settings to use only Amazon Web * Services Identity and Access Management (IAM) access control for new databases. * For more information, see Change * Data Catalog Settings in the Lake Formation Developer Guide.

      *
    2. You can give the service-linked IAM roles * AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and * AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. * For more information, see * Granting Database Permissions in the Lake Formation Developer Guide. *

      1. AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - * Grant database creator permissions, which gives the role database creation * ability and implicit permissions for any created tables. For more information, * see * Implicit Lake Formation Permissions in the Lake Formation Developer * Guide.

      2. AWSApplicationDiscoveryServiceFirehose - Grant * describe permissions for all tables in the database.

    *
  • S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 * buckets. Reduce the number of S3 buckets or request a limit increase and try * again. For more information, see Bucket * Restrictions and Limitations in the Amazon Simple Storage Service Developer * Guide.

  • S3_NOT_SIGNED_UP - Your account is not signed up for * the Amazon S3 service. You must sign up before you can use Amazon S3. You can * sign up at the following URL: https://aws.amazon.com/s3.

*/ inline void SetStatusDetail(const char* value) { m_statusDetailHasBeenSet = true; m_statusDetail.assign(value); } /** *

Contains information about any errors that have occurred. This data type can * have the following values:

  • ACCESS_DENIED - You don’t have * permission to start Data Exploration in Amazon Athena. Contact your Amazon Web * Services administrator for help. For more information, see Setting * Up Amazon Web Services Application Discovery Service in the Application * Discovery Service User Guide.

  • DELIVERY_STREAM_LIMIT_FAILURE - * You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce * the number of streams or request a limit increase and try again. For more * information, see Kinesis * Data Streams Limits in the Amazon Kinesis Data Streams Developer Guide.

    *
  • FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an * error state because your user is missing the Amazon Web * ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in * Amazon Athena and try again. For more information, see Creating * the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the * Application Discovery Service User Guide.

  • *

    FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error * state because your user is missing one or more of the Kinesis data delivery * streams.

  • INTERNAL_FAILURE - The Data Exploration feature is in * an error state because of an internal failure. Try again later. If this problem * persists, contact Amazon Web Services Support.

  • *

    LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation * permissions to start continuous export. For more information, see * Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services * Lake Formation Model in the Amazon Web Services Lake Formation Developer * Guide.

    You can use one of the following two ways to resolve this * issue.

    1. If you don’t want to use the Lake Formation permission * model, you can change the default Data Catalog settings to use only Amazon Web * Services Identity and Access Management (IAM) access control for new databases. * For more information, see Change * Data Catalog Settings in the Lake Formation Developer Guide.

      *
    2. You can give the service-linked IAM roles * AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and * AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. * For more information, see * Granting Database Permissions in the Lake Formation Developer Guide. *

      1. AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - * Grant database creator permissions, which gives the role database creation * ability and implicit permissions for any created tables. For more information, * see * Implicit Lake Formation Permissions in the Lake Formation Developer * Guide.

      2. AWSApplicationDiscoveryServiceFirehose - Grant * describe permissions for all tables in the database.

    *
  • S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 * buckets. Reduce the number of S3 buckets or request a limit increase and try * again. For more information, see Bucket * Restrictions and Limitations in the Amazon Simple Storage Service Developer * Guide.

  • S3_NOT_SIGNED_UP - Your account is not signed up for * the Amazon S3 service. You must sign up before you can use Amazon S3. You can * sign up at the following URL: https://aws.amazon.com/s3.

*/ inline ContinuousExportDescription& WithStatusDetail(const Aws::String& value) { SetStatusDetail(value); return *this;} /** *

Contains information about any errors that have occurred. This data type can * have the following values:

  • ACCESS_DENIED - You don’t have * permission to start Data Exploration in Amazon Athena. Contact your Amazon Web * Services administrator for help. For more information, see Setting * Up Amazon Web Services Application Discovery Service in the Application * Discovery Service User Guide.

  • DELIVERY_STREAM_LIMIT_FAILURE - * You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce * the number of streams or request a limit increase and try again. For more * information, see Kinesis * Data Streams Limits in the Amazon Kinesis Data Streams Developer Guide.

    *
  • FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an * error state because your user is missing the Amazon Web * ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in * Amazon Athena and try again. For more information, see Creating * the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the * Application Discovery Service User Guide.

  • *

    FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error * state because your user is missing one or more of the Kinesis data delivery * streams.

  • INTERNAL_FAILURE - The Data Exploration feature is in * an error state because of an internal failure. Try again later. If this problem * persists, contact Amazon Web Services Support.

  • *

    LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation * permissions to start continuous export. For more information, see * Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services * Lake Formation Model in the Amazon Web Services Lake Formation Developer * Guide.

    You can use one of the following two ways to resolve this * issue.

    1. If you don’t want to use the Lake Formation permission * model, you can change the default Data Catalog settings to use only Amazon Web * Services Identity and Access Management (IAM) access control for new databases. * For more information, see Change * Data Catalog Settings in the Lake Formation Developer Guide.

      *
    2. You can give the service-linked IAM roles * AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and * AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. * For more information, see * Granting Database Permissions in the Lake Formation Developer Guide. *

      1. AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - * Grant database creator permissions, which gives the role database creation * ability and implicit permissions for any created tables. For more information, * see * Implicit Lake Formation Permissions in the Lake Formation Developer * Guide.

      2. AWSApplicationDiscoveryServiceFirehose - Grant * describe permissions for all tables in the database.

    *
  • S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 * buckets. Reduce the number of S3 buckets or request a limit increase and try * again. For more information, see Bucket * Restrictions and Limitations in the Amazon Simple Storage Service Developer * Guide.

  • S3_NOT_SIGNED_UP - Your account is not signed up for * the Amazon S3 service. You must sign up before you can use Amazon S3. You can * sign up at the following URL: https://aws.amazon.com/s3.

*/ inline ContinuousExportDescription& WithStatusDetail(Aws::String&& value) { SetStatusDetail(std::move(value)); return *this;} /** *

Contains information about any errors that have occurred. This data type can * have the following values:

  • ACCESS_DENIED - You don’t have * permission to start Data Exploration in Amazon Athena. Contact your Amazon Web * Services administrator for help. For more information, see Setting * Up Amazon Web Services Application Discovery Service in the Application * Discovery Service User Guide.

  • DELIVERY_STREAM_LIMIT_FAILURE - * You reached the limit for Amazon Kinesis Data Firehose delivery streams. Reduce * the number of streams or request a limit increase and try again. For more * information, see Kinesis * Data Streams Limits in the Amazon Kinesis Data Streams Developer Guide.

    *
  • FIREHOSE_ROLE_MISSING - The Data Exploration feature is in an * error state because your user is missing the Amazon Web * ServicesApplicationDiscoveryServiceFirehose role. Turn on Data Exploration in * Amazon Athena and try again. For more information, see Creating * the Amazon Web ServicesApplicationDiscoveryServiceFirehose Role in the * Application Discovery Service User Guide.

  • *

    FIREHOSE_STREAM_DOES_NOT_EXIST - The Data Exploration feature is in an error * state because your user is missing one or more of the Kinesis data delivery * streams.

  • INTERNAL_FAILURE - The Data Exploration feature is in * an error state because of an internal failure. Try again later. If this problem * persists, contact Amazon Web Services Support.

  • *

    LAKE_FORMATION_ACCESS_DENIED - You don't have sufficient lake formation * permissions to start continuous export. For more information, see * Upgrading Amazon Web Services Glue Data Permissions to the Amazon Web Services * Lake Formation Model in the Amazon Web Services Lake Formation Developer * Guide.

    You can use one of the following two ways to resolve this * issue.

    1. If you don’t want to use the Lake Formation permission * model, you can change the default Data Catalog settings to use only Amazon Web * Services Identity and Access Management (IAM) access control for new databases. * For more information, see Change * Data Catalog Settings in the Lake Formation Developer Guide.

      *
    2. You can give the service-linked IAM roles * AWSServiceRoleForApplicationDiscoveryServiceContinuousExport and * AWSApplicationDiscoveryServiceFirehose the required Lake Formation permissions. * For more information, see * Granting Database Permissions in the Lake Formation Developer Guide. *

      1. AWSServiceRoleForApplicationDiscoveryServiceContinuousExport - * Grant database creator permissions, which gives the role database creation * ability and implicit permissions for any created tables. For more information, * see * Implicit Lake Formation Permissions in the Lake Formation Developer * Guide.

      2. AWSApplicationDiscoveryServiceFirehose - Grant * describe permissions for all tables in the database.

    *
  • S3_BUCKET_LIMIT_FAILURE - You reached the limit for Amazon S3 * buckets. Reduce the number of S3 buckets or request a limit increase and try * again. For more information, see Bucket * Restrictions and Limitations in the Amazon Simple Storage Service Developer * Guide.

  • S3_NOT_SIGNED_UP - Your account is not signed up for * the Amazon S3 service. You must sign up before you can use Amazon S3. You can * sign up at the following URL: https://aws.amazon.com/s3.

*/ inline ContinuousExportDescription& WithStatusDetail(const char* value) { SetStatusDetail(value); return *this;} /** *

The name of the s3 bucket where the export data parquet files are stored.

*/ inline const Aws::String& GetS3Bucket() const{ return m_s3Bucket; } /** *

The name of the s3 bucket where the export data parquet files are stored.

*/ inline bool S3BucketHasBeenSet() const { return m_s3BucketHasBeenSet; } /** *

The name of the s3 bucket where the export data parquet files are stored.

*/ inline void SetS3Bucket(const Aws::String& value) { m_s3BucketHasBeenSet = true; m_s3Bucket = value; } /** *

The name of the s3 bucket where the export data parquet files are stored.

*/ inline void SetS3Bucket(Aws::String&& value) { m_s3BucketHasBeenSet = true; m_s3Bucket = std::move(value); } /** *

The name of the s3 bucket where the export data parquet files are stored.

*/ inline void SetS3Bucket(const char* value) { m_s3BucketHasBeenSet = true; m_s3Bucket.assign(value); } /** *

The name of the s3 bucket where the export data parquet files are stored.

*/ inline ContinuousExportDescription& WithS3Bucket(const Aws::String& value) { SetS3Bucket(value); return *this;} /** *

The name of the s3 bucket where the export data parquet files are stored.

*/ inline ContinuousExportDescription& WithS3Bucket(Aws::String&& value) { SetS3Bucket(std::move(value)); return *this;} /** *

The name of the s3 bucket where the export data parquet files are stored.

*/ inline ContinuousExportDescription& WithS3Bucket(const char* value) { SetS3Bucket(value); return *this;} /** *

The timestamp representing when the continuous export was started.

*/ inline const Aws::Utils::DateTime& GetStartTime() const{ return m_startTime; } /** *

The timestamp representing when the continuous export was started.

*/ inline bool StartTimeHasBeenSet() const { return m_startTimeHasBeenSet; } /** *

The timestamp representing when the continuous export was started.

*/ inline void SetStartTime(const Aws::Utils::DateTime& value) { m_startTimeHasBeenSet = true; m_startTime = value; } /** *

The timestamp representing when the continuous export was started.

*/ inline void SetStartTime(Aws::Utils::DateTime&& value) { m_startTimeHasBeenSet = true; m_startTime = std::move(value); } /** *

The timestamp representing when the continuous export was started.

*/ inline ContinuousExportDescription& WithStartTime(const Aws::Utils::DateTime& value) { SetStartTime(value); return *this;} /** *

The timestamp representing when the continuous export was started.

*/ inline ContinuousExportDescription& WithStartTime(Aws::Utils::DateTime&& value) { SetStartTime(std::move(value)); return *this;} /** *

The timestamp that represents when this continuous export was stopped.

*/ inline const Aws::Utils::DateTime& GetStopTime() const{ return m_stopTime; } /** *

The timestamp that represents when this continuous export was stopped.

*/ inline bool StopTimeHasBeenSet() const { return m_stopTimeHasBeenSet; } /** *

The timestamp that represents when this continuous export was stopped.

*/ inline void SetStopTime(const Aws::Utils::DateTime& value) { m_stopTimeHasBeenSet = true; m_stopTime = value; } /** *

The timestamp that represents when this continuous export was stopped.

*/ inline void SetStopTime(Aws::Utils::DateTime&& value) { m_stopTimeHasBeenSet = true; m_stopTime = std::move(value); } /** *

The timestamp that represents when this continuous export was stopped.

*/ inline ContinuousExportDescription& WithStopTime(const Aws::Utils::DateTime& value) { SetStopTime(value); return *this;} /** *

The timestamp that represents when this continuous export was stopped.

*/ inline ContinuousExportDescription& WithStopTime(Aws::Utils::DateTime&& value) { SetStopTime(std::move(value)); return *this;} /** *

The type of data collector used to gather this data (currently only offered * for AGENT).

*/ inline const DataSource& GetDataSource() const{ return m_dataSource; } /** *

The type of data collector used to gather this data (currently only offered * for AGENT).

*/ inline bool DataSourceHasBeenSet() const { return m_dataSourceHasBeenSet; } /** *

The type of data collector used to gather this data (currently only offered * for AGENT).

*/ inline void SetDataSource(const DataSource& value) { m_dataSourceHasBeenSet = true; m_dataSource = value; } /** *

The type of data collector used to gather this data (currently only offered * for AGENT).

*/ inline void SetDataSource(DataSource&& value) { m_dataSourceHasBeenSet = true; m_dataSource = std::move(value); } /** *

The type of data collector used to gather this data (currently only offered * for AGENT).

*/ inline ContinuousExportDescription& WithDataSource(const DataSource& value) { SetDataSource(value); return *this;} /** *

The type of data collector used to gather this data (currently only offered * for AGENT).

*/ inline ContinuousExportDescription& WithDataSource(DataSource&& value) { SetDataSource(std::move(value)); return *this;} /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline const Aws::Map& GetSchemaStorageConfig() const{ return m_schemaStorageConfig; } /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline bool SchemaStorageConfigHasBeenSet() const { return m_schemaStorageConfigHasBeenSet; } /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline void SetSchemaStorageConfig(const Aws::Map& value) { m_schemaStorageConfigHasBeenSet = true; m_schemaStorageConfig = value; } /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline void SetSchemaStorageConfig(Aws::Map&& value) { m_schemaStorageConfigHasBeenSet = true; m_schemaStorageConfig = std::move(value); } /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline ContinuousExportDescription& WithSchemaStorageConfig(const Aws::Map& value) { SetSchemaStorageConfig(value); return *this;} /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline ContinuousExportDescription& WithSchemaStorageConfig(Aws::Map&& value) { SetSchemaStorageConfig(std::move(value)); return *this;} /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline ContinuousExportDescription& AddSchemaStorageConfig(const Aws::String& key, const Aws::String& value) { m_schemaStorageConfigHasBeenSet = true; m_schemaStorageConfig.emplace(key, value); return *this; } /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline ContinuousExportDescription& AddSchemaStorageConfig(Aws::String&& key, const Aws::String& value) { m_schemaStorageConfigHasBeenSet = true; m_schemaStorageConfig.emplace(std::move(key), value); return *this; } /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline ContinuousExportDescription& AddSchemaStorageConfig(const Aws::String& key, Aws::String&& value) { m_schemaStorageConfigHasBeenSet = true; m_schemaStorageConfig.emplace(key, std::move(value)); return *this; } /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline ContinuousExportDescription& AddSchemaStorageConfig(Aws::String&& key, Aws::String&& value) { m_schemaStorageConfigHasBeenSet = true; m_schemaStorageConfig.emplace(std::move(key), std::move(value)); return *this; } /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline ContinuousExportDescription& AddSchemaStorageConfig(const char* key, Aws::String&& value) { m_schemaStorageConfigHasBeenSet = true; m_schemaStorageConfig.emplace(key, std::move(value)); return *this; } /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline ContinuousExportDescription& AddSchemaStorageConfig(Aws::String&& key, const char* value) { m_schemaStorageConfigHasBeenSet = true; m_schemaStorageConfig.emplace(std::move(key), value); return *this; } /** *

An object which describes how the data is stored.

  • * databaseName - the name of the Glue database used to store the * schema.

*/ inline ContinuousExportDescription& AddSchemaStorageConfig(const char* key, const char* value) { m_schemaStorageConfigHasBeenSet = true; m_schemaStorageConfig.emplace(key, value); return *this; } private: Aws::String m_exportId; bool m_exportIdHasBeenSet = false; ContinuousExportStatus m_status; bool m_statusHasBeenSet = false; Aws::String m_statusDetail; bool m_statusDetailHasBeenSet = false; Aws::String m_s3Bucket; bool m_s3BucketHasBeenSet = false; Aws::Utils::DateTime m_startTime; bool m_startTimeHasBeenSet = false; Aws::Utils::DateTime m_stopTime; bool m_stopTimeHasBeenSet = false; DataSource m_dataSource; bool m_dataSourceHasBeenSet = false; Aws::Map m_schemaStorageConfig; bool m_schemaStorageConfigHasBeenSet = false; }; } // namespace Model } // namespace ApplicationDiscoveryService } // namespace Aws