/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #pragma once #include #include #include #include #include #include #include #include namespace Aws { namespace ForecastService { namespace Model { /** */ class CreateDatasetImportJobRequest : public ForecastServiceRequest { public: AWS_FORECASTSERVICE_API CreateDatasetImportJobRequest(); // Service request name is the Operation name which will send this request out, // each operation should has unique request name, so that we can get operation's name from this request. // Note: this is not true for response, multiple operations may have the same response name, // so we can not get operation's name from response. inline virtual const char* GetServiceRequestName() const override { return "CreateDatasetImportJob"; } AWS_FORECASTSERVICE_API Aws::String SerializePayload() const override; AWS_FORECASTSERVICE_API Aws::Http::HeaderValueCollection GetRequestSpecificHeaders() const override; /** *

The name for the dataset import job. We recommend including the current * timestamp in the name, for example, 20190721DatasetImport. This can * help you avoid getting a ResourceAlreadyExistsException * exception.

*/
    inline const Aws::String& GetDatasetImportJobName() const { return m_datasetImportJobName; }

    /**
     * Whether a dataset import job name has been assigned to this request.
     */
    inline bool DatasetImportJobNameHasBeenSet() const { return m_datasetImportJobNameHasBeenSet; }

    /**
     * Sets the dataset import job name (copies the given string). Including the
     * current timestamp in the name, e.g. 20190721DatasetImport, helps avoid a
     * ResourceAlreadyExistsException.
     */
    inline void SetDatasetImportJobName(const Aws::String& value)
    {
      m_datasetImportJobNameHasBeenSet = true;
      m_datasetImportJobName = value;
    }

    /**
     * Sets the dataset import job name by taking ownership of the given string.
     */
    inline void SetDatasetImportJobName(Aws::String&& value)
    {
      m_datasetImportJobNameHasBeenSet = true;
      m_datasetImportJobName = std::move(value);
    }

    /**
     * Sets the dataset import job name from a C string.
     */
    inline void SetDatasetImportJobName(const char* value)
    {
      m_datasetImportJobNameHasBeenSet = true;
      m_datasetImportJobName.assign(value);
    }

    /** Fluent setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithDatasetImportJobName(const Aws::String& value)
    {
      SetDatasetImportJobName(value);
      return *this;
    }

    /** Fluent move setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithDatasetImportJobName(Aws::String&& value)
    {
      SetDatasetImportJobName(std::move(value));
      return *this;
    }

    /** Fluent C-string setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithDatasetImportJobName(const char* value)
    {
      SetDatasetImportJobName(value);
      return *this;
    }

    /** *

The Amazon Resource Name (ARN) of the Amazon Forecast dataset that you want * to import data to.

*/
    inline const Aws::String& GetDatasetArn() const { return m_datasetArn; }

    /**
     * Whether a dataset ARN has been assigned to this request.
     */
    inline bool DatasetArnHasBeenSet() const { return m_datasetArnHasBeenSet; }

    /**
     * Sets the Amazon Resource Name (ARN) of the Amazon Forecast dataset that
     * the data will be imported into (copies the given string).
     */
    inline void SetDatasetArn(const Aws::String& value)
    {
      m_datasetArnHasBeenSet = true;
      m_datasetArn = value;
    }

    /** Sets the target dataset ARN by taking ownership of the given string. */
    inline void SetDatasetArn(Aws::String&& value)
    {
      m_datasetArnHasBeenSet = true;
      m_datasetArn = std::move(value);
    }

    /** Sets the target dataset ARN from a C string. */
    inline void SetDatasetArn(const char* value)
    {
      m_datasetArnHasBeenSet = true;
      m_datasetArn.assign(value);
    }

    /** Fluent setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithDatasetArn(const Aws::String& value)
    {
      SetDatasetArn(value);
      return *this;
    }

    /** Fluent move setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithDatasetArn(Aws::String&& value)
    {
      SetDatasetArn(std::move(value));
      return *this;
    }

    /** Fluent C-string setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithDatasetArn(const char* value)
    {
      SetDatasetArn(value);
      return *this;
    }

    /** *

The location of the training data to import and an Identity and Access * Management (IAM) role that Amazon Forecast can assume to access the data. The * training data must be stored in an Amazon S3 bucket.

If encryption is * used, DataSource must include an Key Management Service (KMS) key * and the IAM role must allow Amazon Forecast permission to access the key. The * KMS key and IAM role must match those specified in the * EncryptionConfig parameter of the CreateDataset * operation.

*/
    inline const DataSource& GetDataSource() const { return m_dataSource; }

    /**
     * Whether a data source has been assigned to this request.
     */
    inline bool DataSourceHasBeenSet() const { return m_dataSourceHasBeenSet; }

    /**
     * Sets the location of the training data to import (an S3 bucket) and the
     * IAM role Amazon Forecast assumes to access it. If encryption is used, the
     * DataSource must include a KMS key, the role must be allowed to use that
     * key, and both must match the EncryptionConfig given to CreateDataset.
     */
    inline void SetDataSource(const DataSource& value)
    {
      m_dataSourceHasBeenSet = true;
      m_dataSource = value;
    }

    /** Sets the data source by taking ownership of the given value. */
    inline void SetDataSource(DataSource&& value)
    {
      m_dataSourceHasBeenSet = true;
      m_dataSource = std::move(value);
    }

    /** Fluent setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithDataSource(const DataSource& value)
    {
      SetDataSource(value);
      return *this;
    }

    /** Fluent move setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithDataSource(DataSource&& value)
    {
      SetDataSource(std::move(value));
      return *this;
    }

    /** *

The format of timestamps in the dataset. The format that you specify depends * on the DataFrequency specified when the dataset was created. The * following formats are supported

  • "yyyy-MM-dd"

    For the * following data frequencies: Y, M, W, and D

  • "yyyy-MM-dd * HH:mm:ss"

    For the following data frequencies: H, 30min, 15min, and 1min; * and optionally, for: Y, M, W, and D

If the format isn't * specified, Amazon Forecast expects the format to be "yyyy-MM-dd HH:mm:ss".

*/
    inline const Aws::String& GetTimestampFormat() const { return m_timestampFormat; }

    /**
     * Whether a timestamp format has been assigned to this request.
     */
    inline bool TimestampFormatHasBeenSet() const { return m_timestampFormatHasBeenSet; }

    /**
     * Sets the format of timestamps in the dataset (copies the given string).
     * Supported formats depend on the dataset's DataFrequency:
     *  - "yyyy-MM-dd"          for frequencies Y, M, W, D
     *  - "yyyy-MM-dd HH:mm:ss" for H, 30min, 15min, 1min (optionally Y, M, W, D)
     * When unspecified, Amazon Forecast expects "yyyy-MM-dd HH:mm:ss".
     */
    inline void SetTimestampFormat(const Aws::String& value)
    {
      m_timestampFormatHasBeenSet = true;
      m_timestampFormat = value;
    }

    /** Sets the timestamp format by taking ownership of the given string. */
    inline void SetTimestampFormat(Aws::String&& value)
    {
      m_timestampFormatHasBeenSet = true;
      m_timestampFormat = std::move(value);
    }

    /** Sets the timestamp format from a C string. */
    inline void SetTimestampFormat(const char* value)
    {
      m_timestampFormatHasBeenSet = true;
      m_timestampFormat.assign(value);
    }

    /** Fluent setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithTimestampFormat(const Aws::String& value)
    {
      SetTimestampFormat(value);
      return *this;
    }

    /** Fluent move setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithTimestampFormat(Aws::String&& value)
    {
      SetTimestampFormat(std::move(value));
      return *this;
    }

    /** Fluent C-string setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithTimestampFormat(const char* value)
    {
      SetTimestampFormat(value);
      return *this;
    }

    /** *

A single time zone for every item in your dataset. This option is ideal for * datasets with all timestamps within a single time zone, or if all timestamps are * normalized to a single time zone.

Refer to the Joda-Time API for a * complete list of valid time zone names.

*/
    inline const Aws::String& GetTimeZone() const { return m_timeZone; }

    /**
     * Whether a time zone has been assigned to this request.
     */
    inline bool TimeZoneHasBeenSet() const { return m_timeZoneHasBeenSet; }

    /**
     * Sets a single time zone applied to every item in the dataset (copies the
     * given string). Suitable when all timestamps are within, or normalized to,
     * one time zone. Valid names are those of the Joda-Time API.
     */
    inline void SetTimeZone(const Aws::String& value)
    {
      m_timeZoneHasBeenSet = true;
      m_timeZone = value;
    }

    /** Sets the time zone by taking ownership of the given string. */
    inline void SetTimeZone(Aws::String&& value)
    {
      m_timeZoneHasBeenSet = true;
      m_timeZone = std::move(value);
    }

    /** Sets the time zone from a C string. */
    inline void SetTimeZone(const char* value)
    {
      m_timeZoneHasBeenSet = true;
      m_timeZone.assign(value);
    }

    /** Fluent setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithTimeZone(const Aws::String& value)
    {
      SetTimeZone(value);
      return *this;
    }

    /** Fluent move setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithTimeZone(Aws::String&& value)
    {
      SetTimeZone(std::move(value));
      return *this;
    }

    /** Fluent C-string setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithTimeZone(const char* value)
    {
      SetTimeZone(value);
      return *this;
    }

    /** *

Automatically derive time zone information from the geolocation attribute. * This option is ideal for datasets that contain timestamps in multiple time zones * and those timestamps are expressed in local time.

*/
    inline bool GetUseGeolocationForTimeZone() const { return m_useGeolocationForTimeZone; }

    /**
     * Whether the UseGeolocationForTimeZone flag has been explicitly assigned.
     */
    inline bool UseGeolocationForTimeZoneHasBeenSet() const { return m_useGeolocationForTimeZoneHasBeenSet; }

    /**
     * When set to true, time zone information is derived automatically from the
     * geolocation attribute -- useful for datasets whose timestamps span
     * multiple time zones and are expressed in local time.
     */
    inline void SetUseGeolocationForTimeZone(bool value)
    {
      m_useGeolocationForTimeZoneHasBeenSet = true;
      m_useGeolocationForTimeZone = value;
    }

    /** Fluent setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithUseGeolocationForTimeZone(bool value)
    {
      SetUseGeolocationForTimeZone(value);
      return *this;
    }

    /** *

The format of the geolocation attribute. The geolocation attribute can be * formatted in one of two ways:

  • LAT_LONG - the * latitude and longitude in decimal format (Example: 47.61_-122.33).

  • CC_POSTALCODE (US Only) - the country code (US), followed by the 5-digit ZIP code (Example: US_98121).

*/
    inline const Aws::String& GetGeolocationFormat() const { return m_geolocationFormat; }

    /**
     * Whether a geolocation format has been assigned to this request.
     */
    inline bool GeolocationFormatHasBeenSet() const { return m_geolocationFormatHasBeenSet; }

    /**
     * Sets the format of the geolocation attribute (copies the given string).
     * One of:
     *  - LAT_LONG      latitude/longitude in decimal form, e.g. 47.61_-122.33
     *  - CC_POSTALCODE (US only) country code plus 5-digit ZIP, e.g. US_98121
     */
    inline void SetGeolocationFormat(const Aws::String& value)
    {
      m_geolocationFormatHasBeenSet = true;
      m_geolocationFormat = value;
    }

    /** Sets the geolocation format by taking ownership of the given string. */
    inline void SetGeolocationFormat(Aws::String&& value)
    {
      m_geolocationFormatHasBeenSet = true;
      m_geolocationFormat = std::move(value);
    }

    /** Sets the geolocation format from a C string. */
    inline void SetGeolocationFormat(const char* value)
    {
      m_geolocationFormatHasBeenSet = true;
      m_geolocationFormat.assign(value);
    }

    /** Fluent setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithGeolocationFormat(const Aws::String& value)
    {
      SetGeolocationFormat(value);
      return *this;
    }

    /** Fluent move setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithGeolocationFormat(Aws::String&& value)
    {
      SetGeolocationFormat(std::move(value));
      return *this;
    }

    /** Fluent C-string setter; returns *this so calls can be chained. */
    inline CreateDatasetImportJobRequest& WithGeolocationFormat(const char* value)
    {
      SetGeolocationFormat(value);
      return *this;
    }

    /** *

The optional metadata that you apply to the dataset import job to help you * categorize and organize them. Each tag consists of a key and an optional value, * both of which you define.

The following basic restrictions apply to * tags:

  • Maximum number of tags per resource - 50.

  • For each resource, each tag key must be unique, and each tag key can have only one value.

  • Maximum key length - 128 Unicode characters in * UTF-8.

  • Maximum value length - 256 Unicode characters in * UTF-8.

  • If your tagging schema is used across multiple services * and resources, remember that other services may have restrictions on allowed * characters. Generally allowed characters are: letters, numbers, and spaces * representable in UTF-8, and the following characters: + - = . _ : / @.

  • Tag keys and values are case sensitive.

  • Do not use * aws:, AWS:, or any upper or lowercase combination of * such as a prefix for keys as it is reserved for Amazon Web Services use. You * cannot edit or delete tag keys with this prefix. Values can have this prefix. If * a tag value has aws as its prefix but the key does not, then * Forecast considers it to be a user tag and will count against the limit of 50 * tags. Tags with only the key prefix of aws do not count against * your tags per resource limit.

*/ inline const Aws::Vector& GetTags() const{ return m_tags; } /** *

The optional metadata that you apply to the dataset import job to help you * categorize and organize them. Each tag consists of a key and an optional value, * both of which you define.

The following basic restrictions apply to * tags:

  • Maximum number of tags per resource - 50.

  • *

    For each resource, each tag key must be unique, and each tag key can have * only one value.

  • Maximum key length - 128 Unicode characters in * UTF-8.

  • Maximum value length - 256 Unicode characters in * UTF-8.

  • If your tagging schema is used across multiple services * and resources, remember that other services may have restrictions on allowed * characters. Generally allowed characters are: letters, numbers, and spaces * representable in UTF-8, and the following characters: + - = . _ : / @.

  • *
  • Tag keys and values are case sensitive.

  • Do not use * aws:, AWS:, or any upper or lowercase combination of * such as a prefix for keys as it is reserved for Amazon Web Services use. You * cannot edit or delete tag keys with this prefix. Values can have this prefix. If * a tag value has aws as its prefix but the key does not, then * Forecast considers it to be a user tag and will count against the limit of 50 * tags. Tags with only the key prefix of aws do not count against * your tags per resource limit.

*/ inline bool TagsHasBeenSet() const { return m_tagsHasBeenSet; } /** *

The optional metadata that you apply to the dataset import job to help you * categorize and organize them. Each tag consists of a key and an optional value, * both of which you define.

The following basic restrictions apply to * tags:

  • Maximum number of tags per resource - 50.

  • *

    For each resource, each tag key must be unique, and each tag key can have * only one value.

  • Maximum key length - 128 Unicode characters in * UTF-8.

  • Maximum value length - 256 Unicode characters in * UTF-8.

  • If your tagging schema is used across multiple services * and resources, remember that other services may have restrictions on allowed * characters. Generally allowed characters are: letters, numbers, and spaces * representable in UTF-8, and the following characters: + - = . _ : / @.

  • *
  • Tag keys and values are case sensitive.

  • Do not use * aws:, AWS:, or any upper or lowercase combination of * such as a prefix for keys as it is reserved for Amazon Web Services use. You * cannot edit or delete tag keys with this prefix. Values can have this prefix. If * a tag value has aws as its prefix but the key does not, then * Forecast considers it to be a user tag and will count against the limit of 50 * tags. Tags with only the key prefix of aws do not count against * your tags per resource limit.

*/ inline void SetTags(const Aws::Vector& value) { m_tagsHasBeenSet = true; m_tags = value; } /** *

The optional metadata that you apply to the dataset import job to help you * categorize and organize them. Each tag consists of a key and an optional value, * both of which you define.

The following basic restrictions apply to * tags:

  • Maximum number of tags per resource - 50.

  • *

    For each resource, each tag key must be unique, and each tag key can have * only one value.

  • Maximum key length - 128 Unicode characters in * UTF-8.

  • Maximum value length - 256 Unicode characters in * UTF-8.

  • If your tagging schema is used across multiple services * and resources, remember that other services may have restrictions on allowed * characters. Generally allowed characters are: letters, numbers, and spaces * representable in UTF-8, and the following characters: + - = . _ : / @.

  • *
  • Tag keys and values are case sensitive.

  • Do not use * aws:, AWS:, or any upper or lowercase combination of * such as a prefix for keys as it is reserved for Amazon Web Services use. You * cannot edit or delete tag keys with this prefix. Values can have this prefix. If * a tag value has aws as its prefix but the key does not, then * Forecast considers it to be a user tag and will count against the limit of 50 * tags. Tags with only the key prefix of aws do not count against * your tags per resource limit.

*/ inline void SetTags(Aws::Vector&& value) { m_tagsHasBeenSet = true; m_tags = std::move(value); } /** *

The optional metadata that you apply to the dataset import job to help you * categorize and organize them. Each tag consists of a key and an optional value, * both of which you define.

The following basic restrictions apply to * tags:

  • Maximum number of tags per resource - 50.

  • *

    For each resource, each tag key must be unique, and each tag key can have * only one value.

  • Maximum key length - 128 Unicode characters in * UTF-8.

  • Maximum value length - 256 Unicode characters in * UTF-8.

  • If your tagging schema is used across multiple services * and resources, remember that other services may have restrictions on allowed * characters. Generally allowed characters are: letters, numbers, and spaces * representable in UTF-8, and the following characters: + - = . _ : / @.

  • *
  • Tag keys and values are case sensitive.

  • Do not use * aws:, AWS:, or any upper or lowercase combination of * such as a prefix for keys as it is reserved for Amazon Web Services use. You * cannot edit or delete tag keys with this prefix. Values can have this prefix. If * a tag value has aws as its prefix but the key does not, then * Forecast considers it to be a user tag and will count against the limit of 50 * tags. Tags with only the key prefix of aws do not count against * your tags per resource limit.

*/ inline CreateDatasetImportJobRequest& WithTags(const Aws::Vector& value) { SetTags(value); return *this;} /** *

The optional metadata that you apply to the dataset import job to help you * categorize and organize them. Each tag consists of a key and an optional value, * both of which you define.

The following basic restrictions apply to * tags:

  • Maximum number of tags per resource - 50.

  • *

    For each resource, each tag key must be unique, and each tag key can have * only one value.

  • Maximum key length - 128 Unicode characters in * UTF-8.

  • Maximum value length - 256 Unicode characters in * UTF-8.

  • If your tagging schema is used across multiple services * and resources, remember that other services may have restrictions on allowed * characters. Generally allowed characters are: letters, numbers, and spaces * representable in UTF-8, and the following characters: + - = . _ : / @.

  • *
  • Tag keys and values are case sensitive.

  • Do not use * aws:, AWS:, or any upper or lowercase combination of * such as a prefix for keys as it is reserved for Amazon Web Services use. You * cannot edit or delete tag keys with this prefix. Values can have this prefix. If * a tag value has aws as its prefix but the key does not, then * Forecast considers it to be a user tag and will count against the limit of 50 * tags. Tags with only the key prefix of aws do not count against * your tags per resource limit.

*/ inline CreateDatasetImportJobRequest& WithTags(Aws::Vector&& value) { SetTags(std::move(value)); return *this;} /** *

The optional metadata that you apply to the dataset import job to help you * categorize and organize them. Each tag consists of a key and an optional value, * both of which you define.

The following basic restrictions apply to * tags:

  • Maximum number of tags per resource - 50.

  • *

    For each resource, each tag key must be unique, and each tag key can have * only one value.

  • Maximum key length - 128 Unicode characters in * UTF-8.

  • Maximum value length - 256 Unicode characters in * UTF-8.

  • If your tagging schema is used across multiple services * and resources, remember that other services may have restrictions on allowed * characters. Generally allowed characters are: letters, numbers, and spaces * representable in UTF-8, and the following characters: + - = . _ : / @.

  • *
  • Tag keys and values are case sensitive.

  • Do not use * aws:, AWS:, or any upper or lowercase combination of * such as a prefix for keys as it is reserved for Amazon Web Services use. You * cannot edit or delete tag keys with this prefix. Values can have this prefix. If * a tag value has aws as its prefix but the key does not, then * Forecast considers it to be a user tag and will count against the limit of 50 * tags. Tags with only the key prefix of aws do not count against * your tags per resource limit.

*/ inline CreateDatasetImportJobRequest& AddTags(const Tag& value) { m_tagsHasBeenSet = true; m_tags.push_back(value); return *this; } /** *

The optional metadata that you apply to the dataset import job to help you * categorize and organize them. Each tag consists of a key and an optional value, * both of which you define.

The following basic restrictions apply to * tags:

  • Maximum number of tags per resource - 50.

  • *

    For each resource, each tag key must be unique, and each tag key can have * only one value.

  • Maximum key length - 128 Unicode characters in * UTF-8.

  • Maximum value length - 256 Unicode characters in * UTF-8.

  • If your tagging schema is used across multiple services * and resources, remember that other services may have restrictions on allowed * characters. Generally allowed characters are: letters, numbers, and spaces * representable in UTF-8, and the following characters: + - = . _ : / @.

  • *
  • Tag keys and values are case sensitive.

  • Do not use * aws:, AWS:, or any upper or lowercase combination of * such as a prefix for keys as it is reserved for Amazon Web Services use. You * cannot edit or delete tag keys with this prefix. Values can have this prefix. If * a tag value has aws as its prefix but the key does not, then * Forecast considers it to be a user tag and will count against the limit of 50 * tags. Tags with only the key prefix of aws do not count against * your tags per resource limit.

*/ inline CreateDatasetImportJobRequest& AddTags(Tag&& value) { m_tagsHasBeenSet = true; m_tags.push_back(std::move(value)); return *this; } /** *

The format of the imported data, CSV or PARQUET. The default value is * CSV.

*/ inline const Aws::String& GetFormat() const{ return m_format; } /** *

The format of the imported data, CSV or PARQUET. The default value is * CSV.

*/ inline bool FormatHasBeenSet() const { return m_formatHasBeenSet; } /** *

The format of the imported data, CSV or PARQUET. The default value is * CSV.

*/ inline void SetFormat(const Aws::String& value) { m_formatHasBeenSet = true; m_format = value; } /** *

The format of the imported data, CSV or PARQUET. The default value is * CSV.

*/ inline void SetFormat(Aws::String&& value) { m_formatHasBeenSet = true; m_format = std::move(value); } /** *

The format of the imported data, CSV or PARQUET. The default value is * CSV.

*/ inline void SetFormat(const char* value) { m_formatHasBeenSet = true; m_format.assign(value); } /** *

The format of the imported data, CSV or PARQUET. The default value is * CSV.

*/ inline CreateDatasetImportJobRequest& WithFormat(const Aws::String& value) { SetFormat(value); return *this;} /** *

The format of the imported data, CSV or PARQUET. The default value is * CSV.

*/ inline CreateDatasetImportJobRequest& WithFormat(Aws::String&& value) { SetFormat(std::move(value)); return *this;} /** *

The format of the imported data, CSV or PARQUET. The default value is * CSV.

*/ inline CreateDatasetImportJobRequest& WithFormat(const char* value) { SetFormat(value); return *this;} /** *

Specifies whether the dataset import job is a FULL or * INCREMENTAL import. A FULL dataset import replaces all * of the existing data with the newly imported data. An INCREMENTAL * import appends the imported data to the existing data.

*/ inline const ImportMode& GetImportMode() const{ return m_importMode; } /** *

Specifies whether the dataset import job is a FULL or * INCREMENTAL import. A FULL dataset import replaces all * of the existing data with the newly imported data. An INCREMENTAL * import appends the imported data to the existing data.

*/ inline bool ImportModeHasBeenSet() const { return m_importModeHasBeenSet; } /** *

Specifies whether the dataset import job is a FULL or * INCREMENTAL import. A FULL dataset import replaces all * of the existing data with the newly imported data. An INCREMENTAL * import appends the imported data to the existing data.

*/ inline void SetImportMode(const ImportMode& value) { m_importModeHasBeenSet = true; m_importMode = value; } /** *

Specifies whether the dataset import job is a FULL or * INCREMENTAL import. A FULL dataset import replaces all * of the existing data with the newly imported data. An INCREMENTAL * import appends the imported data to the existing data.

*/ inline void SetImportMode(ImportMode&& value) { m_importModeHasBeenSet = true; m_importMode = std::move(value); } /** *

Specifies whether the dataset import job is a FULL or * INCREMENTAL import. A FULL dataset import replaces all * of the existing data with the newly imported data. An INCREMENTAL * import appends the imported data to the existing data.

*/ inline CreateDatasetImportJobRequest& WithImportMode(const ImportMode& value) { SetImportMode(value); return *this;} /** *

Specifies whether the dataset import job is a FULL or * INCREMENTAL import. A FULL dataset import replaces all * of the existing data with the newly imported data. An INCREMENTAL * import appends the imported data to the existing data.

*/ inline CreateDatasetImportJobRequest& WithImportMode(ImportMode&& value) { SetImportMode(std::move(value)); return *this;} private: Aws::String m_datasetImportJobName; bool m_datasetImportJobNameHasBeenSet = false; Aws::String m_datasetArn; bool m_datasetArnHasBeenSet = false; DataSource m_dataSource; bool m_dataSourceHasBeenSet = false; Aws::String m_timestampFormat; bool m_timestampFormatHasBeenSet = false; Aws::String m_timeZone; bool m_timeZoneHasBeenSet = false; bool m_useGeolocationForTimeZone; bool m_useGeolocationForTimeZoneHasBeenSet = false; Aws::String m_geolocationFormat; bool m_geolocationFormatHasBeenSet = false; Aws::Vector m_tags; bool m_tagsHasBeenSet = false; Aws::String m_format; bool m_formatHasBeenSet = false; ImportMode m_importMode; bool m_importModeHasBeenSet = false; }; } // namespace Model } // namespace ForecastService } // namespace Aws