/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */
#pragma once
// Include paths below were stripped from this copy of the header; they are reconstructed
// from the types used in this file following the usual aws-sdk-cpp layout, so verify them
// against your SDK version.
#include <aws/personalize/Personalize_EXPORTS.h>
#include <aws/personalize/PersonalizeRequest.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/personalize/model/BatchInferenceJobInput.h>
#include <aws/personalize/model/BatchInferenceJobOutput.h>
#include <aws/personalize/model/BatchInferenceJobConfig.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <aws/personalize/model/Tag.h>
#include <utility>

namespace Aws
{
namespace Personalize
{
namespace Model
{

  /**
   */
  class CreateBatchInferenceJobRequest : public PersonalizeRequest
  {
  public:
    AWS_PERSONALIZE_API CreateBatchInferenceJobRequest();

    // Service request name is the Operation name that will send this request out.
    // Each operation should have a unique request name, so that we can get the operation's
    // name from this request.
    // Note: this is not true for responses; multiple operations may have the same response
    // name, so we cannot get an operation's name from a response.
    inline virtual const char* GetServiceRequestName() const override { return "CreateBatchInferenceJob"; }

    AWS_PERSONALIZE_API Aws::String SerializePayload() const override;

    AWS_PERSONALIZE_API Aws::Http::HeaderValueCollection GetRequestSpecificHeaders() const override;


    /**
     * The name of the batch inference job to create.
     */
    inline const Aws::String& GetJobName() const { return m_jobName; }

    /**
     * The name of the batch inference job to create.
     */
    inline bool JobNameHasBeenSet() const { return m_jobNameHasBeenSet; }

    /**
     * The name of the batch inference job to create.
     */
    inline void SetJobName(const Aws::String& value) { m_jobNameHasBeenSet = true; m_jobName = value; }

    /**
     * The name of the batch inference job to create.
     */
    inline void SetJobName(Aws::String&& value) { m_jobNameHasBeenSet = true; m_jobName = std::move(value); }

    /**
     * The name of the batch inference job to create.
     */
    inline void SetJobName(const char* value) { m_jobNameHasBeenSet = true; m_jobName.assign(value); }

    /**
     * The name of the batch inference job to create.
     */
    inline CreateBatchInferenceJobRequest& WithJobName(const Aws::String& value) { SetJobName(value); return *this; }

    /**
     * The name of the batch inference job to create.
     */
    inline CreateBatchInferenceJobRequest& WithJobName(Aws::String&& value) { SetJobName(std::move(value)); return *this; }

    /**
     * The name of the batch inference job to create.
     */
    inline CreateBatchInferenceJobRequest& WithJobName(const char* value) { SetJobName(value); return *this; }


    /**
     * The Amazon Resource Name (ARN) of the solution version that will be used to
     * generate the batch inference recommendations.
     */
    inline const Aws::String& GetSolutionVersionArn() const { return m_solutionVersionArn; }

    /**
     * The Amazon Resource Name (ARN) of the solution version that will be used to
     * generate the batch inference recommendations.
     */
    inline bool SolutionVersionArnHasBeenSet() const { return m_solutionVersionArnHasBeenSet; }

    /**
     * The Amazon Resource Name (ARN) of the solution version that will be used to
     * generate the batch inference recommendations.
     */
    inline void SetSolutionVersionArn(const Aws::String& value) { m_solutionVersionArnHasBeenSet = true; m_solutionVersionArn = value; }

    /**
     * The Amazon Resource Name (ARN) of the solution version that will be used to
     * generate the batch inference recommendations.
     */
    inline void SetSolutionVersionArn(Aws::String&& value) { m_solutionVersionArnHasBeenSet = true; m_solutionVersionArn = std::move(value); }

    /**
     * The Amazon Resource Name (ARN) of the solution version that will be used to
     * generate the batch inference recommendations.
     */
    inline void SetSolutionVersionArn(const char* value) { m_solutionVersionArnHasBeenSet = true; m_solutionVersionArn.assign(value); }

    /**
     * The Amazon Resource Name (ARN) of the solution version that will be used to
     * generate the batch inference recommendations.
     */
    inline CreateBatchInferenceJobRequest& WithSolutionVersionArn(const Aws::String& value) { SetSolutionVersionArn(value); return *this; }

    /**
     * The Amazon Resource Name (ARN) of the solution version that will be used to
     * generate the batch inference recommendations.
     */
    inline CreateBatchInferenceJobRequest& WithSolutionVersionArn(Aws::String&& value) { SetSolutionVersionArn(std::move(value)); return *this; }

    /**
     * The Amazon Resource Name (ARN) of the solution version that will be used to
     * generate the batch inference recommendations.
     */
    inline CreateBatchInferenceJobRequest& WithSolutionVersionArn(const char* value) { SetSolutionVersionArn(value); return *this; }


    /**
     * The ARN of the filter to apply to the batch inference job. For more
     * information on using filters, see Filtering batch recommendations.
     */
    inline const Aws::String& GetFilterArn() const { return m_filterArn; }

    /**
     * The ARN of the filter to apply to the batch inference job. For more
     * information on using filters, see Filtering batch recommendations.
     */
    inline bool FilterArnHasBeenSet() const { return m_filterArnHasBeenSet; }

    /**
     * The ARN of the filter to apply to the batch inference job. For more
     * information on using filters, see Filtering batch recommendations.
     */
    inline void SetFilterArn(const Aws::String& value) { m_filterArnHasBeenSet = true; m_filterArn = value; }

    /**
     * The ARN of the filter to apply to the batch inference job. For more
     * information on using filters, see Filtering batch recommendations.
     */
    inline void SetFilterArn(Aws::String&& value) { m_filterArnHasBeenSet = true; m_filterArn = std::move(value); }

    /**
     * The ARN of the filter to apply to the batch inference job. For more
     * information on using filters, see Filtering batch recommendations.
     */
    inline void SetFilterArn(const char* value) { m_filterArnHasBeenSet = true; m_filterArn.assign(value); }

    /**
     * The ARN of the filter to apply to the batch inference job. For more
     * information on using filters, see Filtering batch recommendations.
     */
    inline CreateBatchInferenceJobRequest& WithFilterArn(const Aws::String& value) { SetFilterArn(value); return *this; }

    /**
     * The ARN of the filter to apply to the batch inference job. For more
     * information on using filters, see Filtering batch recommendations.
     */
    inline CreateBatchInferenceJobRequest& WithFilterArn(Aws::String&& value) { SetFilterArn(std::move(value)); return *this; }

    /**
     * The ARN of the filter to apply to the batch inference job. For more
     * information on using filters, see Filtering batch recommendations.
     */
    inline CreateBatchInferenceJobRequest& WithFilterArn(const char* value) { SetFilterArn(value); return *this; }
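
    // Illustrative note (not part of the generated header): the filter named by this ARN is
    // created separately and holds a Personalize filter expression; this request only
    // references it. A minimal sketch with a placeholder request object and ARN:
    //
    //   request.SetFilterArn("arn:aws:personalize:us-west-2:123456789012:filter/exclude-purchased");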

    /**
     * The number of recommendations to retrieve.
     */
    inline int GetNumResults() const { return m_numResults; }

    /**
     * The number of recommendations to retrieve.
     */
    inline bool NumResultsHasBeenSet() const { return m_numResultsHasBeenSet; }

    /**
     * The number of recommendations to retrieve.
     */
    inline void SetNumResults(int value) { m_numResultsHasBeenSet = true; m_numResults = value; }

    /**
     * The number of recommendations to retrieve.
     */
    inline CreateBatchInferenceJobRequest& WithNumResults(int value) { SetNumResults(value); return *this; }


    /**
     * The Amazon S3 path that leads to the input file to base your recommendations
     * on. The input material must be in JSON format.
     */
    inline const BatchInferenceJobInput& GetJobInput() const { return m_jobInput; }

    /**
     * The Amazon S3 path that leads to the input file to base your recommendations
     * on. The input material must be in JSON format.
     */
    inline bool JobInputHasBeenSet() const { return m_jobInputHasBeenSet; }

    /**
     * The Amazon S3 path that leads to the input file to base your recommendations
     * on. The input material must be in JSON format.
     */
    inline void SetJobInput(const BatchInferenceJobInput& value) { m_jobInputHasBeenSet = true; m_jobInput = value; }

    /**
     * The Amazon S3 path that leads to the input file to base your recommendations
     * on. The input material must be in JSON format.
     */
    inline void SetJobInput(BatchInferenceJobInput&& value) { m_jobInputHasBeenSet = true; m_jobInput = std::move(value); }

    /**
     * The Amazon S3 path that leads to the input file to base your recommendations
     * on. The input material must be in JSON format.
     */
    inline CreateBatchInferenceJobRequest& WithJobInput(const BatchInferenceJobInput& value) { SetJobInput(value); return *this; }

    /**
     * The Amazon S3 path that leads to the input file to base your recommendations
     * on. The input material must be in JSON format.
     */
    inline CreateBatchInferenceJobRequest& WithJobInput(BatchInferenceJobInput&& value) { SetJobInput(std::move(value)); return *this; }
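
    // Illustrative note (not part of the generated header): for a user-personalization
    // solution, the S3 input file referenced by BatchInferenceJobInput is typically
    // JSON Lines, one record per line, for example:
    //
    //   {"userId": "123"}
    //   {"userId": "456"}
    //
    // The exact fields depend on the recipe (for example itemId for related-items recipes),
    // so check the Amazon Personalize developer guide for your use case.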

    /**
     * The path to the Amazon S3 bucket where the job's output will be stored.
     */
    inline const BatchInferenceJobOutput& GetJobOutput() const { return m_jobOutput; }

    /**
     * The path to the Amazon S3 bucket where the job's output will be stored.
     */
    inline bool JobOutputHasBeenSet() const { return m_jobOutputHasBeenSet; }

    /**
     * The path to the Amazon S3 bucket where the job's output will be stored.
     */
    inline void SetJobOutput(const BatchInferenceJobOutput& value) { m_jobOutputHasBeenSet = true; m_jobOutput = value; }

    /**
     * The path to the Amazon S3 bucket where the job's output will be stored.
     */
    inline void SetJobOutput(BatchInferenceJobOutput&& value) { m_jobOutputHasBeenSet = true; m_jobOutput = std::move(value); }

    /**
     * The path to the Amazon S3 bucket where the job's output will be stored.
     */
    inline CreateBatchInferenceJobRequest& WithJobOutput(const BatchInferenceJobOutput& value) { SetJobOutput(value); return *this; }

    /**
     * The path to the Amazon S3 bucket where the job's output will be stored.
     */
    inline CreateBatchInferenceJobRequest& WithJobOutput(BatchInferenceJobOutput&& value) { SetJobOutput(std::move(value)); return *this; }


    /**
     * The ARN of the Amazon Identity and Access Management role that has
     * permissions to read and write to your input and output Amazon S3 buckets
     * respectively.
     */
    inline const Aws::String& GetRoleArn() const { return m_roleArn; }

    /**
     * The ARN of the Amazon Identity and Access Management role that has
     * permissions to read and write to your input and output Amazon S3 buckets
     * respectively.
     */
    inline bool RoleArnHasBeenSet() const { return m_roleArnHasBeenSet; }

    /**
     * The ARN of the Amazon Identity and Access Management role that has
     * permissions to read and write to your input and output Amazon S3 buckets
     * respectively.
     */
    inline void SetRoleArn(const Aws::String& value) { m_roleArnHasBeenSet = true; m_roleArn = value; }

    /**
     * The ARN of the Amazon Identity and Access Management role that has
     * permissions to read and write to your input and output Amazon S3 buckets
     * respectively.
     */
    inline void SetRoleArn(Aws::String&& value) { m_roleArnHasBeenSet = true; m_roleArn = std::move(value); }

    /**
     * The ARN of the Amazon Identity and Access Management role that has
     * permissions to read and write to your input and output Amazon S3 buckets
     * respectively.
     */
    inline void SetRoleArn(const char* value) { m_roleArnHasBeenSet = true; m_roleArn.assign(value); }

    /**
     * The ARN of the Amazon Identity and Access Management role that has
     * permissions to read and write to your input and output Amazon S3 buckets
     * respectively.
     */
    inline CreateBatchInferenceJobRequest& WithRoleArn(const Aws::String& value) { SetRoleArn(value); return *this; }

    /**
     * The ARN of the Amazon Identity and Access Management role that has
     * permissions to read and write to your input and output Amazon S3 buckets
     * respectively.
     */
    inline CreateBatchInferenceJobRequest& WithRoleArn(Aws::String&& value) { SetRoleArn(std::move(value)); return *this; }

    /**
     * The ARN of the Amazon Identity and Access Management role that has
     * permissions to read and write to your input and output Amazon S3 buckets
     * respectively.
     */
    inline CreateBatchInferenceJobRequest& WithRoleArn(const char* value) { SetRoleArn(value); return *this; }


    /**
     * The configuration details of a batch inference job.
     */
    inline const BatchInferenceJobConfig& GetBatchInferenceJobConfig() const { return m_batchInferenceJobConfig; }

    /**
     * The configuration details of a batch inference job.
     */
    inline bool BatchInferenceJobConfigHasBeenSet() const { return m_batchInferenceJobConfigHasBeenSet; }

    /**
     * The configuration details of a batch inference job.
     */
    inline void SetBatchInferenceJobConfig(const BatchInferenceJobConfig& value) { m_batchInferenceJobConfigHasBeenSet = true; m_batchInferenceJobConfig = value; }

    /**
     * The configuration details of a batch inference job.
     */
    inline void SetBatchInferenceJobConfig(BatchInferenceJobConfig&& value) { m_batchInferenceJobConfigHasBeenSet = true; m_batchInferenceJobConfig = std::move(value); }

    /**
     * The configuration details of a batch inference job.
     */
    inline CreateBatchInferenceJobRequest& WithBatchInferenceJobConfig(const BatchInferenceJobConfig& value) { SetBatchInferenceJobConfig(value); return *this; }

    /**
     * The configuration details of a batch inference job.
     */
    inline CreateBatchInferenceJobRequest& WithBatchInferenceJobConfig(BatchInferenceJobConfig&& value) { SetBatchInferenceJobConfig(std::move(value)); return *this; }


    /**
     * A list of tags to apply to the batch inference job.
     */
    inline const Aws::Vector<Tag>& GetTags() const { return m_tags; }

    /**
     * A list of tags to apply to the batch inference job.
     */
    inline bool TagsHasBeenSet() const { return m_tagsHasBeenSet; }

    /**
     * A list of tags to apply to the batch inference job.
     */
    inline void SetTags(const Aws::Vector<Tag>& value) { m_tagsHasBeenSet = true; m_tags = value; }

    /**
     * A list of tags to apply to the batch inference job.
     */
    inline void SetTags(Aws::Vector<Tag>&& value) { m_tagsHasBeenSet = true; m_tags = std::move(value); }

    /**
     * A list of tags to apply to the batch inference job.
     */
    inline CreateBatchInferenceJobRequest& WithTags(const Aws::Vector<Tag>& value) { SetTags(value); return *this; }

    /**
     * A list of tags to apply to the batch inference job.
     */
    inline CreateBatchInferenceJobRequest& WithTags(Aws::Vector<Tag>&& value) { SetTags(std::move(value)); return *this; }

    /**
     * A list of tags to apply to the batch inference job.
     */
    inline CreateBatchInferenceJobRequest& AddTags(const Tag& value) { m_tagsHasBeenSet = true; m_tags.push_back(value); return *this; }

    /**
     * A list of tags to apply to the batch inference job.
     */
    inline CreateBatchInferenceJobRequest& AddTags(Tag&& value) { m_tagsHasBeenSet = true; m_tags.push_back(std::move(value)); return *this; }

  private:

    Aws::String m_jobName;
    bool m_jobNameHasBeenSet = false;

    Aws::String m_solutionVersionArn;
    bool m_solutionVersionArnHasBeenSet = false;

    Aws::String m_filterArn;
    bool m_filterArnHasBeenSet = false;

    int m_numResults;
    bool m_numResultsHasBeenSet = false;

    BatchInferenceJobInput m_jobInput;
    bool m_jobInputHasBeenSet = false;

    BatchInferenceJobOutput m_jobOutput;
    bool m_jobOutputHasBeenSet = false;

    Aws::String m_roleArn;
    bool m_roleArnHasBeenSet = false;

    BatchInferenceJobConfig m_batchInferenceJobConfig;
    bool m_batchInferenceJobConfigHasBeenSet = false;

    Aws::Vector<Tag> m_tags;
    bool m_tagsHasBeenSet = false;
  };

} // namespace Model
} // namespace Personalize
} // namespace Aws
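
/*
 * Illustrative usage sketch (not part of the generated header). It only relies on the
 * fluent setters declared above plus the Personalize client's CreateBatchInferenceJob
 * operation; the ARNs, S3 locations, and client configuration are placeholders and must
 * be replaced with real values for your account and Region.
 *
 *   #include <aws/personalize/PersonalizeClient.h>
 *   #include <aws/personalize/model/CreateBatchInferenceJobRequest.h>
 *
 *   using namespace Aws::Personalize;
 *
 *   Model::BatchInferenceJobInput jobInput;    // wraps the S3 location of the JSON input file
 *   Model::BatchInferenceJobOutput jobOutput;  // wraps the S3 location for the job's output
 *
 *   Model::CreateBatchInferenceJobRequest request;
 *   request.WithJobName("my-batch-job")
 *          .WithSolutionVersionArn("arn:aws:personalize:us-west-2:123456789012:solution/my-solution/version")
 *          .WithNumResults(25)
 *          .WithJobInput(jobInput)
 *          .WithJobOutput(jobOutput)
 *          .WithRoleArn("arn:aws:iam::123456789012:role/PersonalizeS3AccessRole");
 *
 *   PersonalizeClient client;
 *   auto outcome = client.CreateBatchInferenceJob(request);
 *   if (!outcome.IsSuccess())
 *   {
 *     // Inspect outcome.GetError() for the failure reason.
 *   }
 */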