/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include Contains information on a batch inference job.See Also:
AWS
* API Reference
The name of the batch inference job.
*/

    /**
     * <p>The name of the batch inference job.</p>
     */
    inline const Aws::String& GetJobName() const { return m_jobName; }

    /**
     * <p>Returns true when a job name has been assigned to this object.</p>
     */
    inline bool JobNameHasBeenSet() const { return m_jobNameHasBeenSet; }

    /**
     * <p>Sets the name of the batch inference job.</p>
     */
    inline void SetJobName(const Aws::String& value) { m_jobName = value; m_jobNameHasBeenSet = true; }

    /**
     * <p>Sets the name of the batch inference job (move overload).</p>
     */
    inline void SetJobName(Aws::String&& value) { m_jobName = std::move(value); m_jobNameHasBeenSet = true; }

    /**
     * <p>Sets the name of the batch inference job from a C string.</p>
     */
    inline void SetJobName(const char* value) { m_jobName.assign(value); m_jobNameHasBeenSet = true; }

    /**
     * <p>Sets the job name and returns this object so calls can be chained.</p>
     */
    inline BatchInferenceJob& WithJobName(const Aws::String& value) { m_jobName = value; m_jobNameHasBeenSet = true; return *this; }

    /**
     * <p>Move-assigns the job name and returns this object so calls can be
     * chained.</p>
     */
    inline BatchInferenceJob& WithJobName(Aws::String&& value) { m_jobName = std::move(value); m_jobNameHasBeenSet = true; return *this; }

    /**
     * <p>Assigns the job name from a C string and returns this object so calls can
     * be chained.</p>
     */
    inline BatchInferenceJob& WithJobName(const char* value) { m_jobName.assign(value); m_jobNameHasBeenSet = true; return *this; }

    /**
     * <p>The Amazon Resource Name (ARN) of the batch inference job.</p>
*/

    /**
     * <p>The Amazon Resource Name (ARN) of the batch inference job.</p>
     */
    inline const Aws::String& GetBatchInferenceJobArn() const{ return m_batchInferenceJobArn; }

    /**
     * <p>Returns true if the batch inference job ARN has been set.</p>
     */
    inline bool BatchInferenceJobArnHasBeenSet() const { return m_batchInferenceJobArnHasBeenSet; }

    /**
     * <p>Sets the Amazon Resource Name (ARN) of the batch inference job.</p>
     */
    inline void SetBatchInferenceJobArn(const Aws::String& value) { m_batchInferenceJobArnHasBeenSet = true; m_batchInferenceJobArn = value; }

    /**
     * <p>Sets the ARN of the batch inference job (move overload).</p>
     */
    inline void SetBatchInferenceJobArn(Aws::String&& value) { m_batchInferenceJobArnHasBeenSet = true; m_batchInferenceJobArn = std::move(value); }

    /**
     * <p>Sets the ARN of the batch inference job from a C string.</p>
     */
    inline void SetBatchInferenceJobArn(const char* value) { m_batchInferenceJobArnHasBeenSet = true; m_batchInferenceJobArn.assign(value); }

    /**
     * <p>Sets the job ARN and returns this object for call chaining.</p>
     */
    inline BatchInferenceJob& WithBatchInferenceJobArn(const Aws::String& value) { SetBatchInferenceJobArn(value); return *this;}

    /**
     * <p>Move-sets the job ARN and returns this object for call chaining.</p>
     */
    inline BatchInferenceJob& WithBatchInferenceJobArn(Aws::String&& value) { SetBatchInferenceJobArn(std::move(value)); return *this;}

    /**
     * <p>Sets the job ARN from a C string and returns this object for call
     * chaining.</p>
     */
    inline BatchInferenceJob& WithBatchInferenceJobArn(const char* value) { SetBatchInferenceJobArn(value); return *this;}

    /**
     * <p>The ARN of the filter used on the batch inference job.</p>
*/

    /**
     * <p>The ARN of the filter used on the batch inference job.</p>
     */
    inline const Aws::String& GetFilterArn() const { return m_filterArn; }

    /**
     * <p>Returns true when a filter ARN has been assigned.</p>
     */
    inline bool FilterArnHasBeenSet() const { return m_filterArnHasBeenSet; }

    /**
     * <p>Sets the ARN of the filter used on the batch inference job.</p>
     */
    inline void SetFilterArn(const Aws::String& value) { m_filterArn = value; m_filterArnHasBeenSet = true; }

    /**
     * <p>Sets the filter ARN (move overload).</p>
     */
    inline void SetFilterArn(Aws::String&& value) { m_filterArn = std::move(value); m_filterArnHasBeenSet = true; }

    /**
     * <p>Sets the filter ARN from a C string.</p>
     */
    inline void SetFilterArn(const char* value) { m_filterArn.assign(value); m_filterArnHasBeenSet = true; }

    /**
     * <p>Sets the filter ARN and returns this object so calls can be chained.</p>
     */
    inline BatchInferenceJob& WithFilterArn(const Aws::String& value) { m_filterArn = value; m_filterArnHasBeenSet = true; return *this; }

    /**
     * <p>Move-assigns the filter ARN and returns this object so calls can be
     * chained.</p>
     */
    inline BatchInferenceJob& WithFilterArn(Aws::String&& value) { m_filterArn = std::move(value); m_filterArnHasBeenSet = true; return *this; }

    /**
     * <p>Assigns the filter ARN from a C string and returns this object so calls
     * can be chained.</p>
     */
    inline BatchInferenceJob& WithFilterArn(const char* value) { m_filterArn.assign(value); m_filterArnHasBeenSet = true; return *this; }

    /**
     * <p>If the batch inference job failed, the reason for the failure.</p>
*/

    /**
     * <p>If the batch inference job failed, the reason for the failure.</p>
     */
    inline const Aws::String& GetFailureReason() const{ return m_failureReason; }

    /**
     * <p>Returns true if a failure reason has been set.</p>
     */
    inline bool FailureReasonHasBeenSet() const { return m_failureReasonHasBeenSet; }

    /**
     * <p>Sets the reason the batch inference job failed.</p>
     */
    inline void SetFailureReason(const Aws::String& value) { m_failureReasonHasBeenSet = true; m_failureReason = value; }

    /**
     * <p>Sets the failure reason (move overload).</p>
     */
    inline void SetFailureReason(Aws::String&& value) { m_failureReasonHasBeenSet = true; m_failureReason = std::move(value); }

    /**
     * <p>Sets the failure reason from a C string.</p>
     */
    inline void SetFailureReason(const char* value) { m_failureReasonHasBeenSet = true; m_failureReason.assign(value); }

    /**
     * <p>Sets the failure reason and returns this object for call chaining.</p>
     */
    inline BatchInferenceJob& WithFailureReason(const Aws::String& value) { SetFailureReason(value); return *this;}

    /**
     * <p>Move-sets the failure reason and returns this object for call
     * chaining.</p>
     */
    inline BatchInferenceJob& WithFailureReason(Aws::String&& value) { SetFailureReason(std::move(value)); return *this;}

    /**
     * <p>Sets the failure reason from a C string and returns this object for call
     * chaining.</p>
     */
    inline BatchInferenceJob& WithFailureReason(const char* value) { SetFailureReason(value); return *this;}

    /**
     * <p>The Amazon Resource Name (ARN) of the solution version from which the batch
     * inference job was created.</p>
*/

    /**
     * <p>The Amazon Resource Name (ARN) of the solution version from which the batch
     * inference job was created.</p>
     */
    inline const Aws::String& GetSolutionVersionArn() const{ return m_solutionVersionArn; }

    /**
     * <p>Returns true if the solution version ARN has been set.</p>
     */
    inline bool SolutionVersionArnHasBeenSet() const { return m_solutionVersionArnHasBeenSet; }

    /**
     * <p>Sets the ARN of the solution version from which the batch inference job
     * was created.</p>
     */
    inline void SetSolutionVersionArn(const Aws::String& value) { m_solutionVersionArnHasBeenSet = true; m_solutionVersionArn = value; }

    /**
     * <p>Sets the solution version ARN (move overload).</p>
     */
    inline void SetSolutionVersionArn(Aws::String&& value) { m_solutionVersionArnHasBeenSet = true; m_solutionVersionArn = std::move(value); }

    /**
     * <p>Sets the solution version ARN from a C string.</p>
     */
    inline void SetSolutionVersionArn(const char* value) { m_solutionVersionArnHasBeenSet = true; m_solutionVersionArn.assign(value); }

    /**
     * <p>Sets the solution version ARN and returns this object for call
     * chaining.</p>
     */
    inline BatchInferenceJob& WithSolutionVersionArn(const Aws::String& value) { SetSolutionVersionArn(value); return *this;}

    /**
     * <p>Move-sets the solution version ARN and returns this object for call
     * chaining.</p>
     */
    inline BatchInferenceJob& WithSolutionVersionArn(Aws::String&& value) { SetSolutionVersionArn(std::move(value)); return *this;}

    /**
     * <p>Sets the solution version ARN from a C string and returns this object for
     * call chaining.</p>
     */
    inline BatchInferenceJob& WithSolutionVersionArn(const char* value) { SetSolutionVersionArn(value); return *this;}

    /**
     * <p>The number of recommendations generated by the batch inference job. This
     * number includes the error messages generated for failed input records.</p>
*/

    /**
     * <p>The number of recommendations generated by the batch inference job. This
     * number includes the error messages generated for failed input records.</p>
     */
    inline int GetNumResults() const { return m_numResults; }

    /**
     * <p>Returns true when the number of results has been assigned.</p>
     */
    inline bool NumResultsHasBeenSet() const { return m_numResultsHasBeenSet; }

    /**
     * <p>Sets the number of recommendations generated by the batch inference
     * job.</p>
     */
    inline void SetNumResults(int value) { m_numResults = value; m_numResultsHasBeenSet = true; }

    /**
     * <p>Sets the number of results and returns this object so calls can be
     * chained.</p>
     */
    inline BatchInferenceJob& WithNumResults(int value) { m_numResults = value; m_numResultsHasBeenSet = true; return *this; }

    /**
     * <p>The Amazon S3 path that leads to the input data used to generate the batch
     * inference job.</p>
*/

    /**
     * <p>The Amazon S3 path that leads to the input data used to generate the batch
     * inference job.</p>
     */
    inline const BatchInferenceJobInput& GetJobInput() const { return m_jobInput; }

    /**
     * <p>Returns true when the job input has been assigned.</p>
     */
    inline bool JobInputHasBeenSet() const { return m_jobInputHasBeenSet; }

    /**
     * <p>Sets the Amazon S3 input location for the batch inference job.</p>
     */
    inline void SetJobInput(const BatchInferenceJobInput& value) { m_jobInput = value; m_jobInputHasBeenSet = true; }

    /**
     * <p>Sets the Amazon S3 input location (move overload).</p>
     */
    inline void SetJobInput(BatchInferenceJobInput&& value) { m_jobInput = std::move(value); m_jobInputHasBeenSet = true; }

    /**
     * <p>Sets the job input and returns this object so calls can be chained.</p>
     */
    inline BatchInferenceJob& WithJobInput(const BatchInferenceJobInput& value) { m_jobInput = value; m_jobInputHasBeenSet = true; return *this; }

    /**
     * <p>Move-assigns the job input and returns this object so calls can be
     * chained.</p>
     */
    inline BatchInferenceJob& WithJobInput(BatchInferenceJobInput&& value) { m_jobInput = std::move(value); m_jobInputHasBeenSet = true; return *this; }

    /**
     * <p>The Amazon S3 bucket that contains the output data generated by the batch
     * inference job.</p>
*/

    /**
     * <p>The Amazon S3 bucket that contains the output data generated by the batch
     * inference job.</p>
     */
    inline const BatchInferenceJobOutput& GetJobOutput() const{ return m_jobOutput; }

    /**
     * <p>Returns true if the job output location has been set.</p>
     */
    inline bool JobOutputHasBeenSet() const { return m_jobOutputHasBeenSet; }

    /**
     * <p>Sets the Amazon S3 output location for the batch inference job.</p>
     */
    inline void SetJobOutput(const BatchInferenceJobOutput& value) { m_jobOutputHasBeenSet = true; m_jobOutput = value; }

    /**
     * <p>Sets the Amazon S3 output location (move overload).</p>
     */
    inline void SetJobOutput(BatchInferenceJobOutput&& value) { m_jobOutputHasBeenSet = true; m_jobOutput = std::move(value); }

    /**
     * <p>Sets the job output and returns this object for call chaining.</p>
     */
    inline BatchInferenceJob& WithJobOutput(const BatchInferenceJobOutput& value) { SetJobOutput(value); return *this;}

    /**
     * <p>Move-sets the job output and returns this object for call chaining.</p>
     */
    inline BatchInferenceJob& WithJobOutput(BatchInferenceJobOutput&& value) { SetJobOutput(std::move(value)); return *this;}

    /**
     * <p>A string to string map of the configuration details of a batch inference
     * job.</p>
*/

    /**
     * <p>A string to string map of the configuration details of a batch inference
     * job.</p>
     */
    inline const BatchInferenceJobConfig& GetBatchInferenceJobConfig() const { return m_batchInferenceJobConfig; }

    /**
     * <p>Returns true when the job configuration has been assigned.</p>
     */
    inline bool BatchInferenceJobConfigHasBeenSet() const { return m_batchInferenceJobConfigHasBeenSet; }

    /**
     * <p>Sets the configuration details of the batch inference job.</p>
     */
    inline void SetBatchInferenceJobConfig(const BatchInferenceJobConfig& value) { m_batchInferenceJobConfig = value; m_batchInferenceJobConfigHasBeenSet = true; }

    /**
     * <p>Sets the configuration details (move overload).</p>
     */
    inline void SetBatchInferenceJobConfig(BatchInferenceJobConfig&& value) { m_batchInferenceJobConfig = std::move(value); m_batchInferenceJobConfigHasBeenSet = true; }

    /**
     * <p>Sets the job configuration and returns this object so calls can be
     * chained.</p>
     */
    inline BatchInferenceJob& WithBatchInferenceJobConfig(const BatchInferenceJobConfig& value) { m_batchInferenceJobConfig = value; m_batchInferenceJobConfigHasBeenSet = true; return *this; }

    /**
     * <p>Move-assigns the job configuration and returns this object so calls can be
     * chained.</p>
     */
    inline BatchInferenceJob& WithBatchInferenceJobConfig(BatchInferenceJobConfig&& value) { m_batchInferenceJobConfig = std::move(value); m_batchInferenceJobConfigHasBeenSet = true; return *this; }

    /**
     * <p>The ARN of the Amazon Identity and Access Management (IAM) role that
     * requested the batch inference job.</p>
*/

    /**
     * <p>The ARN of the Amazon Identity and Access Management (IAM) role that
     * requested the batch inference job.</p>
     */
    inline const Aws::String& GetRoleArn() const{ return m_roleArn; }

    /**
     * <p>Returns true if the IAM role ARN has been set.</p>
     */
    inline bool RoleArnHasBeenSet() const { return m_roleArnHasBeenSet; }

    /**
     * <p>Sets the ARN of the IAM role that requested the batch inference job.</p>
     */
    inline void SetRoleArn(const Aws::String& value) { m_roleArnHasBeenSet = true; m_roleArn = value; }

    /**
     * <p>Sets the IAM role ARN (move overload).</p>
     */
    inline void SetRoleArn(Aws::String&& value) { m_roleArnHasBeenSet = true; m_roleArn = std::move(value); }

    /**
     * <p>Sets the IAM role ARN from a C string.</p>
     */
    inline void SetRoleArn(const char* value) { m_roleArnHasBeenSet = true; m_roleArn.assign(value); }

    /**
     * <p>Sets the IAM role ARN and returns this object for call chaining.</p>
     */
    inline BatchInferenceJob& WithRoleArn(const Aws::String& value) { SetRoleArn(value); return *this;}

    /**
     * <p>Move-sets the IAM role ARN and returns this object for call chaining.</p>
     */
    inline BatchInferenceJob& WithRoleArn(Aws::String&& value) { SetRoleArn(std::move(value)); return *this;}

    /**
     * <p>The ARN of the Amazon Identity and Access Management (IAM) role that
     * requested the batch inference job.</p>
*/ inline BatchInferenceJob& WithRoleArn(const char* value) { SetRoleArn(value); return *this;} /** *The status of the batch inference job. The status is one of the following * values:
PENDING
IN PROGRESS
ACTIVE
CREATE FAILED
The status of the batch inference job. The status is one of the following * values:
PENDING
IN PROGRESS
ACTIVE
CREATE FAILED
The status of the batch inference job. The status is one of the following * values:
PENDING
IN PROGRESS
ACTIVE
CREATE FAILED
The status of the batch inference job. The status is one of the following * values:
PENDING
IN PROGRESS
ACTIVE
CREATE FAILED
The status of the batch inference job. The status is one of the following * values:
PENDING
IN PROGRESS
ACTIVE
CREATE FAILED
The status of the batch inference job. The status is one of the following * values:
PENDING
IN PROGRESS
ACTIVE
CREATE FAILED
The status of the batch inference job. The status is one of the following * values:
PENDING
IN PROGRESS
ACTIVE
CREATE FAILED
The status of the batch inference job. The status is one of the following * values:
PENDING
IN PROGRESS
ACTIVE
CREATE FAILED
The time at which the batch inference job was created.
*/

    /**
     * <p>The time at which the batch inference job was created.</p>
     */
    inline const Aws::Utils::DateTime& GetCreationDateTime() const { return m_creationDateTime; }

    /**
     * <p>Returns true when the creation timestamp has been assigned.</p>
     */
    inline bool CreationDateTimeHasBeenSet() const { return m_creationDateTimeHasBeenSet; }

    /**
     * <p>Sets the time at which the batch inference job was created.</p>
     */
    inline void SetCreationDateTime(const Aws::Utils::DateTime& value) { m_creationDateTime = value; m_creationDateTimeHasBeenSet = true; }

    /**
     * <p>Sets the creation timestamp (move overload).</p>
     */
    inline void SetCreationDateTime(Aws::Utils::DateTime&& value) { m_creationDateTime = std::move(value); m_creationDateTimeHasBeenSet = true; }

    /**
     * <p>Sets the creation timestamp and returns this object so calls can be
     * chained.</p>
     */
    inline BatchInferenceJob& WithCreationDateTime(const Aws::Utils::DateTime& value) { m_creationDateTime = value; m_creationDateTimeHasBeenSet = true; return *this; }

    /**
     * <p>Move-assigns the creation timestamp and returns this object so calls can
     * be chained.</p>
     */
    inline BatchInferenceJob& WithCreationDateTime(Aws::Utils::DateTime&& value) { m_creationDateTime = std::move(value); m_creationDateTimeHasBeenSet = true; return *this; }

    /**
     * <p>The time at which the batch inference job was last updated.</p>
*/ inline const Aws::Utils::DateTime& GetLastUpdatedDateTime() const{ return m_lastUpdatedDateTime; } /** *The time at which the batch inference job was last updated.
*/ inline bool LastUpdatedDateTimeHasBeenSet() const { return m_lastUpdatedDateTimeHasBeenSet; } /** *The time at which the batch inference job was last updated.
*/ inline void SetLastUpdatedDateTime(const Aws::Utils::DateTime& value) { m_lastUpdatedDateTimeHasBeenSet = true; m_lastUpdatedDateTime = value; } /** *The time at which the batch inference job was last updated.
*/ inline void SetLastUpdatedDateTime(Aws::Utils::DateTime&& value) { m_lastUpdatedDateTimeHasBeenSet = true; m_lastUpdatedDateTime = std::move(value); } /** *The time at which the batch inference job was last updated.
*/ inline BatchInferenceJob& WithLastUpdatedDateTime(const Aws::Utils::DateTime& value) { SetLastUpdatedDateTime(value); return *this;} /** *The time at which the batch inference job was last updated.
*/ inline BatchInferenceJob& WithLastUpdatedDateTime(Aws::Utils::DateTime&& value) { SetLastUpdatedDateTime(std::move(value)); return *this;} private: Aws::String m_jobName; bool m_jobNameHasBeenSet = false; Aws::String m_batchInferenceJobArn; bool m_batchInferenceJobArnHasBeenSet = false; Aws::String m_filterArn; bool m_filterArnHasBeenSet = false; Aws::String m_failureReason; bool m_failureReasonHasBeenSet = false; Aws::String m_solutionVersionArn; bool m_solutionVersionArnHasBeenSet = false; int m_numResults; bool m_numResultsHasBeenSet = false; BatchInferenceJobInput m_jobInput; bool m_jobInputHasBeenSet = false; BatchInferenceJobOutput m_jobOutput; bool m_jobOutputHasBeenSet = false; BatchInferenceJobConfig m_batchInferenceJobConfig; bool m_batchInferenceJobConfigHasBeenSet = false; Aws::String m_roleArn; bool m_roleArnHasBeenSet = false; Aws::String m_status; bool m_statusHasBeenSet = false; Aws::Utils::DateTime m_creationDateTime; bool m_creationDateTimeHasBeenSet = false; Aws::Utils::DateTime m_lastUpdatedDateTime; bool m_lastUpdatedDateTimeHasBeenSet = false; }; } // namespace Model } // namespace Personalize } // namespace Aws