/*
 * Copyright 2018-2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.personalize.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 *
 * @see AWS API Documentation
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateBatchInferenceJobRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The name of the batch inference job to create.
     * </p>
     */
    private String jobName;
    /**
     * <p>
     * The Amazon Resource Name (ARN) of the solution version that will be used to generate the batch inference
     * recommendations.
     * </p>
     */
    private String solutionVersionArn;
    /**
     * <p>
     * The ARN of the filter to apply to the batch inference job. For more information on using filters, see Filtering
     * batch recommendations.
     * </p>
     */
    private String filterArn;
    /**
     * <p>
     * The number of recommendations to retrieve.
     * </p>
     */
    private Integer numResults;
    /**
     * <p>
     * The Amazon S3 path that leads to the input file to base your recommendations on. The input material must be in
     * JSON format.
     * </p>
     */
    private BatchInferenceJobInput jobInput;
    /**
     * <p>
     * The path to the Amazon S3 bucket where the job's output will be stored.
     * </p>
     */
    private BatchInferenceJobOutput jobOutput;
    /**
     * <p>
     * The ARN of the Amazon Identity and Access Management role that has permissions to read and write to your input
     * and output Amazon S3 buckets respectively.
     * </p>
     */
    private String roleArn;
    /**
     * <p>
     * The configuration details of a batch inference job.
     * </p>
     */
    private BatchInferenceJobConfig batchInferenceJobConfig;
    /**
     * <p>
     * A list of tags to apply to the batch inference job.
     * </p>
     */
    private java.util.List<Tag> tags;

    /**
     * <p>
     * The name of the batch inference job to create.
     * </p>
     *
     * @param jobName
     *        The name of the batch inference job to create.
     */
    public void setJobName(String jobName) {
        this.jobName = jobName;
    }

    /**
     * <p>
     * The name of the batch inference job to create.
     * </p>
     *
     * @return The name of the batch inference job to create.
     */
    public String getJobName() {
        return this.jobName;
    }

    /**
     * <p>
     * The name of the batch inference job to create.
     * </p>
     *
     * @param jobName
     *        The name of the batch inference job to create.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBatchInferenceJobRequest withJobName(String jobName) {
        setJobName(jobName);
        return this;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the solution version that will be used to generate the batch inference
     * recommendations.
     * </p>
     *
     * @param solutionVersionArn
     *        The Amazon Resource Name (ARN) of the solution version that will be used to generate the batch inference
     *        recommendations.
     */
    public void setSolutionVersionArn(String solutionVersionArn) {
        this.solutionVersionArn = solutionVersionArn;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the solution version that will be used to generate the batch inference
     * recommendations.
     * </p>
     *
     * @return The Amazon Resource Name (ARN) of the solution version that will be used to generate the batch inference
     *         recommendations.
     */
    public String getSolutionVersionArn() {
        return this.solutionVersionArn;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the solution version that will be used to generate the batch inference
     * recommendations.
     * </p>
     *
     * @param solutionVersionArn
     *        The Amazon Resource Name (ARN) of the solution version that will be used to generate the batch inference
     *        recommendations.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBatchInferenceJobRequest withSolutionVersionArn(String solutionVersionArn) {
        setSolutionVersionArn(solutionVersionArn);
        return this;
    }

    /**
     * <p>
     * The ARN of the filter to apply to the batch inference job. For more information on using filters, see Filtering
     * batch recommendations.
     * </p>
     *
     * @param filterArn
     *        The ARN of the filter to apply to the batch inference job. For more information on using filters, see
     *        Filtering batch recommendations.
     */
    public void setFilterArn(String filterArn) {
        this.filterArn = filterArn;
    }

    /**
     * <p>
     * The ARN of the filter to apply to the batch inference job. For more information on using filters, see Filtering
     * batch recommendations.
     * </p>
     *
     * @return The ARN of the filter to apply to the batch inference job. For more information on using filters, see
     *         Filtering batch recommendations.
     */
    public String getFilterArn() {
        return this.filterArn;
    }

    /**
     * <p>
     * The ARN of the filter to apply to the batch inference job. For more information on using filters, see Filtering
     * batch recommendations.
     * </p>
     *
     * @param filterArn
     *        The ARN of the filter to apply to the batch inference job. For more information on using filters, see
     *        Filtering batch recommendations.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBatchInferenceJobRequest withFilterArn(String filterArn) {
        setFilterArn(filterArn);
        return this;
    }

    /**
     * <p>
     * The number of recommendations to retrieve.
     * </p>
     *
     * @param numResults
     *        The number of recommendations to retrieve.
     */
    public void setNumResults(Integer numResults) {
        this.numResults = numResults;
    }

    /**
     * <p>
     * The number of recommendations to retrieve.
     * </p>
     *
     * @return The number of recommendations to retrieve.
     */
    public Integer getNumResults() {
        return this.numResults;
    }

    /**
     * <p>
     * The number of recommendations to retrieve.
     * </p>
     *
     * @param numResults
     *        The number of recommendations to retrieve.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBatchInferenceJobRequest withNumResults(Integer numResults) {
        setNumResults(numResults);
        return this;
    }

    /**
     * <p>
     * The Amazon S3 path that leads to the input file to base your recommendations on. The input material must be in
     * JSON format.
     * </p>
     *
     * @param jobInput
     *        The Amazon S3 path that leads to the input file to base your recommendations on. The input material must
     *        be in JSON format.
     */
    public void setJobInput(BatchInferenceJobInput jobInput) {
        this.jobInput = jobInput;
    }

    /**
     * <p>
     * The Amazon S3 path that leads to the input file to base your recommendations on. The input material must be in
     * JSON format.
     * </p>
     *
     * @return The Amazon S3 path that leads to the input file to base your recommendations on. The input material must
     *         be in JSON format.
     */
    public BatchInferenceJobInput getJobInput() {
        return this.jobInput;
    }

    /**
     * <p>
     * The Amazon S3 path that leads to the input file to base your recommendations on. The input material must be in
     * JSON format.
     * </p>
     *
     * @param jobInput
     *        The Amazon S3 path that leads to the input file to base your recommendations on. The input material must
     *        be in JSON format.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBatchInferenceJobRequest withJobInput(BatchInferenceJobInput jobInput) {
        setJobInput(jobInput);
        return this;
    }

    /**
     * <p>
     * The path to the Amazon S3 bucket where the job's output will be stored.
     * </p>
     *
     * @param jobOutput
     *        The path to the Amazon S3 bucket where the job's output will be stored.
     */
    public void setJobOutput(BatchInferenceJobOutput jobOutput) {
        this.jobOutput = jobOutput;
    }

    /**
     * <p>
     * The path to the Amazon S3 bucket where the job's output will be stored.
     * </p>
     *
     * @return The path to the Amazon S3 bucket where the job's output will be stored.
     */
    public BatchInferenceJobOutput getJobOutput() {
        return this.jobOutput;
    }

    /**
     * <p>
     * The path to the Amazon S3 bucket where the job's output will be stored.
     * </p>
     *
     * @param jobOutput
     *        The path to the Amazon S3 bucket where the job's output will be stored.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBatchInferenceJobRequest withJobOutput(BatchInferenceJobOutput jobOutput) {
        setJobOutput(jobOutput);
        return this;
    }

    /**
     * <p>
     * The ARN of the Amazon Identity and Access Management role that has permissions to read and write to your input
     * and output Amazon S3 buckets respectively.
     * </p>
     *
     * @param roleArn
     *        The ARN of the Amazon Identity and Access Management role that has permissions to read and write to your
     *        input and output Amazon S3 buckets respectively.
     */
    public void setRoleArn(String roleArn) {
        this.roleArn = roleArn;
    }

    /**
     * <p>
     * The ARN of the Amazon Identity and Access Management role that has permissions to read and write to your input
     * and output Amazon S3 buckets respectively.
     * </p>
     *
     * @return The ARN of the Amazon Identity and Access Management role that has permissions to read and write to your
     *         input and output Amazon S3 buckets respectively.
     */
    public String getRoleArn() {
        return this.roleArn;
    }

    /**
     * <p>
     * The ARN of the Amazon Identity and Access Management role that has permissions to read and write to your input
     * and output Amazon S3 buckets respectively.
     * </p>
     *
     * @param roleArn
     *        The ARN of the Amazon Identity and Access Management role that has permissions to read and write to your
     *        input and output Amazon S3 buckets respectively.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBatchInferenceJobRequest withRoleArn(String roleArn) {
        setRoleArn(roleArn);
        return this;
    }

    /**
     * <p>
     * The configuration details of a batch inference job.
     * </p>
     *
     * @param batchInferenceJobConfig
     *        The configuration details of a batch inference job.
     */
    public void setBatchInferenceJobConfig(BatchInferenceJobConfig batchInferenceJobConfig) {
        this.batchInferenceJobConfig = batchInferenceJobConfig;
    }

    /**
     * <p>
     * The configuration details of a batch inference job.
     * </p>
     *
     * @return The configuration details of a batch inference job.
     */
    public BatchInferenceJobConfig getBatchInferenceJobConfig() {
        return this.batchInferenceJobConfig;
    }

    /**
     * <p>
     * The configuration details of a batch inference job.
     * </p>
     *
     * @param batchInferenceJobConfig
     *        The configuration details of a batch inference job.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBatchInferenceJobRequest withBatchInferenceJobConfig(BatchInferenceJobConfig batchInferenceJobConfig) {
        setBatchInferenceJobConfig(batchInferenceJobConfig);
        return this;
    }

    /**
     * <p>
     * A list of tags to apply to the batch inference job.
     * </p>
     *
     * @return A list of tags to apply to the batch inference job.
     */
    public java.util.List<Tag> getTags() {
        return tags;
    }

    /**
     * <p>
     * A list of tags to apply to the batch inference job.
     * </p>
     *
     * @param tags
     *        A list of tags to apply to the batch inference job.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
            return;
        }

        this.tags = new java.util.ArrayList<Tag>(tags);
    }

    /**
     * <p>
     * A list of tags to apply to the batch inference job.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param tags
     *        A list of tags to apply to the batch inference job.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBatchInferenceJobRequest withTags(Tag... tags) {
        if (this.tags == null) {
            setTags(new java.util.ArrayList<Tag>(tags.length));
        }
        for (Tag ele : tags) {
            this.tags.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * A list of tags to apply to the batch inference job.
     * </p>
     *
     * @param tags
     *        A list of tags to apply to the batch inference job.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateBatchInferenceJobRequest withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }
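    // Illustrative usage (not part of the generated model): a minimal sketch of building this request with the
    // fluent with* methods before passing it to an AmazonPersonalize client's createBatchInferenceJob call. The
    // job name, ARNs, S3 paths, and tag values below are placeholders, and the nested BatchInferenceJobInput,
    // BatchInferenceJobOutput, S3DataConfig, and Tag builders are assumed to follow the same with*-style pattern
    // used in this class.
    //
    //   CreateBatchInferenceJobRequest request = new CreateBatchInferenceJobRequest()
    //           .withJobName("my-batch-inference-job")
    //           .withSolutionVersionArn("arn:aws:personalize:us-west-2:123456789012:solution/my-solution/1")
    //           .withNumResults(25)
    //           .withJobInput(new BatchInferenceJobInput()
    //                   .withS3DataSource(new S3DataConfig().withPath("s3://my-bucket/batch-input.json")))
    //           .withJobOutput(new BatchInferenceJobOutput()
    //                   .withS3DataDestination(new S3DataConfig().withPath("s3://my-bucket/batch-output/")))
    //           .withRoleArn("arn:aws:iam::123456789012:role/PersonalizeS3Role")
    //           .withTags(new Tag().withTagKey("project").withTagValue("demo"));
    //
    // Note that withTags(Tag...) appends to any existing tag list, while setTags(Collection) and
    // withTags(Collection) replace it.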