/*
 * Copyright 2018-2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.sagemaker.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Contains information about the location of input model artifacts, the name and shape of the expected data inputs,
 * and the framework in which the model was trained.
 * </p>
 *
 * @see AWS API Documentation
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InputConfig implements Serializable, Cloneable, StructuredPojo {
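
    /*
     * Illustrative sketch (not part of the generated model): one way to populate an
     * InputConfig. The bucket, key, input name, and shape below are hypothetical
     * placeholders.
     *
     *     InputConfig exampleConfig = new InputConfig()
     *             .withS3Uri("s3://example-bucket/model/model.tar.gz")    // single gzip-compressed tar archive
     *             .withDataInputConfig("{\"input_1\": [1, 224, 224, 3]}") // framework-specific input name and shape
     *             .withFramework("TENSORFLOW");
     */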

    /**
     * <p>
     * The S3 path where the model artifacts, which result from model training, are stored. This path must point to a
     * single gzip compressed tar archive (.tar.gz suffix).
     * </p>
     */
    private String s3Uri;

    /**
     * <p>
     * Specifies the name and shape of the expected data inputs for your trained model with a JSON dictionary form. The
     * data inputs are Framework specific.
     * </p>
     * <p>
     * <code>DataInputConfig</code> supports the following parameters for <code>CoreML TargetDevice</code> (ML Model
     * format):
     * </p>
     * <p>
     * CoreML <code>ClassifierConfig</code> parameters can be specified using <code>OutputConfig CompilerOptions</code>.
     * CoreML converter supports Tensorflow and PyTorch models. CoreML conversion examples:
     * </p>
     * <p>
     * Depending on the model format, <code>DataInputConfig</code> requires the following parameters for
     * <code>ml_eia2</code> <code>OutputConfig:TargetDevice</code>.
     * </p>
     */
    private String dataInputConfig;
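
    /*
     * Illustrative sketch: DataInputConfig is a JSON dictionary that maps each model input
     * name to its shape. The input names and shapes below are hypothetical and depend on the
     * framework and the specific model.
     *
     *     // e.g. a single four-dimensional image input in NHWC layout
     *     String singleInput = "{\"input_1\": [1, 224, 224, 3]}";
     *
     *     // e.g. a model with two named inputs
     *     String multiInput = "{\"data1\": [1, 3, 224, 224], \"data2\": [1, 3, 224, 224]}";
     */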

    /**
     * <p>
     * Identifies the framework in which the model was trained. For example: TENSORFLOW.
     * </p>
     */
    private String framework;

    /**
     * <p>
     * Specifies the framework version to use. This API field is only supported for the MXNet, PyTorch, TensorFlow and
     * TensorFlow Lite frameworks.
     * </p>
     * <p>
     * For information about framework versions supported for cloud targets and edge devices, see Cloud Supported
     * Instance Types and Frameworks and Edge Supported Frameworks.
     * </p>
     */
    private String frameworkVersion;
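
    /*
     * Illustrative sketch: FrameworkVersion accompanies Framework for the frameworks that
     * support it. The version string below is a hypothetical placeholder; valid values come
     * from the supported cloud and edge framework tables referenced above.
     *
     *     InputConfig versionedConfig = new InputConfig()
     *             .withFramework("PYTORCH")
     *             .withFrameworkVersion("1.13"); // hypothetical version string
     */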

    /**
     * <p>
     * The S3 path where the model artifacts, which result from model training, are stored. This path must point to a
     * single gzip compressed tar archive (.tar.gz suffix).
     * </p>
     *
     * @param s3Uri
     *        The S3 path where the model artifacts, which result from model training, are stored. This path must
     *        point to a single gzip compressed tar archive (.tar.gz suffix).
     */
    public void setS3Uri(String s3Uri) {
        this.s3Uri = s3Uri;
    }

    /**
     * <p>
     * The S3 path where the model artifacts, which result from model training, are stored. This path must point to a
     * single gzip compressed tar archive (.tar.gz suffix).
     * </p>
     *
     * @return The S3 path where the model artifacts, which result from model training, are stored. This path must
     *         point to a single gzip compressed tar archive (.tar.gz suffix).
     */
    public String getS3Uri() {
        return this.s3Uri;
    }

    /**
     * <p>
     * The S3 path where the model artifacts, which result from model training, are stored. This path must point to a
     * single gzip compressed tar archive (.tar.gz suffix).
     * </p>
     *
     * @param s3Uri
     *        The S3 path where the model artifacts, which result from model training, are stored. This path must
     *        point to a single gzip compressed tar archive (.tar.gz suffix).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public InputConfig withS3Uri(String s3Uri) {
        setS3Uri(s3Uri);
        return this;
    }
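
    /*
     * Illustrative sketch: the with* setters return this, so an InputConfig can be built
     * inline. The surrounding usage assumes the companion CreateCompilationJobRequest class
     * from this package; names and values are hypothetical.
     *
     *     CreateCompilationJobRequest request = new CreateCompilationJobRequest()
     *             .withCompilationJobName("example-compilation-job")
     *             .withInputConfig(new InputConfig()
     *                     .withS3Uri("s3://example-bucket/model/model.tar.gz")
     *                     .withFramework("TENSORFLOW"));
     */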

    /**
     * <p>
     * Specifies the name and shape of the expected data inputs for your trained model with a JSON dictionary form. The
     * data inputs are Framework specific.
     * </p>
     * <p>
     * <code>DataInputConfig</code> supports the following parameters for <code>CoreML TargetDevice</code> (ML Model
     * format):
     * </p>
     * <p>
     * CoreML <code>ClassifierConfig</code> parameters can be specified using <code>OutputConfig CompilerOptions</code>.
     * CoreML converter supports Tensorflow and PyTorch models. CoreML conversion examples:
     * </p>
     * <p>
     * Depending on the model format, <code>DataInputConfig</code> requires the following parameters for
     * <code>ml_eia2</code> <code>OutputConfig:TargetDevice</code>.
     * </p>
     *
     * @param dataInputConfig
     *        Specifies the name and shape of the expected data inputs for your trained model with a JSON dictionary
     *        form. The data inputs are Framework specific.
     *        <p>
     *        <code>DataInputConfig</code> supports the following parameters for <code>CoreML TargetDevice</code> (ML
     *        Model format):
     *        </p>
     *        <p>
     *        CoreML <code>ClassifierConfig</code> parameters can be specified using
     *        <code>OutputConfig CompilerOptions</code>. CoreML converter supports Tensorflow and PyTorch models.
     *        CoreML conversion examples:
     *        </p>
     *        <p>
     *        Depending on the model format, <code>DataInputConfig</code> requires the following parameters for
     *        <code>ml_eia2</code> <code>OutputConfig:TargetDevice</code>.
     *        </p>
     *