/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */
#pragma once
#include <aws/sagemaker/SageMaker_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/core/utils/memory/stl/AWSMap.h>
#include <aws/sagemaker/model/ModelInput.h>
#include <utility>

namespace Aws
{
namespace SageMaker
{
namespace Model
{

  /**
   * <p>Describes the Docker container for the model package.</p><p><h3>See
   * Also:</h3> AWS API Reference</p>
   */
  class AWS_SAGEMAKER_API ModelPackageContainerDefinition
  {
  public:

    /**
     * <p>The DNS host name for the Docker container.</p>
     */
    inline const Aws::String& GetContainerHostname() const{ return m_containerHostname; }
    inline bool ContainerHostnameHasBeenSet() const { return m_containerHostnameHasBeenSet; }
    inline void SetContainerHostname(const Aws::String& value) { m_containerHostnameHasBeenSet = true; m_containerHostname = value; }
    inline void SetContainerHostname(Aws::String&& value) { m_containerHostnameHasBeenSet = true; m_containerHostname = std::move(value); }
    inline void SetContainerHostname(const char* value) { m_containerHostnameHasBeenSet = true; m_containerHostname.assign(value); }
    inline ModelPackageContainerDefinition& WithContainerHostname(const Aws::String& value) { SetContainerHostname(value); return *this;}
    inline ModelPackageContainerDefinition& WithContainerHostname(Aws::String&& value) { SetContainerHostname(std::move(value)); return *this;}
    inline ModelPackageContainerDefinition& WithContainerHostname(const char* value) { SetContainerHostname(value); return *this;}

    /**
     * <p>The Amazon EC2 Container Registry (Amazon ECR) path where inference code
     * is stored. If you are using your own custom algorithm instead of an algorithm
     * provided by SageMaker, the inference code must meet SageMaker requirements.
     * SageMaker supports both <code>registry/repository[:tag]</code> and
     * <code>registry/repository[@digest]</code> image path formats. For more
     * information, see Using Your Own Algorithms with Amazon SageMaker.</p>
     */
    inline const Aws::String& GetImage() const{ return m_image; }
    inline bool ImageHasBeenSet() const { return m_imageHasBeenSet; }
    inline void SetImage(const Aws::String& value) { m_imageHasBeenSet = true; m_image = value; }
    inline void SetImage(Aws::String&& value) { m_imageHasBeenSet = true; m_image = std::move(value); }
    inline void SetImage(const char* value) { m_imageHasBeenSet = true; m_image.assign(value); }
    inline ModelPackageContainerDefinition& WithImage(const Aws::String& value) { SetImage(value); return *this;}
    inline ModelPackageContainerDefinition& WithImage(Aws::String&& value) { SetImage(std::move(value)); return *this;}
    inline ModelPackageContainerDefinition& WithImage(const char* value) { SetImage(value); return *this;}
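
    // Illustrative only: the two image path formats referred to in the documentation
    // above, written out as concrete ECR URIs. The account ID, region, repository
    // name, tag, and digest below are placeholders, not real values.
    //
    //   123456789012.dkr.ecr.us-west-2.amazonaws.com/my-algo:1.0          registry/repository[:tag]
    //   123456789012.dkr.ecr.us-west-2.amazonaws.com/my-algo@sha256:0123...  registry/repository[@digest]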

    /**
     * <p>An MD5 hash of the training algorithm that identifies the Docker image
     * used for training.</p>
     */
    inline const Aws::String& GetImageDigest() const{ return m_imageDigest; }
    inline bool ImageDigestHasBeenSet() const { return m_imageDigestHasBeenSet; }
    inline void SetImageDigest(const Aws::String& value) { m_imageDigestHasBeenSet = true; m_imageDigest = value; }
    inline void SetImageDigest(Aws::String&& value) { m_imageDigestHasBeenSet = true; m_imageDigest = std::move(value); }
    inline void SetImageDigest(const char* value) { m_imageDigestHasBeenSet = true; m_imageDigest.assign(value); }
    inline ModelPackageContainerDefinition& WithImageDigest(const Aws::String& value) { SetImageDigest(value); return *this;}
    inline ModelPackageContainerDefinition& WithImageDigest(Aws::String&& value) { SetImageDigest(std::move(value)); return *this;}
    inline ModelPackageContainerDefinition& WithImageDigest(const char* value) { SetImageDigest(value); return *this;}

    /**
     * <p>The Amazon S3 path where the model artifacts, which result from model
     * training, are stored. This path must point to a single <code>gzip</code>
     * compressed tar archive (<code>.tar.gz</code> suffix).</p> <p>The model
     * artifacts must be in an S3 bucket that is in the same region as the model
     * package.</p>
     */
    inline const Aws::String& GetModelDataUrl() const{ return m_modelDataUrl; }
    inline bool ModelDataUrlHasBeenSet() const { return m_modelDataUrlHasBeenSet; }
    inline void SetModelDataUrl(const Aws::String& value) { m_modelDataUrlHasBeenSet = true; m_modelDataUrl = value; }
    inline void SetModelDataUrl(Aws::String&& value) { m_modelDataUrlHasBeenSet = true; m_modelDataUrl = std::move(value); }
    inline void SetModelDataUrl(const char* value) { m_modelDataUrlHasBeenSet = true; m_modelDataUrl.assign(value); }
    inline ModelPackageContainerDefinition& WithModelDataUrl(const Aws::String& value) { SetModelDataUrl(value); return *this;}
    inline ModelPackageContainerDefinition& WithModelDataUrl(Aws::String&& value) { SetModelDataUrl(std::move(value)); return *this;}
    inline ModelPackageContainerDefinition& WithModelDataUrl(const char* value) { SetModelDataUrl(value); return *this;}

    /**
     * <p>The Amazon Web Services Marketplace product ID of the model package.</p>
     */
    inline const Aws::String& GetProductId() const{ return m_productId; }
    inline bool ProductIdHasBeenSet() const { return m_productIdHasBeenSet; }
    inline void SetProductId(const Aws::String& value) { m_productIdHasBeenSet = true; m_productId = value; }
    inline void SetProductId(Aws::String&& value) { m_productIdHasBeenSet = true; m_productId = std::move(value); }
    inline void SetProductId(const char* value) { m_productIdHasBeenSet = true; m_productId.assign(value); }
    inline ModelPackageContainerDefinition& WithProductId(const Aws::String& value) { SetProductId(value); return *this;}
    inline ModelPackageContainerDefinition& WithProductId(Aws::String&& value) { SetProductId(std::move(value)); return *this;}
    inline ModelPackageContainerDefinition& WithProductId(const char* value) { SetProductId(value); return *this;}

    /**
     * <p>The environment variables to set in the Docker container. Each key and
     * value in the <code>Environment</code> string to string map can have length of
     * up to 1024. We support up to 16 entries in the map.</p>
     */
    inline const Aws::Map<Aws::String, Aws::String>& GetEnvironment() const{ return m_environment; }
    inline bool EnvironmentHasBeenSet() const { return m_environmentHasBeenSet; }
    inline void SetEnvironment(const Aws::Map<Aws::String, Aws::String>& value) { m_environmentHasBeenSet = true; m_environment = value; }
    inline void SetEnvironment(Aws::Map<Aws::String, Aws::String>&& value) { m_environmentHasBeenSet = true; m_environment = std::move(value); }
    inline ModelPackageContainerDefinition& WithEnvironment(const Aws::Map<Aws::String, Aws::String>& value) { SetEnvironment(value); return *this;}
    inline ModelPackageContainerDefinition& WithEnvironment(Aws::Map<Aws::String, Aws::String>&& value) { SetEnvironment(std::move(value)); return *this;}
    inline ModelPackageContainerDefinition& AddEnvironment(const Aws::String& key, const Aws::String& value) { m_environmentHasBeenSet = true; m_environment.emplace(key, value); return *this; }
    inline ModelPackageContainerDefinition& AddEnvironment(Aws::String&& key, const Aws::String& value) { m_environmentHasBeenSet = true; m_environment.emplace(std::move(key), value); return *this; }
    inline ModelPackageContainerDefinition& AddEnvironment(const Aws::String& key, Aws::String&& value) { m_environmentHasBeenSet = true; m_environment.emplace(key, std::move(value)); return *this; }
    inline ModelPackageContainerDefinition& AddEnvironment(Aws::String&& key, Aws::String&& value) { m_environmentHasBeenSet = true; m_environment.emplace(std::move(key), std::move(value)); return *this; }
    inline ModelPackageContainerDefinition& AddEnvironment(const char* key, Aws::String&& value) { m_environmentHasBeenSet = true; m_environment.emplace(key, std::move(value)); return *this; }
    inline ModelPackageContainerDefinition& AddEnvironment(Aws::String&& key, const char* value) { m_environmentHasBeenSet = true; m_environment.emplace(std::move(key), value); return *this; }
    inline ModelPackageContainerDefinition& AddEnvironment(const char* key, const char* value) { m_environmentHasBeenSet = true; m_environment.emplace(key, value); return *this; }
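
    // Illustrative sketch (not part of the generated header): AddEnvironment inserts a
    // single key/value pair and returns *this, so callers typically chain it for the
    // handful of variables a container needs, staying within the documented limits
    // (up to 16 entries, each key and value up to 1024 characters). The variable
    // names below are hypothetical placeholders, not values required by SageMaker.
    //
    //   ModelPackageContainerDefinition definition;
    //   definition.AddEnvironment("MODEL_SERVER_WORKERS", "2")
    //             .AddEnvironment("LOG_LEVEL", "info");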

    /**
     * <p>A structure with Model Input details.</p>
     */
    inline const ModelInput& GetModelInput() const{ return m_modelInput; }
    inline bool ModelInputHasBeenSet() const { return m_modelInputHasBeenSet; }
    inline void SetModelInput(const ModelInput& value) { m_modelInputHasBeenSet = true; m_modelInput = value; }
    inline void SetModelInput(ModelInput&& value) { m_modelInputHasBeenSet = true; m_modelInput = std::move(value); }
    inline ModelPackageContainerDefinition& WithModelInput(const ModelInput& value) { SetModelInput(value); return *this;}
    inline ModelPackageContainerDefinition& WithModelInput(ModelInput&& value) { SetModelInput(std::move(value)); return *this;}

    /**
     * <p>The machine learning framework of the model package container image.</p>
     */
    inline const Aws::String& GetFramework() const{ return m_framework; }
    inline bool FrameworkHasBeenSet() const { return m_frameworkHasBeenSet; }
    inline void SetFramework(const Aws::String& value) { m_frameworkHasBeenSet = true; m_framework = value; }
    inline void SetFramework(Aws::String&& value) { m_frameworkHasBeenSet = true; m_framework = std::move(value); }
    inline void SetFramework(const char* value) { m_frameworkHasBeenSet = true; m_framework.assign(value); }
    inline ModelPackageContainerDefinition& WithFramework(const Aws::String& value) { SetFramework(value); return *this;}
    inline ModelPackageContainerDefinition& WithFramework(Aws::String&& value) { SetFramework(std::move(value)); return *this;}
    inline ModelPackageContainerDefinition& WithFramework(const char* value) { SetFramework(value); return *this;}

    /**
     * <p>The framework version of the Model Package Container Image.</p>
     */
    inline const Aws::String& GetFrameworkVersion() const{ return m_frameworkVersion; }
    inline bool FrameworkVersionHasBeenSet() const { return m_frameworkVersionHasBeenSet; }
    inline void SetFrameworkVersion(const Aws::String& value) { m_frameworkVersionHasBeenSet = true; m_frameworkVersion = value; }
    inline void SetFrameworkVersion(Aws::String&& value) { m_frameworkVersionHasBeenSet = true; m_frameworkVersion = std::move(value); }
    inline void SetFrameworkVersion(const char* value) { m_frameworkVersionHasBeenSet = true; m_frameworkVersion.assign(value); }
    inline ModelPackageContainerDefinition& WithFrameworkVersion(const Aws::String& value) { SetFrameworkVersion(value); return *this;}
    inline ModelPackageContainerDefinition& WithFrameworkVersion(Aws::String&& value) { SetFrameworkVersion(std::move(value)); return *this;}
    inline ModelPackageContainerDefinition& WithFrameworkVersion(const char* value) { SetFrameworkVersion(value); return *this;}

    /**
     * <p>The name of a pre-trained machine learning model benchmarked by Amazon
     * SageMaker Inference Recommender that matches your model. You can find a list
     * of benchmarked models by calling <code>ListModelMetadata</code>.</p>
     */
    inline const Aws::String& GetNearestModelName() const{ return m_nearestModelName; }
    inline bool NearestModelNameHasBeenSet() const { return m_nearestModelNameHasBeenSet; }
    inline void SetNearestModelName(const Aws::String& value) { m_nearestModelNameHasBeenSet = true; m_nearestModelName = value; }
    inline void SetNearestModelName(Aws::String&& value) { m_nearestModelNameHasBeenSet = true; m_nearestModelName = std::move(value); }
    inline void SetNearestModelName(const char* value) { m_nearestModelNameHasBeenSet = true; m_nearestModelName.assign(value); }
    inline ModelPackageContainerDefinition& WithNearestModelName(const Aws::String& value) { SetNearestModelName(value); return *this;}
    inline ModelPackageContainerDefinition& WithNearestModelName(Aws::String&& value) { SetNearestModelName(std::move(value)); return *this;}
    inline ModelPackageContainerDefinition& WithNearestModelName(const char* value) { SetNearestModelName(value); return *this;}
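
    // A minimal usage sketch (illustrative, not part of the generated header): the
    // With* setters return *this, so a container definition can be composed fluently
    // before it is attached to a model package request. The ECR image URI, S3 path,
    // and model name below are placeholder values.
    //
    //   ModelPackageContainerDefinition container;
    //   container.WithImage("123456789012.dkr.ecr.us-east-1.amazonaws.com/my-inference-image:latest")
    //            .WithModelDataUrl("s3://my-bucket/output/model.tar.gz")
    //            .WithNearestModelName("resnet50");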