/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/lookoutequipment/LookoutEquipment_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/lookoutequipment/model/InferenceSchedulerStatus.h>
#include <aws/lookoutequipment/model/DataUploadFrequency.h>
#include <aws/lookoutequipment/model/LatestInferenceResult.h>
#include <utility>

namespace Aws
{
namespace Utils
{
namespace Json
{
  class JsonValue;
  class JsonView;
} // namespace Json
} // namespace Utils
namespace LookoutEquipment
{
namespace Model
{

  /**
   * Contains information about the specific inference scheduler, including data
   * delay offset, model name and ARN, status, and so on.
   *
   * See Also: AWS API Reference
   */
  class InferenceSchedulerSummary
  {
  public:
    AWS_LOOKOUTEQUIPMENT_API InferenceSchedulerSummary();
    AWS_LOOKOUTEQUIPMENT_API InferenceSchedulerSummary(Aws::Utils::Json::JsonView jsonValue);
    AWS_LOOKOUTEQUIPMENT_API InferenceSchedulerSummary& operator=(Aws::Utils::Json::JsonView jsonValue);
    AWS_LOOKOUTEQUIPMENT_API Aws::Utils::Json::JsonValue Jsonize() const;

    ///@{
    /**
     * The name of the ML model used for the inference scheduler.
     */
    inline const Aws::String& GetModelName() const { return m_modelName; }
    inline bool ModelNameHasBeenSet() const { return m_modelNameHasBeenSet; }
    inline void SetModelName(const Aws::String& value) { m_modelNameHasBeenSet = true; m_modelName = value; }
    inline void SetModelName(Aws::String&& value) { m_modelNameHasBeenSet = true; m_modelName = std::move(value); }
    inline void SetModelName(const char* value) { m_modelNameHasBeenSet = true; m_modelName.assign(value); }
    inline InferenceSchedulerSummary& WithModelName(const Aws::String& value) { SetModelName(value); return *this; }
    inline InferenceSchedulerSummary& WithModelName(Aws::String&& value) { SetModelName(std::move(value)); return *this; }
    inline InferenceSchedulerSummary& WithModelName(const char* value) { SetModelName(value); return *this; }
    ///@}

    ///@{
    /**
     * The Amazon Resource Name (ARN) of the ML model used by the inference
     * scheduler.
     */
    inline const Aws::String& GetModelArn() const { return m_modelArn; }
    inline bool ModelArnHasBeenSet() const { return m_modelArnHasBeenSet; }
    inline void SetModelArn(const Aws::String& value) { m_modelArnHasBeenSet = true; m_modelArn = value; }
    inline void SetModelArn(Aws::String&& value) { m_modelArnHasBeenSet = true; m_modelArn = std::move(value); }
    inline void SetModelArn(const char* value) { m_modelArnHasBeenSet = true; m_modelArn.assign(value); }
    inline InferenceSchedulerSummary& WithModelArn(const Aws::String& value) { SetModelArn(value); return *this; }
    inline InferenceSchedulerSummary& WithModelArn(Aws::String&& value) { SetModelArn(std::move(value)); return *this; }
    inline InferenceSchedulerSummary& WithModelArn(const char* value) { SetModelArn(value); return *this; }
    ///@}

    ///@{
    /**
     * The name of the inference scheduler.
     */
    inline const Aws::String& GetInferenceSchedulerName() const { return m_inferenceSchedulerName; }
    inline bool InferenceSchedulerNameHasBeenSet() const { return m_inferenceSchedulerNameHasBeenSet; }
    inline void SetInferenceSchedulerName(const Aws::String& value) { m_inferenceSchedulerNameHasBeenSet = true; m_inferenceSchedulerName = value; }
    inline void SetInferenceSchedulerName(Aws::String&& value) { m_inferenceSchedulerNameHasBeenSet = true; m_inferenceSchedulerName = std::move(value); }
    inline void SetInferenceSchedulerName(const char* value) { m_inferenceSchedulerNameHasBeenSet = true; m_inferenceSchedulerName.assign(value); }
    inline InferenceSchedulerSummary& WithInferenceSchedulerName(const Aws::String& value) { SetInferenceSchedulerName(value); return *this; }
    inline InferenceSchedulerSummary& WithInferenceSchedulerName(Aws::String&& value) { SetInferenceSchedulerName(std::move(value)); return *this; }
    inline InferenceSchedulerSummary& WithInferenceSchedulerName(const char* value) { SetInferenceSchedulerName(value); return *this; }
    ///@}

    ///@{
    /**
     * The Amazon Resource Name (ARN) of the inference scheduler.
     */
    inline const Aws::String& GetInferenceSchedulerArn() const { return m_inferenceSchedulerArn; }
    inline bool InferenceSchedulerArnHasBeenSet() const { return m_inferenceSchedulerArnHasBeenSet; }
    inline void SetInferenceSchedulerArn(const Aws::String& value) { m_inferenceSchedulerArnHasBeenSet = true; m_inferenceSchedulerArn = value; }
    inline void SetInferenceSchedulerArn(Aws::String&& value) { m_inferenceSchedulerArnHasBeenSet = true; m_inferenceSchedulerArn = std::move(value); }
    inline void SetInferenceSchedulerArn(const char* value) { m_inferenceSchedulerArnHasBeenSet = true; m_inferenceSchedulerArn.assign(value); }
    inline InferenceSchedulerSummary& WithInferenceSchedulerArn(const Aws::String& value) { SetInferenceSchedulerArn(value); return *this; }
    inline InferenceSchedulerSummary& WithInferenceSchedulerArn(Aws::String&& value) { SetInferenceSchedulerArn(std::move(value)); return *this; }
    inline InferenceSchedulerSummary& WithInferenceSchedulerArn(const char* value) { SetInferenceSchedulerArn(value); return *this; }
    ///@}

    ///@{
    /**
     * Indicates the status of the inference scheduler.
     */
    inline const InferenceSchedulerStatus& GetStatus() const { return m_status; }
    inline bool StatusHasBeenSet() const { return m_statusHasBeenSet; }
    inline void SetStatus(const InferenceSchedulerStatus& value) { m_statusHasBeenSet = true; m_status = value; }
    inline void SetStatus(InferenceSchedulerStatus&& value) { m_statusHasBeenSet = true; m_status = std::move(value); }
    inline InferenceSchedulerSummary& WithStatus(const InferenceSchedulerStatus& value) { SetStatus(value); return *this; }
    inline InferenceSchedulerSummary& WithStatus(InferenceSchedulerStatus&& value) { SetStatus(std::move(value)); return *this; }
    ///@}

    ///@{
    /**
     * A period of time (in minutes) by which inference on the data is delayed after
     * the data starts. For instance, if an offset delay time of five minutes was
     * selected, inference will not begin on the data until the first data measurement
     * after the five minute mark. For example, if five minutes is selected, the
     * inference scheduler will wake up at the configured frequency with the
     * additional five minute delay time to check the customer S3 bucket. The customer
     * can upload data at the same frequency and they don't need to stop and restart
     * the scheduler when uploading new data.
     */
    inline long long GetDataDelayOffsetInMinutes() const { return m_dataDelayOffsetInMinutes; }
    inline bool DataDelayOffsetInMinutesHasBeenSet() const { return m_dataDelayOffsetInMinutesHasBeenSet; }
    inline void SetDataDelayOffsetInMinutes(long long value) { m_dataDelayOffsetInMinutesHasBeenSet = true; m_dataDelayOffsetInMinutes = value; }
    inline InferenceSchedulerSummary& WithDataDelayOffsetInMinutes(long long value) { SetDataDelayOffsetInMinutes(value); return *this; }
    ///@}

    ///@{
    /**
     * How often data is uploaded to the source S3 bucket for the input data. This
     * value is the length of time between data uploads. For instance, if you select 5
     * minutes, Amazon Lookout for Equipment will upload the real-time data to the
     * source bucket once every 5 minutes. This frequency also determines how often
     * Amazon Lookout for Equipment starts a scheduled inference on your data. In this
     * example, it starts once every 5 minutes.
     */
    inline const DataUploadFrequency& GetDataUploadFrequency() const { return m_dataUploadFrequency; }
    inline bool DataUploadFrequencyHasBeenSet() const { return m_dataUploadFrequencyHasBeenSet; }
    inline void SetDataUploadFrequency(const DataUploadFrequency& value) { m_dataUploadFrequencyHasBeenSet = true; m_dataUploadFrequency = value; }
    inline void SetDataUploadFrequency(DataUploadFrequency&& value) { m_dataUploadFrequencyHasBeenSet = true; m_dataUploadFrequency = std::move(value); }
    inline InferenceSchedulerSummary& WithDataUploadFrequency(const DataUploadFrequency& value) { SetDataUploadFrequency(value); return *this; }
    inline InferenceSchedulerSummary& WithDataUploadFrequency(DataUploadFrequency&& value) { SetDataUploadFrequency(std::move(value)); return *this; }
    ///@}

    ///@{
    /**
     * Indicates whether the latest execution for the inference scheduler was
     * Anomalous (anomalous events found) or Normal (no anomalous events found).
     */
    inline const LatestInferenceResult& GetLatestInferenceResult() const { return m_latestInferenceResult; }
    inline bool LatestInferenceResultHasBeenSet() const { return m_latestInferenceResultHasBeenSet; }
    inline void SetLatestInferenceResult(const LatestInferenceResult& value) { m_latestInferenceResultHasBeenSet = true; m_latestInferenceResult = value; }
    inline void SetLatestInferenceResult(LatestInferenceResult&& value) { m_latestInferenceResultHasBeenSet = true; m_latestInferenceResult = std::move(value); }
    inline InferenceSchedulerSummary& WithLatestInferenceResult(const LatestInferenceResult& value) { SetLatestInferenceResult(value); return *this; }
    inline InferenceSchedulerSummary& WithLatestInferenceResult(LatestInferenceResult&& value) { SetLatestInferenceResult(std::move(value)); return *this; }
    ///@}

  private:

    Aws::String m_modelName;
    bool m_modelNameHasBeenSet = false;

    Aws::String m_modelArn;
    bool m_modelArnHasBeenSet = false;

    Aws::String m_inferenceSchedulerName;
    bool m_inferenceSchedulerNameHasBeenSet = false;

    Aws::String m_inferenceSchedulerArn;
    bool m_inferenceSchedulerArnHasBeenSet = false;

    InferenceSchedulerStatus m_status;
    bool m_statusHasBeenSet = false;

    long long m_dataDelayOffsetInMinutes;
    bool m_dataDelayOffsetInMinutesHasBeenSet = false;

    DataUploadFrequency m_dataUploadFrequency;
    bool m_dataUploadFrequencyHasBeenSet = false;

    LatestInferenceResult m_latestInferenceResult;
    bool m_latestInferenceResultHasBeenSet = false;
  };

} // namespace Model
} // namespace LookoutEquipment
} // namespace Aws
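
/*
 * Usage sketch (illustrative only; not part of the generated header). A summary
 * of this type is normally obtained from a ListInferenceSchedulers result, but
 * the fluent With* setters can also populate one directly, as shown below. The
 * scheduler and model names are hypothetical, and RUNNING is assumed to be a
 * valid InferenceSchedulerStatus enumerator.
 *
 *   #include <aws/lookoutequipment/model/InferenceSchedulerSummary.h>
 *
 *   using namespace Aws::LookoutEquipment::Model;
 *
 *   InferenceSchedulerSummary summary = InferenceSchedulerSummary()
 *       .WithInferenceSchedulerName("example-scheduler")   // hypothetical name
 *       .WithModelName("example-model")                    // hypothetical model
 *       .WithStatus(InferenceSchedulerStatus::RUNNING)     // assumed enum value
 *       .WithDataDelayOffsetInMinutes(5);                  // wait 5 minutes for late data
 */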