/* * Copyright 2018-2023 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.lookoutequipment.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** *

* Contains information about a specific inference scheduler, including the data delay offset, model name and ARN, status, and so on. *

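* As an illustrative sketch, assuming the summary comes from a {@code ListInferenceSchedulers} call (the client,
* request, and result names below follow the standard SDK naming for this package and are shown as an assumption):
*
* <pre>{@code
* // Hypothetical usage: list the schedulers and print each summary's name and status.
* AmazonLookoutEquipment client = AmazonLookoutEquipmentClientBuilder.defaultClient();
* ListInferenceSchedulersResult result = client.listInferenceSchedulers(new ListInferenceSchedulersRequest());
* for (InferenceSchedulerSummary summary : result.getInferenceSchedulerSummaries()) {
*     System.out.println(summary.getInferenceSchedulerName() + " -> " + summary.getStatus());
* }
*
* // The with* setters return this, so a summary can also be built fluently (hypothetical values):
* InferenceSchedulerSummary example = new InferenceSchedulerSummary()
*         .withInferenceSchedulerName("my-scheduler")
*         .withModelName("my-model");
* }</pre>
*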
* * @see AWS API Documentation */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class InferenceSchedulerSummary implements Serializable, Cloneable, StructuredPojo { /** *

* The name of the ML model used for the inference scheduler. *

*/ private String modelName; /** *

* The Amazon Resource Name (ARN) of the ML model used by the inference scheduler. *

*/ private String modelArn; /** *

* The name of the inference scheduler. *

*/ private String inferenceSchedulerName; /** *

* The Amazon Resource Name (ARN) of the inference scheduler. *

*/ private String inferenceSchedulerArn; /** *

* Indicates the status of the inference scheduler. *

*/ private String status; /** *

* A period of time (in minutes) by which inference on the data is delayed after the data starts. For instance, if an offset delay of five minutes is selected, inference does not begin until the first data measurement after the five-minute mark; the scheduler wakes up at the configured frequency and waits the additional five minutes before checking the customer's S3 bucket. Customers can keep uploading data at the same frequency and don't need to stop and restart the scheduler when uploading new data. *

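* As a worked example (illustrative only; {@code summary} below is a hypothetical instance of this class, and the
* timestamps are made up):
* <pre>{@code
* // With a five-minute delay offset, a run scheduled for 10:00 reads the input bucket at 10:05.
* long delayMinutes = summary.getDataDelayOffsetInMinutes();                       // e.g. 5
* Instant scheduledWakeUp = Instant.parse("2023-01-01T10:00:00Z");                 // hypothetical wake-up time
* Instant effectiveCheck = scheduledWakeUp.plus(Duration.ofMinutes(delayMinutes)); // 2023-01-01T10:05:00Z
* }</pre>
*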
*/ private Long dataDelayOffsetInMinutes; /** *

* How often data is uploaded to the source S3 bucket for the input data. This value is the length of time between * data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the real-time data * to the source bucket once every 5 minutes. This frequency also determines how often Amazon Lookout for Equipment * starts a scheduled inference on your data. In this example, it starts once every 5 minutes. *

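* A minimal sketch of interpreting the value (the {@code PT5M} constant name on {@link DataUploadFrequency} is an
* assumption; {@code summary} is a hypothetical instance of this class):
* <pre>{@code
* // A 5-minute upload frequency also means one scheduled inference run every 5 minutes.
* String frequency = summary.getDataUploadFrequency();
* boolean everyFiveMinutes = DataUploadFrequency.PT5M.toString().equals(frequency);
* }</pre>
*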
*/ private String dataUploadFrequency; /** *

* Indicates whether the latest execution for the inference scheduler was Anomalous (anomalous events found) or * Normal (no anomalous events found). *

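* A minimal sketch of acting on the value (the {@code ANOMALOUS} constant name on {@link LatestInferenceResult} is an
* assumption based on the values described above; {@code summary} is a hypothetical instance of this class):
* <pre>{@code
* if (LatestInferenceResult.ANOMALOUS.toString().equals(summary.getLatestInferenceResult())) {
*     // The latest execution found anomalous events.
* }
* }</pre>
*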
*/ private String latestInferenceResult; /** *

* The name of the ML model used for the inference scheduler. *

* * @param modelName * The name of the ML model used for the inference scheduler. */ public void setModelName(String modelName) { this.modelName = modelName; } /** *

* The name of the ML model used for the inference scheduler. *

* * @return The name of the ML model used for the inference scheduler. */ public String getModelName() { return this.modelName; } /** *

* The name of the ML model used for the inference scheduler. *

* * @param modelName * The name of the ML model used for the inference scheduler. * @return Returns a reference to this object so that method calls can be chained together. */ public InferenceSchedulerSummary withModelName(String modelName) { setModelName(modelName); return this; } /** *

* The Amazon Resource Name (ARN) of the ML model used by the inference scheduler. *

* * @param modelArn * The Amazon Resource Name (ARN) of the ML model used by the inference scheduler. */ public void setModelArn(String modelArn) { this.modelArn = modelArn; } /** *

* The Amazon Resource Name (ARN) of the ML model used by the inference scheduler. *

* * @return The Amazon Resource Name (ARN) of the ML model used by the inference scheduler. */ public String getModelArn() { return this.modelArn; } /** *

* The Amazon Resource Name (ARN) of the ML model used by the inference scheduler. *

* * @param modelArn * The Amazon Resource Name (ARN) of the ML model used by the inference scheduler. * @return Returns a reference to this object so that method calls can be chained together. */ public InferenceSchedulerSummary withModelArn(String modelArn) { setModelArn(modelArn); return this; } /** *

* The name of the inference scheduler. *

* * @param inferenceSchedulerName * The name of the inference scheduler. */ public void setInferenceSchedulerName(String inferenceSchedulerName) { this.inferenceSchedulerName = inferenceSchedulerName; } /** *

* The name of the inference scheduler. *

* * @return The name of the inference scheduler. */ public String getInferenceSchedulerName() { return this.inferenceSchedulerName; } /** *

* The name of the inference scheduler. *

* * @param inferenceSchedulerName * The name of the inference scheduler. * @return Returns a reference to this object so that method calls can be chained together. */ public InferenceSchedulerSummary withInferenceSchedulerName(String inferenceSchedulerName) { setInferenceSchedulerName(inferenceSchedulerName); return this; } /** *

* The Amazon Resource Name (ARN) of the inference scheduler. *

* * @param inferenceSchedulerArn * The Amazon Resource Name (ARN) of the inference scheduler. */ public void setInferenceSchedulerArn(String inferenceSchedulerArn) { this.inferenceSchedulerArn = inferenceSchedulerArn; } /** *

* The Amazon Resource Name (ARN) of the inference scheduler. *

* * @return The Amazon Resource Name (ARN) of the inference scheduler. */ public String getInferenceSchedulerArn() { return this.inferenceSchedulerArn; } /** *

* The Amazon Resource Name (ARN) of the inference scheduler. *

* * @param inferenceSchedulerArn * The Amazon Resource Name (ARN) of the inference scheduler. * @return Returns a reference to this object so that method calls can be chained together. */ public InferenceSchedulerSummary withInferenceSchedulerArn(String inferenceSchedulerArn) { setInferenceSchedulerArn(inferenceSchedulerArn); return this; } /** *

* Indicates the status of the inference scheduler. *

* * @param status * Indicates the status of the inference scheduler. * @see InferenceSchedulerStatus */ public void setStatus(String status) { this.status = status; } /** *

* Indicates the status of the inference scheduler. *

* * @return Indicates the status of the inference scheduler. * @see InferenceSchedulerStatus */ public String getStatus() { return this.status; } /** *

* Indicates the status of the inference scheduler. *

* * @param status * Indicates the status of the inference scheduler. * @return Returns a reference to this object so that method calls can be chained together. * @see InferenceSchedulerStatus */ public InferenceSchedulerSummary withStatus(String status) { setStatus(status); return this; } /** *

* Indicates the status of the inference scheduler. *

* * @param status * Indicates the status of the inference scheduler. * @return Returns a reference to this object so that method calls can be chained together. * @see InferenceSchedulerStatus */ public InferenceSchedulerSummary withStatus(InferenceSchedulerStatus status) { this.status = status.toString(); return this; } /** *

* A period of time (in minutes) by which inference on the data is delayed after the data starts. For instance, if an offset delay of five minutes is selected, inference does not begin until the first data measurement after the five-minute mark; the scheduler wakes up at the configured frequency and waits the additional five minutes before checking the customer's S3 bucket. Customers can keep uploading data at the same frequency and don't need to stop and restart the scheduler when uploading new data. *

* * @param dataDelayOffsetInMinutes * A period of time (in minutes) by which inference on the data is delayed after the data starts. For instance, if an offset delay of five minutes is selected, inference does not begin until the first data measurement after the five-minute mark; the scheduler wakes up at the configured frequency and waits the additional five minutes before checking the customer's S3 bucket. Customers can keep uploading data at the same frequency and don't need to stop and restart the scheduler when uploading new data. */ public void setDataDelayOffsetInMinutes(Long dataDelayOffsetInMinutes) { this.dataDelayOffsetInMinutes = dataDelayOffsetInMinutes; } /** *

* A period of time (in minutes) by which inference on the data is delayed after the data starts. For instance, if an offset delay of five minutes is selected, inference does not begin until the first data measurement after the five-minute mark; the scheduler wakes up at the configured frequency and waits the additional five minutes before checking the customer's S3 bucket. Customers can keep uploading data at the same frequency and don't need to stop and restart the scheduler when uploading new data. *

* * @return A period of time (in minutes) by which inference on the data is delayed after the data starts. For instance, if an offset delay of five minutes is selected, inference does not begin until the first data measurement after the five-minute mark; the scheduler wakes up at the configured frequency and waits the additional five minutes before checking the customer's S3 bucket. Customers can keep uploading data at the same frequency and don't need to stop and restart the scheduler when uploading new data. */ public Long getDataDelayOffsetInMinutes() { return this.dataDelayOffsetInMinutes; } /** *

* A period of time (in minutes) by which inference on the data is delayed after the data starts. For instance, if an offset delay of five minutes is selected, inference does not begin until the first data measurement after the five-minute mark; the scheduler wakes up at the configured frequency and waits the additional five minutes before checking the customer's S3 bucket. Customers can keep uploading data at the same frequency and don't need to stop and restart the scheduler when uploading new data. *

* * @param dataDelayOffsetInMinutes * A period of time (in minutes) by which inference on the data is delayed after the data starts. For instance, if an offset delay of five minutes is selected, inference does not begin until the first data measurement after the five-minute mark; the scheduler wakes up at the configured frequency and waits the additional five minutes before checking the customer's S3 bucket. Customers can keep uploading data at the same frequency and don't need to stop and restart the scheduler when uploading new data. * @return Returns a reference to this object so that method calls can be chained together. */ public InferenceSchedulerSummary withDataDelayOffsetInMinutes(Long dataDelayOffsetInMinutes) { setDataDelayOffsetInMinutes(dataDelayOffsetInMinutes); return this; } /** *

* How often data is uploaded to the source S3 bucket for the input data. This value is the length of time between * data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the real-time data * to the source bucket once every 5 minutes. This frequency also determines how often Amazon Lookout for Equipment * starts a scheduled inference on your data. In this example, it starts once every 5 minutes. *

* * @param dataUploadFrequency * How often data is uploaded to the source S3 bucket for the input data. This value is the length of time * between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the * real-time data to the source bucket once every 5 minutes. This frequency also determines how often Amazon * Lookout for Equipment starts a scheduled inference on your data. In this example, it starts once every 5 * minutes. * @see DataUploadFrequency */ public void setDataUploadFrequency(String dataUploadFrequency) { this.dataUploadFrequency = dataUploadFrequency; } /** *

* How often data is uploaded to the source S3 bucket for the input data. This value is the length of time between * data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the real-time data * to the source bucket once every 5 minutes. This frequency also determines how often Amazon Lookout for Equipment * starts a scheduled inference on your data. In this example, it starts once every 5 minutes. *

* * @return How often data is uploaded to the source S3 bucket for the input data. This value is the length of time * between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the * real-time data to the source bucket once every 5 minutes. This frequency also determines how often Amazon * Lookout for Equipment starts a scheduled inference on your data. In this example, it starts once every 5 * minutes. * @see DataUploadFrequency */ public String getDataUploadFrequency() { return this.dataUploadFrequency; } /** *

* How often data is uploaded to the source S3 bucket for the input data. This value is the length of time between * data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the real-time data * to the source bucket once every 5 minutes. This frequency also determines how often Amazon Lookout for Equipment * starts a scheduled inference on your data. In this example, it starts once every 5 minutes. *

* * @param dataUploadFrequency * How often data is uploaded to the source S3 bucket for the input data. This value is the length of time * between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the * real-time data to the source bucket once every 5 minutes. This frequency also determines how often Amazon * Lookout for Equipment starts a scheduled inference on your data. In this example, it starts once every 5 * minutes. * @return Returns a reference to this object so that method calls can be chained together. * @see DataUploadFrequency */ public InferenceSchedulerSummary withDataUploadFrequency(String dataUploadFrequency) { setDataUploadFrequency(dataUploadFrequency); return this; } /** *

* How often data is uploaded to the source S3 bucket for the input data. This value is the length of time between * data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the real-time data * to the source bucket once every 5 minutes. This frequency also determines how often Amazon Lookout for Equipment * starts a scheduled inference on your data. In this example, it starts once every 5 minutes. *

* * @param dataUploadFrequency * How often data is uploaded to the source S3 bucket for the input data. This value is the length of time * between data uploads. For instance, if you select 5 minutes, Amazon Lookout for Equipment will upload the * real-time data to the source bucket once every 5 minutes. This frequency also determines how often Amazon * Lookout for Equipment starts a scheduled inference on your data. In this example, it starts once every 5 * minutes. * @return Returns a reference to this object so that method calls can be chained together. * @see DataUploadFrequency */ public InferenceSchedulerSummary withDataUploadFrequency(DataUploadFrequency dataUploadFrequency) { this.dataUploadFrequency = dataUploadFrequency.toString(); return this; } /** *

* Indicates whether the latest execution for the inference scheduler was Anomalous (anomalous events found) or * Normal (no anomalous events found). *

* * @param latestInferenceResult * Indicates whether the latest execution for the inference scheduler was Anomalous (anomalous events found) * or Normal (no anomalous events found). * @see LatestInferenceResult */ public void setLatestInferenceResult(String latestInferenceResult) { this.latestInferenceResult = latestInferenceResult; } /** *

* Indicates whether the latest execution for the inference scheduler was Anomalous (anomalous events found) or * Normal (no anomalous events found). *

* * @return Indicates whether the latest execution for the inference scheduler was Anomalous (anomalous events found) * or Normal (no anomalous events found). * @see LatestInferenceResult */ public String getLatestInferenceResult() { return this.latestInferenceResult; } /** *

* Indicates whether the latest execution for the inference scheduler was Anomalous (anomalous events found) or * Normal (no anomalous events found). *

* * @param latestInferenceResult * Indicates whether the latest execution for the inference scheduler was Anomalous (anomalous events found) * or Normal (no anomalous events found). * @return Returns a reference to this object so that method calls can be chained together. * @see LatestInferenceResult */ public InferenceSchedulerSummary withLatestInferenceResult(String latestInferenceResult) { setLatestInferenceResult(latestInferenceResult); return this; } /** *

* Indicates whether the latest execution for the inference scheduler was Anomalous (anomalous events found) or * Normal (no anomalous events found). *

* * @param latestInferenceResult * Indicates whether the latest execution for the inference scheduler was Anomalous (anomalous events found) * or Normal (no anomalous events found). * @return Returns a reference to this object so that method calls can be chained together. * @see LatestInferenceResult */ public InferenceSchedulerSummary withLatestInferenceResult(LatestInferenceResult latestInferenceResult) { this.latestInferenceResult = latestInferenceResult.toString(); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getModelName() != null) sb.append("ModelName: ").append(getModelName()).append(","); if (getModelArn() != null) sb.append("ModelArn: ").append(getModelArn()).append(","); if (getInferenceSchedulerName() != null) sb.append("InferenceSchedulerName: ").append(getInferenceSchedulerName()).append(","); if (getInferenceSchedulerArn() != null) sb.append("InferenceSchedulerArn: ").append(getInferenceSchedulerArn()).append(","); if (getStatus() != null) sb.append("Status: ").append(getStatus()).append(","); if (getDataDelayOffsetInMinutes() != null) sb.append("DataDelayOffsetInMinutes: ").append(getDataDelayOffsetInMinutes()).append(","); if (getDataUploadFrequency() != null) sb.append("DataUploadFrequency: ").append(getDataUploadFrequency()).append(","); if (getLatestInferenceResult() != null) sb.append("LatestInferenceResult: ").append(getLatestInferenceResult()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof InferenceSchedulerSummary == false) return false; InferenceSchedulerSummary other = (InferenceSchedulerSummary) obj; if (other.getModelName() == null ^ this.getModelName() == null) return false; if (other.getModelName() != null && other.getModelName().equals(this.getModelName()) == false) return false; if (other.getModelArn() == null ^ this.getModelArn() == null) return false; if (other.getModelArn() != null && other.getModelArn().equals(this.getModelArn()) == false) return false; if (other.getInferenceSchedulerName() == null ^ this.getInferenceSchedulerName() == null) return false; if (other.getInferenceSchedulerName() != null && other.getInferenceSchedulerName().equals(this.getInferenceSchedulerName()) == false) return false; if (other.getInferenceSchedulerArn() == null ^ this.getInferenceSchedulerArn() == null) return false; if (other.getInferenceSchedulerArn() != null && other.getInferenceSchedulerArn().equals(this.getInferenceSchedulerArn()) == false) return false; if (other.getStatus() == null ^ this.getStatus() == null) return false; if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false) return false; if (other.getDataDelayOffsetInMinutes() == null ^ this.getDataDelayOffsetInMinutes() == null) return false; if (other.getDataDelayOffsetInMinutes() != null && other.getDataDelayOffsetInMinutes().equals(this.getDataDelayOffsetInMinutes()) == false) return false; if (other.getDataUploadFrequency() == null ^ this.getDataUploadFrequency() == null) return false; if (other.getDataUploadFrequency() != null && other.getDataUploadFrequency().equals(this.getDataUploadFrequency()) == false) return false; if 
(other.getLatestInferenceResult() == null ^ this.getLatestInferenceResult() == null) return false; if (other.getLatestInferenceResult() != null && other.getLatestInferenceResult().equals(this.getLatestInferenceResult()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getModelName() == null) ? 0 : getModelName().hashCode()); hashCode = prime * hashCode + ((getModelArn() == null) ? 0 : getModelArn().hashCode()); hashCode = prime * hashCode + ((getInferenceSchedulerName() == null) ? 0 : getInferenceSchedulerName().hashCode()); hashCode = prime * hashCode + ((getInferenceSchedulerArn() == null) ? 0 : getInferenceSchedulerArn().hashCode()); hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode()); hashCode = prime * hashCode + ((getDataDelayOffsetInMinutes() == null) ? 0 : getDataDelayOffsetInMinutes().hashCode()); hashCode = prime * hashCode + ((getDataUploadFrequency() == null) ? 0 : getDataUploadFrequency().hashCode()); hashCode = prime * hashCode + ((getLatestInferenceResult() == null) ? 0 : getLatestInferenceResult().hashCode()); return hashCode; } @Override public InferenceSchedulerSummary clone() { try { return (InferenceSchedulerSummary) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.lookoutequipment.model.transform.InferenceSchedulerSummaryMarshaller.getInstance().marshall(this, protocolMarshaller); } }