/*
 * Copyright 2018-2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.glue.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * @see AWS API Documentation
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateCrawlerRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**

     * Name of the new crawler.
     */
    private String name;

    /**

     * The IAM role or Amazon Resource Name (ARN) of an IAM role that is used by the new crawler to access customer
     * resources.
     */
    private String role;

    /**

     * The Glue database where results are stored, such as:
     * arn:aws:daylight:us-east-1::database/sometable/*.
     */
    private String databaseName;

    /**

     * A description of the new crawler.
     */
    private String description;

    /**

     * A list of targets to crawl.
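     * <p>
     * A minimal sketch of supplying a target, for illustration only: the S3 path is hypothetical, and {@code request}
     * stands for the UpdateCrawlerRequest being built.
     * </p>
     * <pre>{@code
     * // Point the crawler at a single (hypothetical) S3 prefix.
     * request.withTargets(new CrawlerTargets()
     *         .withS3Targets(new S3Target().withPath("s3://example-bucket/example-prefix/")));
     * }</pre>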

     */
    private CrawlerTargets targets;

    /**

     * A cron expression used to specify the schedule (see Time-Based Schedules for Jobs and Crawlers). For example,
     * to run something every day at 12:15 UTC, you would specify: cron(15 12 * * ? *).
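     * <p>
     * A minimal usage sketch; the {@code glue} client variable and the crawler name are assumptions for illustration,
     * not values defined by this class.
     * </p>
     * <pre>{@code
     * // Run the (hypothetical) crawler "my-crawler" every day at 23:45 UTC.
     * glue.updateCrawler(new UpdateCrawlerRequest()
     *         .withName("my-crawler")
     *         .withSchedule("cron(45 23 * * ? *)"));
     * }</pre>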

     */
    private String schedule;

    /**

     * A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in
     * a crawl, but these custom classifiers always override the default classifiers for a given classification.
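     * <p>
     * For illustration only: the classifier name below is hypothetical and must refer to a classifier you have
     * already created; {@code request} stands for the UpdateCrawlerRequest being built.
     * </p>
     * <pre>{@code
     * // Use a previously registered custom classifier in addition to the built-in ones.
     * request.withClassifiers("my-custom-csv-classifier");
     * }</pre>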

     */
    private java.util.List<String> classifiers;

    /**

     * The table prefix used for catalog tables that are created.
     */
    private String tablePrefix;

    /**

     * The policy for the crawler's update and deletion behavior.
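     * <p>
     * A hedged sketch of configuring the policy; the behavior strings are the enum value names used by recent SDK
     * versions (verify against the version you use), and {@code request} is the UpdateCrawlerRequest being built.
     * </p>
     * <pre>{@code
     * // Update changed tables in the Data Catalog, but only log deletions instead of removing tables.
     * request.withSchemaChangePolicy(new SchemaChangePolicy()
     *         .withUpdateBehavior("UPDATE_IN_DATABASE")
     *         .withDeleteBehavior("LOG"));
     * }</pre>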

     */
    private SchemaChangePolicy schemaChangePolicy;

    /**

     * A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added
     * since the last crawler run.
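     * <p>
     * An assumption-labeled sketch; the behavior string is the RecrawlBehavior enum value name used by recent SDK
     * versions, and {@code request} is the UpdateCrawlerRequest being built.
     * </p>
     * <pre>{@code
     * // Only crawl folders added since the last run instead of the entire dataset.
     * request.withRecrawlPolicy(new RecrawlPolicy().withRecrawlBehavior("CRAWL_NEW_FOLDERS_ONLY"));
     * }</pre>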

     */
    private RecrawlPolicy recrawlPolicy;

    /**

     * Specifies data lineage configuration settings for the crawler.
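     * <p>
     * A minimal sketch, assuming the CrawlerLineageSettings value names used by recent SDK versions; {@code request}
     * is the UpdateCrawlerRequest being built.
     * </p>
     * <pre>{@code
     * // Turn data lineage collection on for this crawler.
     * request.withLineageConfiguration(new LineageConfiguration().withCrawlerLineageSettings("ENABLE"));
     * }</pre>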

     */
    private LineageConfiguration lineageConfiguration;

    /**

     * Specifies Lake Formation configuration settings for the crawler.
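     * <p>
     * A hedged sketch; the member name is assumed from the LakeFormationConfiguration model in recent SDK versions,
     * and {@code request} is the UpdateCrawlerRequest being built.
     * </p>
     * <pre>{@code
     * // Use Lake Formation credentials for crawling instead of the crawler's IAM role credentials.
     * request.withLakeFormationConfiguration(new LakeFormationConfiguration()
     *         .withUseLakeFormationCredentials(true));
     * }</pre>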

     */
    private LakeFormationConfiguration lakeFormationConfiguration;

    /**

     * Crawler configuration information. This versioned JSON string allows users to specify aspects of a crawler's
     * behavior. For more information, see Setting crawler configuration options.
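     * <p>
     * One illustrative shape for this JSON string, taken as an assumption from the crawler configuration
     * documentation; adjust it to your needs. {@code request} is the UpdateCrawlerRequest being built.
     * </p>
     * <pre>{@code
     * // Make newly discovered partitions inherit their schema from the parent table.
     * request.withConfiguration(
     *         "{\"Version\": 1.0, \"CrawlerOutput\": {\"Partitions\": {\"AddOrUpdateBehavior\": \"InheritFromTable\"}}}");
     * }</pre>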

     */
    private String configuration;

    /**

     * The name of the SecurityConfiguration structure to be used by this crawler.
     */
    private String crawlerSecurityConfiguration;

    /**

* Name of the new crawler. *

* * @param name * Name of the new crawler. */ public void setName(String name) { this.name = name; } /** *

* Name of the new crawler. *

* * @return Name of the new crawler. */ public String getName() { return this.name; } /** *

* Name of the new crawler. *

* * @param name * Name of the new crawler. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withName(String name) { setName(name); return this; } /** *

* The IAM role or Amazon Resource Name (ARN) of an IAM role that is used by the new crawler to access customer * resources. *

* * @param role * The IAM role or Amazon Resource Name (ARN) of an IAM role that is used by the new crawler to access * customer resources. */ public void setRole(String role) { this.role = role; } /** *

* The IAM role or Amazon Resource Name (ARN) of an IAM role that is used by the new crawler to access customer * resources. *

* * @return The IAM role or Amazon Resource Name (ARN) of an IAM role that is used by the new crawler to access * customer resources. */ public String getRole() { return this.role; } /** *

* The IAM role or Amazon Resource Name (ARN) of an IAM role that is used by the new crawler to access customer * resources. *

* * @param role * The IAM role or Amazon Resource Name (ARN) of an IAM role that is used by the new crawler to access * customer resources. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withRole(String role) { setRole(role); return this; } /** *

* The Glue database where results are stored, such as: * arn:aws:daylight:us-east-1::database/sometable/*. *

* * @param databaseName * The Glue database where results are stored, such as: * arn:aws:daylight:us-east-1::database/sometable/*. */ public void setDatabaseName(String databaseName) { this.databaseName = databaseName; } /** *

* The Glue database where results are stored, such as: * arn:aws:daylight:us-east-1::database/sometable/*. *

* * @return The Glue database where results are stored, such as: * arn:aws:daylight:us-east-1::database/sometable/*. */ public String getDatabaseName() { return this.databaseName; } /** *

* The Glue database where results are stored, such as: * arn:aws:daylight:us-east-1::database/sometable/*. *

* * @param databaseName * The Glue database where results are stored, such as: * arn:aws:daylight:us-east-1::database/sometable/*. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withDatabaseName(String databaseName) { setDatabaseName(databaseName); return this; } /** *

* A description of the new crawler. *

* * @param description * A description of the new crawler. */ public void setDescription(String description) { this.description = description; } /** *

* A description of the new crawler. *

* * @return A description of the new crawler. */ public String getDescription() { return this.description; } /** *

* A description of the new crawler. *

* * @param description * A description of the new crawler. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withDescription(String description) { setDescription(description); return this; } /** *

* A list of targets to crawl. *

* * @param targets * A list of targets to crawl. */ public void setTargets(CrawlerTargets targets) { this.targets = targets; } /** *

* A list of targets to crawl. *

* * @return A list of targets to crawl. */ public CrawlerTargets getTargets() { return this.targets; } /** *

* A list of targets to crawl. *

* * @param targets * A list of targets to crawl. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withTargets(CrawlerTargets targets) { setTargets(targets); return this; } /** *

     * A cron expression used to specify the schedule (see Time-Based Schedules for Jobs and Crawlers). For example,
     * to run something every day at 12:15 UTC, you would specify: cron(15 12 * * ? *).

     *
     * @param schedule
     *        A cron expression used to specify the schedule (see Time-Based Schedules for Jobs and Crawlers). For
     *        example, to run something every day at 12:15 UTC, you would specify: cron(15 12 * * ? *).
     */
    public void setSchedule(String schedule) {
        this.schedule = schedule;
    }

    /**

     * A cron expression used to specify the schedule (see Time-Based Schedules for Jobs and Crawlers). For example,
     * to run something every day at 12:15 UTC, you would specify: cron(15 12 * * ? *).

     *
     * @return A cron expression used to specify the schedule (see Time-Based Schedules for Jobs and Crawlers). For
     *         example, to run something every day at 12:15 UTC, you would specify: cron(15 12 * * ? *).
     */
    public String getSchedule() {
        return this.schedule;
    }

    /**

     * A cron expression used to specify the schedule (see Time-Based Schedules for Jobs and Crawlers). For example,
     * to run something every day at 12:15 UTC, you would specify: cron(15 12 * * ? *).

     *
     * @param schedule
     *        A cron expression used to specify the schedule (see Time-Based Schedules for Jobs and Crawlers). For
     *        example, to run something every day at 12:15 UTC, you would specify: cron(15 12 * * ? *).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateCrawlerRequest withSchedule(String schedule) {
        setSchedule(schedule);
        return this;
    }

    /**

* A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in a * crawl, but these custom classifiers always override the default classifiers for a given classification. *

     *
     * @return A list of custom classifiers that the user has registered. By default, all built-in classifiers are
     *         included in a crawl, but these custom classifiers always override the default classifiers for a given
     *         classification.
     */
    public java.util.List<String> getClassifiers() {
        return classifiers;
    }

    /**

* A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in a * crawl, but these custom classifiers always override the default classifiers for a given classification. *

     *
     * @param classifiers
     *        A list of custom classifiers that the user has registered. By default, all built-in classifiers are
     *        included in a crawl, but these custom classifiers always override the default classifiers for a given
     *        classification.
     */
    public void setClassifiers(java.util.Collection<String> classifiers) {
        if (classifiers == null) {
            this.classifiers = null;
            return;
        }

        this.classifiers = new java.util.ArrayList<String>(classifiers);
    }

    /**

* A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in a * crawl, but these custom classifiers always override the default classifiers for a given classification. *

*

* NOTE: This method appends the values to the existing list (if any). Use * {@link #setClassifiers(java.util.Collection)} or {@link #withClassifiers(java.util.Collection)} if you want to * override the existing values. *
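     * <p>
     * A short sketch of the difference; the classifier names are hypothetical and {@code request} is the
     * UpdateCrawlerRequest being built.
     * </p>
     * <pre>{@code
     * request.withClassifiers("clf-a");                          // list is now [clf-a]
     * request.withClassifiers("clf-b");                          // varargs form appends: [clf-a, clf-b]
     * request.setClassifiers(java.util.Arrays.asList("clf-c"));  // collection setter replaces: [clf-c]
     * }</pre>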

     *
     * @param classifiers
     *        A list of custom classifiers that the user has registered. By default, all built-in classifiers are
     *        included in a crawl, but these custom classifiers always override the default classifiers for a given
     *        classification.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateCrawlerRequest withClassifiers(String... classifiers) {
        if (this.classifiers == null) {
            setClassifiers(new java.util.ArrayList<String>(classifiers.length));
        }
        for (String ele : classifiers) {
            this.classifiers.add(ele);
        }
        return this;
    }

    /**

* A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in a * crawl, but these custom classifiers always override the default classifiers for a given classification. *

     *
     * @param classifiers
     *        A list of custom classifiers that the user has registered. By default, all built-in classifiers are
     *        included in a crawl, but these custom classifiers always override the default classifiers for a given
     *        classification.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateCrawlerRequest withClassifiers(java.util.Collection<String> classifiers) {
        setClassifiers(classifiers);
        return this;
    }

    /**

* The table prefix used for catalog tables that are created. *

* * @param tablePrefix * The table prefix used for catalog tables that are created. */ public void setTablePrefix(String tablePrefix) { this.tablePrefix = tablePrefix; } /** *

* The table prefix used for catalog tables that are created. *

* * @return The table prefix used for catalog tables that are created. */ public String getTablePrefix() { return this.tablePrefix; } /** *

* The table prefix used for catalog tables that are created. *

* * @param tablePrefix * The table prefix used for catalog tables that are created. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withTablePrefix(String tablePrefix) { setTablePrefix(tablePrefix); return this; } /** *

* The policy for the crawler's update and deletion behavior. *

* * @param schemaChangePolicy * The policy for the crawler's update and deletion behavior. */ public void setSchemaChangePolicy(SchemaChangePolicy schemaChangePolicy) { this.schemaChangePolicy = schemaChangePolicy; } /** *

* The policy for the crawler's update and deletion behavior. *

* * @return The policy for the crawler's update and deletion behavior. */ public SchemaChangePolicy getSchemaChangePolicy() { return this.schemaChangePolicy; } /** *

* The policy for the crawler's update and deletion behavior. *

* * @param schemaChangePolicy * The policy for the crawler's update and deletion behavior. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withSchemaChangePolicy(SchemaChangePolicy schemaChangePolicy) { setSchemaChangePolicy(schemaChangePolicy); return this; } /** *

* A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added since * the last crawler run. *

* * @param recrawlPolicy * A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were * added since the last crawler run. */ public void setRecrawlPolicy(RecrawlPolicy recrawlPolicy) { this.recrawlPolicy = recrawlPolicy; } /** *

* A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added since * the last crawler run. *

* * @return A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were * added since the last crawler run. */ public RecrawlPolicy getRecrawlPolicy() { return this.recrawlPolicy; } /** *

* A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added since * the last crawler run. *

* * @param recrawlPolicy * A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were * added since the last crawler run. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withRecrawlPolicy(RecrawlPolicy recrawlPolicy) { setRecrawlPolicy(recrawlPolicy); return this; } /** *

* Specifies data lineage configuration settings for the crawler. *

* * @param lineageConfiguration * Specifies data lineage configuration settings for the crawler. */ public void setLineageConfiguration(LineageConfiguration lineageConfiguration) { this.lineageConfiguration = lineageConfiguration; } /** *

* Specifies data lineage configuration settings for the crawler. *

* * @return Specifies data lineage configuration settings for the crawler. */ public LineageConfiguration getLineageConfiguration() { return this.lineageConfiguration; } /** *

* Specifies data lineage configuration settings for the crawler. *

* * @param lineageConfiguration * Specifies data lineage configuration settings for the crawler. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withLineageConfiguration(LineageConfiguration lineageConfiguration) { setLineageConfiguration(lineageConfiguration); return this; } /** *

* Specifies Lake Formation configuration settings for the crawler. *

* * @param lakeFormationConfiguration * Specifies Lake Formation configuration settings for the crawler. */ public void setLakeFormationConfiguration(LakeFormationConfiguration lakeFormationConfiguration) { this.lakeFormationConfiguration = lakeFormationConfiguration; } /** *

* Specifies Lake Formation configuration settings for the crawler. *

* * @return Specifies Lake Formation configuration settings for the crawler. */ public LakeFormationConfiguration getLakeFormationConfiguration() { return this.lakeFormationConfiguration; } /** *

* Specifies Lake Formation configuration settings for the crawler. *

* * @param lakeFormationConfiguration * Specifies Lake Formation configuration settings for the crawler. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withLakeFormationConfiguration(LakeFormationConfiguration lakeFormationConfiguration) { setLakeFormationConfiguration(lakeFormationConfiguration); return this; } /** *

* Crawler configuration information. This versioned JSON string allows users to specify aspects of a crawler's * behavior. For more information, see Setting crawler configuration * options. *

* * @param configuration * Crawler configuration information. This versioned JSON string allows users to specify aspects of a * crawler's behavior. For more information, see Setting crawler configuration * options. */ public void setConfiguration(String configuration) { this.configuration = configuration; } /** *

* Crawler configuration information. This versioned JSON string allows users to specify aspects of a crawler's * behavior. For more information, see Setting crawler configuration * options. *

* * @return Crawler configuration information. This versioned JSON string allows users to specify aspects of a * crawler's behavior. For more information, see Setting crawler * configuration options. */ public String getConfiguration() { return this.configuration; } /** *

* Crawler configuration information. This versioned JSON string allows users to specify aspects of a crawler's * behavior. For more information, see Setting crawler configuration * options. *

* * @param configuration * Crawler configuration information. This versioned JSON string allows users to specify aspects of a * crawler's behavior. For more information, see Setting crawler configuration * options. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withConfiguration(String configuration) { setConfiguration(configuration); return this; } /** *

* The name of the SecurityConfiguration structure to be used by this crawler. *

* * @param crawlerSecurityConfiguration * The name of the SecurityConfiguration structure to be used by this crawler. */ public void setCrawlerSecurityConfiguration(String crawlerSecurityConfiguration) { this.crawlerSecurityConfiguration = crawlerSecurityConfiguration; } /** *

* The name of the SecurityConfiguration structure to be used by this crawler. *

* * @return The name of the SecurityConfiguration structure to be used by this crawler. */ public String getCrawlerSecurityConfiguration() { return this.crawlerSecurityConfiguration; } /** *

* The name of the SecurityConfiguration structure to be used by this crawler. *

* * @param crawlerSecurityConfiguration * The name of the SecurityConfiguration structure to be used by this crawler. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateCrawlerRequest withCrawlerSecurityConfiguration(String crawlerSecurityConfiguration) { setCrawlerSecurityConfiguration(crawlerSecurityConfiguration); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getName() != null) sb.append("Name: ").append(getName()).append(","); if (getRole() != null) sb.append("Role: ").append(getRole()).append(","); if (getDatabaseName() != null) sb.append("DatabaseName: ").append(getDatabaseName()).append(","); if (getDescription() != null) sb.append("Description: ").append(getDescription()).append(","); if (getTargets() != null) sb.append("Targets: ").append(getTargets()).append(","); if (getSchedule() != null) sb.append("Schedule: ").append(getSchedule()).append(","); if (getClassifiers() != null) sb.append("Classifiers: ").append(getClassifiers()).append(","); if (getTablePrefix() != null) sb.append("TablePrefix: ").append(getTablePrefix()).append(","); if (getSchemaChangePolicy() != null) sb.append("SchemaChangePolicy: ").append(getSchemaChangePolicy()).append(","); if (getRecrawlPolicy() != null) sb.append("RecrawlPolicy: ").append(getRecrawlPolicy()).append(","); if (getLineageConfiguration() != null) sb.append("LineageConfiguration: ").append(getLineageConfiguration()).append(","); if (getLakeFormationConfiguration() != null) sb.append("LakeFormationConfiguration: ").append(getLakeFormationConfiguration()).append(","); if (getConfiguration() != null) sb.append("Configuration: ").append(getConfiguration()).append(","); if (getCrawlerSecurityConfiguration() != null) sb.append("CrawlerSecurityConfiguration: ").append(getCrawlerSecurityConfiguration()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof UpdateCrawlerRequest == false) return false; UpdateCrawlerRequest other = (UpdateCrawlerRequest) obj; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getRole() == null ^ this.getRole() == null) return false; if (other.getRole() != null && other.getRole().equals(this.getRole()) == false) return false; if (other.getDatabaseName() == null ^ this.getDatabaseName() == null) return false; if (other.getDatabaseName() != null && other.getDatabaseName().equals(this.getDatabaseName()) == false) return false; if (other.getDescription() == null ^ this.getDescription() == null) return false; if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false) return false; if (other.getTargets() == null ^ this.getTargets() == null) return false; if (other.getTargets() != null && other.getTargets().equals(this.getTargets()) == false) return false; if (other.getSchedule() == null ^ this.getSchedule() == null) return false; if (other.getSchedule() != null && other.getSchedule().equals(this.getSchedule()) == false) return false; if (other.getClassifiers() == null ^ 
this.getClassifiers() == null) return false; if (other.getClassifiers() != null && other.getClassifiers().equals(this.getClassifiers()) == false) return false; if (other.getTablePrefix() == null ^ this.getTablePrefix() == null) return false; if (other.getTablePrefix() != null && other.getTablePrefix().equals(this.getTablePrefix()) == false) return false; if (other.getSchemaChangePolicy() == null ^ this.getSchemaChangePolicy() == null) return false; if (other.getSchemaChangePolicy() != null && other.getSchemaChangePolicy().equals(this.getSchemaChangePolicy()) == false) return false; if (other.getRecrawlPolicy() == null ^ this.getRecrawlPolicy() == null) return false; if (other.getRecrawlPolicy() != null && other.getRecrawlPolicy().equals(this.getRecrawlPolicy()) == false) return false; if (other.getLineageConfiguration() == null ^ this.getLineageConfiguration() == null) return false; if (other.getLineageConfiguration() != null && other.getLineageConfiguration().equals(this.getLineageConfiguration()) == false) return false; if (other.getLakeFormationConfiguration() == null ^ this.getLakeFormationConfiguration() == null) return false; if (other.getLakeFormationConfiguration() != null && other.getLakeFormationConfiguration().equals(this.getLakeFormationConfiguration()) == false) return false; if (other.getConfiguration() == null ^ this.getConfiguration() == null) return false; if (other.getConfiguration() != null && other.getConfiguration().equals(this.getConfiguration()) == false) return false; if (other.getCrawlerSecurityConfiguration() == null ^ this.getCrawlerSecurityConfiguration() == null) return false; if (other.getCrawlerSecurityConfiguration() != null && other.getCrawlerSecurityConfiguration().equals(this.getCrawlerSecurityConfiguration()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getRole() == null) ? 0 : getRole().hashCode()); hashCode = prime * hashCode + ((getDatabaseName() == null) ? 0 : getDatabaseName().hashCode()); hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode()); hashCode = prime * hashCode + ((getTargets() == null) ? 0 : getTargets().hashCode()); hashCode = prime * hashCode + ((getSchedule() == null) ? 0 : getSchedule().hashCode()); hashCode = prime * hashCode + ((getClassifiers() == null) ? 0 : getClassifiers().hashCode()); hashCode = prime * hashCode + ((getTablePrefix() == null) ? 0 : getTablePrefix().hashCode()); hashCode = prime * hashCode + ((getSchemaChangePolicy() == null) ? 0 : getSchemaChangePolicy().hashCode()); hashCode = prime * hashCode + ((getRecrawlPolicy() == null) ? 0 : getRecrawlPolicy().hashCode()); hashCode = prime * hashCode + ((getLineageConfiguration() == null) ? 0 : getLineageConfiguration().hashCode()); hashCode = prime * hashCode + ((getLakeFormationConfiguration() == null) ? 0 : getLakeFormationConfiguration().hashCode()); hashCode = prime * hashCode + ((getConfiguration() == null) ? 0 : getConfiguration().hashCode()); hashCode = prime * hashCode + ((getCrawlerSecurityConfiguration() == null) ? 0 : getCrawlerSecurityConfiguration().hashCode()); return hashCode; } @Override public UpdateCrawlerRequest clone() { return (UpdateCrawlerRequest) super.clone(); } }