/* * Copyright 2018-2023 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.datasync.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see AWS API * Documentation */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class UpdateLocationHdfsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** *
* The Amazon Resource Name (ARN) of the source HDFS cluster location. *
*/ private String locationArn; /** ** A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS * cluster. *
*/ private String subdirectory; /** ** The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and * renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You * can use only one NameNode. *
*/ private java.util.List* The size of the data blocks to write into the HDFS cluster. *
*/ private Integer blockSize; /** ** The number of DataNodes to replicate the data to when writing to the HDFS cluster. *
*/ private Integer replicationFactor; /** ** The URI of the HDFS cluster's Key Management Server (KMS). *
*/ private String kmsKeyProviderUri; /** ** The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer privacy * settings configured on the Hadoop Distributed File System (HDFS) cluster. *
*/ private QopConfiguration qopConfiguration; /** ** The type of authentication used to determine the identity of the user. *
*/ private String authenticationType; /** ** The user name used to identify the client on the host operating system. *
*/ private String simpleUser; /** ** The Kerberos principal with access to the files and folders on the HDFS cluster. *
*/ private String kerberosPrincipal; /** ** The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted * keys. You can load the keytab from a file by providing the file's address. If you use the CLI, it performs base64 * encoding for you. Otherwise, provide the base64-encoded text. *
*/ private java.nio.ByteBuffer kerberosKeytab; /** *
* The krb5.conf
file that contains the Kerberos configuration information. You can load the
* krb5.conf
file by providing the file's address. If you're using the CLI, it performs the base64
* encoding for you. Otherwise, provide the base64-encoded text.
*
* The ARNs of the agents that are used to connect to the HDFS cluster. *
*/ private java.util.List* The Amazon Resource Name (ARN) of the source HDFS cluster location. *
*
     * @param locationArn
     *        The Amazon Resource Name (ARN) of the source HDFS cluster location.
     */
    public void setLocationArn(String locationArn) {
        this.locationArn = locationArn;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the source HDFS cluster location.
     * </p>
     *
     * @return The Amazon Resource Name (ARN) of the source HDFS cluster location.
     */
    public String getLocationArn() {
        return this.locationArn;
    }

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the source HDFS cluster location.
     * </p>
     *
     * @param locationArn
     *        The Amazon Resource Name (ARN) of the source HDFS cluster location.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLocationHdfsRequest withLocationArn(String locationArn) {
        setLocationArn(locationArn);
        return this;
    }

    /**
     * <p>
     * A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS
     * cluster.
     * </p>
*
     * @param subdirectory
     *        A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS
     *        cluster.
     */
    public void setSubdirectory(String subdirectory) {
        this.subdirectory = subdirectory;
    }

    /**
     * <p>
     * A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS
     * cluster.
     * </p>
     *
     * @return A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS
     *         cluster.
     */
    public String getSubdirectory() {
        return this.subdirectory;
    }

    /**
     * <p>
     * A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS
     * cluster.
     * </p>
     *
     * @param subdirectory
     *        A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS
     *        cluster.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLocationHdfsRequest withSubdirectory(String subdirectory) {
        setSubdirectory(subdirectory);
        return this;
    }

    /**
     * <p>
     * The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and
     * renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You
     * can use only one NameNode.
     * </p>
* * @return The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, * and renaming files and directories. The NameNode contains the information to map blocks of data to the * DataNodes. You can use only one NameNode. */ public java.util.List* The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and * renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You * can use only one NameNode. *
* * @param nameNodes * The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, * and renaming files and directories. The NameNode contains the information to map blocks of data to the * DataNodes. You can use only one NameNode. */ public void setNameNodes(java.util.Collection* The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and * renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You * can use only one NameNode. *
** NOTE: This method appends the values to the existing list (if any). Use * {@link #setNameNodes(java.util.Collection)} or {@link #withNameNodes(java.util.Collection)} if you want to * override the existing values. *
* * @param nameNodes * The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, * and renaming files and directories. The NameNode contains the information to map blocks of data to the * DataNodes. You can use only one NameNode. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateLocationHdfsRequest withNameNodes(HdfsNameNode... nameNodes) { if (this.nameNodes == null) { setNameNodes(new java.util.ArrayList* The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and * renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You * can use only one NameNode. *
* * @param nameNodes * The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, * and renaming files and directories. The NameNode contains the information to map blocks of data to the * DataNodes. You can use only one NameNode. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateLocationHdfsRequest withNameNodes(java.util.Collection* The size of the data blocks to write into the HDFS cluster. *
*
     * @param blockSize
     *        The size of the data blocks to write into the HDFS cluster.
     */
    public void setBlockSize(Integer blockSize) {
        this.blockSize = blockSize;
    }

    /**
     * <p>
     * The size of the data blocks to write into the HDFS cluster.
     * </p>
     *
     * @return The size of the data blocks to write into the HDFS cluster.
     */
    public Integer getBlockSize() {
        return this.blockSize;
    }

    /**
     * <p>
     * The size of the data blocks to write into the HDFS cluster.
     * </p>
     *
     * @param blockSize
     *        The size of the data blocks to write into the HDFS cluster.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLocationHdfsRequest withBlockSize(Integer blockSize) {
        setBlockSize(blockSize);
        return this;
    }

    /**
     * <p>
     * The number of DataNodes to replicate the data to when writing to the HDFS cluster.
     * </p>
*
     * @param replicationFactor
     *        The number of DataNodes to replicate the data to when writing to the HDFS cluster.
     */
    public void setReplicationFactor(Integer replicationFactor) {
        this.replicationFactor = replicationFactor;
    }

    /**
     * <p>
     * The number of DataNodes to replicate the data to when writing to the HDFS cluster.
     * </p>
     *
     * @return The number of DataNodes to replicate the data to when writing to the HDFS cluster.
     */
    public Integer getReplicationFactor() {
        return this.replicationFactor;
    }

    /**
     * <p>
     * The number of DataNodes to replicate the data to when writing to the HDFS cluster.
     * </p>
     *
     * @param replicationFactor
     *        The number of DataNodes to replicate the data to when writing to the HDFS cluster.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLocationHdfsRequest withReplicationFactor(Integer replicationFactor) {
        setReplicationFactor(replicationFactor);
        return this;
    }

    /**
     * <p>
     * The URI of the HDFS cluster's Key Management Server (KMS).
     * </p>
*
     * @param kmsKeyProviderUri
     *        The URI of the HDFS cluster's Key Management Server (KMS).
     */
    public void setKmsKeyProviderUri(String kmsKeyProviderUri) {
        this.kmsKeyProviderUri = kmsKeyProviderUri;
    }

    /**
     * <p>
     * The URI of the HDFS cluster's Key Management Server (KMS).
     * </p>
     *
     * @return The URI of the HDFS cluster's Key Management Server (KMS).
     */
    public String getKmsKeyProviderUri() {
        return this.kmsKeyProviderUri;
    }

    /**
     * <p>
     * The URI of the HDFS cluster's Key Management Server (KMS).
     * </p>
     *
     * @param kmsKeyProviderUri
     *        The URI of the HDFS cluster's Key Management Server (KMS).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLocationHdfsRequest withKmsKeyProviderUri(String kmsKeyProviderUri) {
        setKmsKeyProviderUri(kmsKeyProviderUri);
        return this;
    }

    /**
     * <p>
     * The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer privacy
     * settings configured on the Hadoop Distributed File System (HDFS) cluster.
     * </p>
*
     * @param qopConfiguration
     *        The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer
     *        privacy settings configured on the Hadoop Distributed File System (HDFS) cluster.
     */
    public void setQopConfiguration(QopConfiguration qopConfiguration) {
        this.qopConfiguration = qopConfiguration;
    }

    /**
     * <p>
     * The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer privacy
     * settings configured on the Hadoop Distributed File System (HDFS) cluster.
     * </p>
     *
     * @return The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer
     *         privacy settings configured on the Hadoop Distributed File System (HDFS) cluster.
     */
    public QopConfiguration getQopConfiguration() {
        return this.qopConfiguration;
    }

    /**
     * <p>
     * The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer privacy
     * settings configured on the Hadoop Distributed File System (HDFS) cluster.
     * </p>
     *
     * @param qopConfiguration
     *        The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer
     *        privacy settings configured on the Hadoop Distributed File System (HDFS) cluster.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLocationHdfsRequest withQopConfiguration(QopConfiguration qopConfiguration) {
        setQopConfiguration(qopConfiguration);
        return this;
    }

    /**
     * <p>
     * The type of authentication used to determine the identity of the user.
     * </p>
*
     * @param authenticationType
     *        The type of authentication used to determine the identity of the user.
     * @see HdfsAuthenticationType
     */
    public void setAuthenticationType(String authenticationType) {
        this.authenticationType = authenticationType;
    }

    /**
     * <p>
     * The type of authentication used to determine the identity of the user.
     * </p>
     *
     * @return The type of authentication used to determine the identity of the user.
     * @see HdfsAuthenticationType
     */
    public String getAuthenticationType() {
        return this.authenticationType;
    }

    /**
     * <p>
     * The type of authentication used to determine the identity of the user.
     * </p>
     *
     * @param authenticationType
     *        The type of authentication used to determine the identity of the user.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see HdfsAuthenticationType
     */
    public UpdateLocationHdfsRequest withAuthenticationType(String authenticationType) {
        setAuthenticationType(authenticationType);
        return this;
    }

    /**
     * <p>
     * The type of authentication used to determine the identity of the user.
     * </p>
     *
     * @param authenticationType
     *        The type of authentication used to determine the identity of the user.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see HdfsAuthenticationType
     */
    public UpdateLocationHdfsRequest withAuthenticationType(HdfsAuthenticationType authenticationType) {
        // Stores the enum's string form; the String overload is bypassed by design in the generated SDK.
        this.authenticationType = authenticationType.toString();
        return this;
    }

    /**
     * <p>
     * The user name used to identify the client on the host operating system.
     * </p>
*
     * @param simpleUser
     *        The user name used to identify the client on the host operating system.
     */
    public void setSimpleUser(String simpleUser) {
        this.simpleUser = simpleUser;
    }

    /**
     * <p>
     * The user name used to identify the client on the host operating system.
     * </p>
     *
     * @return The user name used to identify the client on the host operating system.
     */
    public String getSimpleUser() {
        return this.simpleUser;
    }

    /**
     * <p>
     * The user name used to identify the client on the host operating system.
     * </p>
     *
     * @param simpleUser
     *        The user name used to identify the client on the host operating system.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLocationHdfsRequest withSimpleUser(String simpleUser) {
        setSimpleUser(simpleUser);
        return this;
    }

    /**
     * <p>
     * The Kerberos principal with access to the files and folders on the HDFS cluster.
     * </p>
*
     * @param kerberosPrincipal
     *        The Kerberos principal with access to the files and folders on the HDFS cluster.
     */
    public void setKerberosPrincipal(String kerberosPrincipal) {
        this.kerberosPrincipal = kerberosPrincipal;
    }

    /**
     * <p>
     * The Kerberos principal with access to the files and folders on the HDFS cluster.
     * </p>
     *
     * @return The Kerberos principal with access to the files and folders on the HDFS cluster.
     */
    public String getKerberosPrincipal() {
        return this.kerberosPrincipal;
    }

    /**
     * <p>
     * The Kerberos principal with access to the files and folders on the HDFS cluster.
     * </p>
     *
     * @param kerberosPrincipal
     *        The Kerberos principal with access to the files and folders on the HDFS cluster.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLocationHdfsRequest withKerberosPrincipal(String kerberosPrincipal) {
        setKerberosPrincipal(kerberosPrincipal);
        return this;
    }

    /**
     * <p>
     * The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted
     * keys. You can load the keytab from a file by providing the file's address. If you use the CLI, it performs base64
     * encoding for you. Otherwise, provide the base64-encoded text.
     * </p>
*
     * <p>
     * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
     * Users of the SDK should not perform Base64 encoding on this field.
     * </p>
     * <p>
     * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer will
     * be seen by all objects that have a reference to this object. It is recommended to call ByteBuffer.duplicate() or
     * ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. This behavior will be changed in a future
     * major version of the SDK.
     * </p>
     *
     * @param kerberosKeytab
     *        The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the
     *        encrypted keys. You can load the keytab from a file by providing the file's address. If you use the CLI,
     *        it performs base64 encoding for you. Otherwise, provide the base64-encoded text.
     */
    public void setKerberosKeytab(java.nio.ByteBuffer kerberosKeytab) {
        this.kerberosKeytab = kerberosKeytab;
    }

    /**
     * <p>
     * The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted
     * keys. You can load the keytab from a file by providing the file's address. If you use the CLI, it performs base64
     * encoding for you. Otherwise, provide the base64-encoded text.
     * </p>
     * <p>
     * {@code ByteBuffer}s are stateful. Calling their {@code get} methods changes their {@code position}. We recommend
     * using {@link java.nio.ByteBuffer#asReadOnlyBuffer()} to create a read-only view of the buffer with an independent
     * {@code position}, and calling {@code get} methods on this rather than directly on the returned {@code ByteBuffer}.
     * Doing so will ensure that anyone else using the {@code ByteBuffer} will not be affected by changes to the
     * {@code position}.
     * </p>
     *
     * @return The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the
     *         encrypted keys. You can load the keytab from a file by providing the file's address. If you use the CLI,
     *         it performs base64 encoding for you. Otherwise, provide the base64-encoded text.
     */
    public java.nio.ByteBuffer getKerberosKeytab() {
        return this.kerberosKeytab;
    }

    /**
     * <p>
     * The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted
     * keys. You can load the keytab from a file by providing the file's address. If you use the CLI, it performs base64
     * encoding for you. Otherwise, provide the base64-encoded text.
     * </p>
     * <p>
     * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
     * Users of the SDK should not perform Base64 encoding on this field.
     * </p>
     * <p>
     * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer will
     * be seen by all objects that have a reference to this object. It is recommended to call ByteBuffer.duplicate() or
     * ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. This behavior will be changed in a future
     * major version of the SDK.
     * </p>
     *
     * @param kerberosKeytab
     *        The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the
     *        encrypted keys. You can load the keytab from a file by providing the file's address. If you use the CLI,
     *        it performs base64 encoding for you. Otherwise, provide the base64-encoded text.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLocationHdfsRequest withKerberosKeytab(java.nio.ByteBuffer kerberosKeytab) {
        setKerberosKeytab(kerberosKeytab);
        return this;
    }

    /**
* <p>
     * The <code>krb5.conf</code> file that contains the Kerberos configuration information. You can load the
     * <code>krb5.conf</code> file by providing the file's address. If you're using the CLI, it performs the base64
     * encoding for you. Otherwise, provide the base64-encoded text.
     * </p>
     * <p>
     * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
     * Users of the SDK should not perform Base64 encoding on this field.
     * </p>
     * <p>
     * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer will
     * be seen by all objects that have a reference to this object. It is recommended to call ByteBuffer.duplicate() or
     * ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. This behavior will be changed in a future
     * major version of the SDK.
     * </p>
     *
     * @param kerberosKrb5Conf
     *        The <code>krb5.conf</code> file that contains the Kerberos configuration information. You can load the
     *        <code>krb5.conf</code> file by providing the file's address. If you're using the CLI, it performs the
     *        base64 encoding for you. Otherwise, provide the base64-encoded text.
     */
    public void setKerberosKrb5Conf(java.nio.ByteBuffer kerberosKrb5Conf) {
        this.kerberosKrb5Conf = kerberosKrb5Conf;
    }
    /**
     * <p>
     * The <code>krb5.conf</code> file that contains the Kerberos configuration information. You can load the
     * <code>krb5.conf</code> file by providing the file's address. If you're using the CLI, it performs the base64
     * encoding for you. Otherwise, provide the base64-encoded text.
     * </p>
     * <p>
     * {@code ByteBuffer}s are stateful. Calling their {@code get} methods changes their {@code position}. We recommend
     * using {@link java.nio.ByteBuffer#asReadOnlyBuffer()} to create a read-only view of the buffer with an independent
     * {@code position}, and calling {@code get} methods on this rather than directly on the returned {@code ByteBuffer}.
     * Doing so will ensure that anyone else using the {@code ByteBuffer} will not be affected by changes to the
     * {@code position}.
     * </p>
     *
     * @return The <code>krb5.conf</code> file that contains the Kerberos configuration information. You can load the
     *         <code>krb5.conf</code> file by providing the file's address. If you're using the CLI, it performs the
     *         base64 encoding for you. Otherwise, provide the base64-encoded text.
     */
    public java.nio.ByteBuffer getKerberosKrb5Conf() {
        return this.kerberosKrb5Conf;
    }
    /**
     * <p>
     * The <code>krb5.conf</code> file that contains the Kerberos configuration information. You can load the
     * <code>krb5.conf</code> file by providing the file's address. If you're using the CLI, it performs the base64
     * encoding for you. Otherwise, provide the base64-encoded text.
     * </p>
     * <p>
     * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
     * Users of the SDK should not perform Base64 encoding on this field.
     * </p>
     * <p>
     * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer will
     * be seen by all objects that have a reference to this object. It is recommended to call ByteBuffer.duplicate() or
     * ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. This behavior will be changed in a future
     * major version of the SDK.
     * </p>
     *
     * @param kerberosKrb5Conf
     *        The <code>krb5.conf</code> file that contains the Kerberos configuration information. You can load the
     *        <code>krb5.conf</code> file by providing the file's address. If you're using the CLI, it performs the
     *        base64 encoding for you. Otherwise, provide the base64-encoded text.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateLocationHdfsRequest withKerberosKrb5Conf(java.nio.ByteBuffer kerberosKrb5Conf) {
        setKerberosKrb5Conf(kerberosKrb5Conf);
        return this;
    }
/**
* * The ARNs of the agents that are used to connect to the HDFS cluster. *
* * @return The ARNs of the agents that are used to connect to the HDFS cluster. */ public java.util.List* The ARNs of the agents that are used to connect to the HDFS cluster. *
* * @param agentArns * The ARNs of the agents that are used to connect to the HDFS cluster. */ public void setAgentArns(java.util.Collection* The ARNs of the agents that are used to connect to the HDFS cluster. *
** NOTE: This method appends the values to the existing list (if any). Use * {@link #setAgentArns(java.util.Collection)} or {@link #withAgentArns(java.util.Collection)} if you want to * override the existing values. *
* * @param agentArns * The ARNs of the agents that are used to connect to the HDFS cluster. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateLocationHdfsRequest withAgentArns(String... agentArns) { if (this.agentArns == null) { setAgentArns(new java.util.ArrayList* The ARNs of the agents that are used to connect to the HDFS cluster. *
* * @param agentArns * The ARNs of the agents that are used to connect to the HDFS cluster. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateLocationHdfsRequest withAgentArns(java.util.Collection