1. Packages
  2. AWS
  3. API Docs
  4. datasync
  5. LocationHdfs
AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi

aws.datasync.LocationHdfs

Explore with Pulumi AI

aws logo
AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi

    Manages an HDFS Location within AWS DataSync.

    NOTE: The DataSync Agents must be available before creating this resource.

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const example = new aws.datasync.LocationHdfs("example", {
        agentArns: [exampleAwsDatasyncAgent.arn],
        authenticationType: "SIMPLE",
        simpleUser: "example",
        nameNodes: [{
            hostname: exampleAwsInstance.privateDns,
            port: 80,
        }],
    });
    
    import pulumi
    import pulumi_aws as aws
    
    example = aws.datasync.LocationHdfs("example",
        agent_arns=[example_aws_datasync_agent["arn"]],
        authentication_type="SIMPLE",
        simple_user="example",
        name_nodes=[{
            "hostname": example_aws_instance["privateDns"],
            "port": 80,
        }])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/datasync"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := datasync.NewLocationHdfs(ctx, "example", &datasync.LocationHdfsArgs{
    			AgentArns: pulumi.StringArray{
    				exampleAwsDatasyncAgent.Arn,
    			},
    			AuthenticationType: pulumi.String("SIMPLE"),
    			SimpleUser:         pulumi.String("example"),
    			NameNodes: datasync.LocationHdfsNameNodeArray{
    				&datasync.LocationHdfsNameNodeArgs{
    					Hostname: pulumi.Any(exampleAwsInstance.PrivateDns),
    					Port:     pulumi.Int(80),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.DataSync.LocationHdfs("example", new()
        {
            AgentArns = new[]
            {
                exampleAwsDatasyncAgent.Arn,
            },
            AuthenticationType = "SIMPLE",
            SimpleUser = "example",
            NameNodes = new[]
            {
                new Aws.DataSync.Inputs.LocationHdfsNameNodeArgs
                {
                    Hostname = exampleAwsInstance.PrivateDns,
                    Port = 80,
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.datasync.LocationHdfs;
    import com.pulumi.aws.datasync.LocationHdfsArgs;
    import com.pulumi.aws.datasync.inputs.LocationHdfsNameNodeArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new LocationHdfs("example", LocationHdfsArgs.builder()
                .agentArns(exampleAwsDatasyncAgent.arn())
                .authenticationType("SIMPLE")
                .simpleUser("example")
                .nameNodes(LocationHdfsNameNodeArgs.builder()
                    .hostname(exampleAwsInstance.privateDns())
                    .port(80)
                    .build())
                .build());
    
        }
    }
    
    resources:
      example:
        type: aws:datasync:LocationHdfs
        properties:
          agentArns:
            - ${exampleAwsDatasyncAgent.arn}
          authenticationType: SIMPLE
          simpleUser: example
          nameNodes:
            - hostname: ${exampleAwsInstance.privateDns}
              port: 80
    

    Kerberos Authentication

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    import * as std from "@pulumi/std";
    
    const example = new aws.datasync.LocationHdfs("example", {
        agentArns: [exampleAwsDatasyncAgent.arn],
        authenticationType: "KERBEROS",
        nameNodes: [{
            hostname: exampleAwsInstance.privateDns,
            port: 80,
        }],
        kerberosPrincipal: "user@example.com",
        kerberosKeytabBase64: std.filebase64({
            input: "user.keytab",
        }).then(invoke => invoke.result),
        kerberosKrb5Conf: std.file({
            input: "krb5.conf",
        }).then(invoke => invoke.result),
    });
    
    import pulumi
    import pulumi_aws as aws
    import pulumi_std as std
    
    example = aws.datasync.LocationHdfs("example",
        agent_arns=[example_aws_datasync_agent["arn"]],
        authentication_type="KERBEROS",
        name_nodes=[{
            "hostname": example_aws_instance["privateDns"],
            "port": 80,
        }],
        kerberos_principal="user@example.com",
        kerberos_keytab_base64=std.filebase64(input="user.keytab").result,
        kerberos_krb5_conf=std.file(input="krb5.conf").result)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/datasync"
    	"github.com/pulumi/pulumi-std/sdk/go/std"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		invokeFilebase64, err := std.Filebase64(ctx, &std.Filebase64Args{
    			Input: "user.keytab",
    		}, nil)
    		if err != nil {
    			return err
    		}
    		invokeFile1, err := std.File(ctx, &std.FileArgs{
    			Input: "krb5.conf",
    		}, nil)
    		if err != nil {
    			return err
    		}
    		_, err = datasync.NewLocationHdfs(ctx, "example", &datasync.LocationHdfsArgs{
    			AgentArns: pulumi.StringArray{
    				exampleAwsDatasyncAgent.Arn,
    			},
    			AuthenticationType: pulumi.String("KERBEROS"),
    			NameNodes: datasync.LocationHdfsNameNodeArray{
    				&datasync.LocationHdfsNameNodeArgs{
    					Hostname: pulumi.Any(exampleAwsInstance.PrivateDns),
    					Port:     pulumi.Int(80),
    				},
    			},
    			KerberosPrincipal:    pulumi.String("user@example.com"),
    			KerberosKeytabBase64: pulumi.String(invokeFilebase64.Result),
    			KerberosKrb5Conf:     pulumi.String(invokeFile1.Result),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    using Std = Pulumi.Std;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.DataSync.LocationHdfs("example", new()
        {
            AgentArns = new[]
            {
                exampleAwsDatasyncAgent.Arn,
            },
            AuthenticationType = "KERBEROS",
            NameNodes = new[]
            {
                new Aws.DataSync.Inputs.LocationHdfsNameNodeArgs
                {
                    Hostname = exampleAwsInstance.PrivateDns,
                    Port = 80,
                },
            },
            KerberosPrincipal = "user@example.com",
            KerberosKeytabBase64 = Std.Filebase64.Invoke(new()
            {
                Input = "user.keytab",
            }).Apply(invoke => invoke.Result),
            KerberosKrb5Conf = Std.File.Invoke(new()
            {
                Input = "krb5.conf",
            }).Apply(invoke => invoke.Result),
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.datasync.LocationHdfs;
    import com.pulumi.aws.datasync.LocationHdfsArgs;
    import com.pulumi.aws.datasync.inputs.LocationHdfsNameNodeArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new LocationHdfs("example", LocationHdfsArgs.builder()
                .agentArns(exampleAwsDatasyncAgent.arn())
                .authenticationType("KERBEROS")
                .nameNodes(LocationHdfsNameNodeArgs.builder()
                    .hostname(exampleAwsInstance.privateDns())
                    .port(80)
                    .build())
                .kerberosPrincipal("user@example.com")
                .kerberosKeytabBase64(StdFunctions.filebase64(Filebase64Args.builder()
                    .input("user.keytab")
                    .build()).result())
                .kerberosKrb5Conf(StdFunctions.file(FileArgs.builder()
                    .input("krb5.conf")
                    .build()).result())
                .build());
    
        }
    }
    
    resources:
      example:
        type: aws:datasync:LocationHdfs
        properties:
          agentArns:
            - ${exampleAwsDatasyncAgent.arn}
          authenticationType: KERBEROS
          nameNodes:
            - hostname: ${exampleAwsInstance.privateDns}
              port: 80
          kerberosPrincipal: user@example.com
          kerberosKeytabBase64:
            fn::invoke:
              Function: std:filebase64
              Arguments:
                input: user.keytab
              Return: result
          kerberosKrb5Conf:
            fn::invoke:
              Function: std:file
              Arguments:
                input: krb5.conf
              Return: result
    

    Create LocationHdfs Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new LocationHdfs(name: string, args: LocationHdfsArgs, opts?: CustomResourceOptions);
    @overload
    def LocationHdfs(resource_name: str,
                     args: LocationHdfsArgs,
                     opts: Optional[ResourceOptions] = None)
    
    @overload
    def LocationHdfs(resource_name: str,
                     opts: Optional[ResourceOptions] = None,
                     agent_arns: Optional[Sequence[str]] = None,
                     name_nodes: Optional[Sequence[LocationHdfsNameNodeArgs]] = None,
                     kerberos_keytab: Optional[str] = None,
                     block_size: Optional[int] = None,
                     kerberos_keytab_base64: Optional[str] = None,
                     kerberos_krb5_conf: Optional[str] = None,
                     kerberos_krb5_conf_base64: Optional[str] = None,
                     kerberos_principal: Optional[str] = None,
                     kms_key_provider_uri: Optional[str] = None,
                     authentication_type: Optional[str] = None,
                     qop_configuration: Optional[LocationHdfsQopConfigurationArgs] = None,
                     replication_factor: Optional[int] = None,
                     simple_user: Optional[str] = None,
                     subdirectory: Optional[str] = None,
                     tags: Optional[Mapping[str, str]] = None)
    func NewLocationHdfs(ctx *Context, name string, args LocationHdfsArgs, opts ...ResourceOption) (*LocationHdfs, error)
    public LocationHdfs(string name, LocationHdfsArgs args, CustomResourceOptions? opts = null)
    public LocationHdfs(String name, LocationHdfsArgs args)
    public LocationHdfs(String name, LocationHdfsArgs args, CustomResourceOptions options)
    
    type: aws:datasync:LocationHdfs
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args LocationHdfsArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args LocationHdfsArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args LocationHdfsArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args LocationHdfsArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args LocationHdfsArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var locationHdfsResource = new Aws.DataSync.LocationHdfs("locationHdfsResource", new()
    {
        AgentArns = new[]
        {
            "string",
        },
        NameNodes = new[]
        {
            new Aws.DataSync.Inputs.LocationHdfsNameNodeArgs
            {
                Hostname = "string",
                Port = 0,
            },
        },
        KerberosKeytab = "string",
        BlockSize = 0,
        KerberosKeytabBase64 = "string",
        KerberosKrb5Conf = "string",
        KerberosKrb5ConfBase64 = "string",
        KerberosPrincipal = "string",
        KmsKeyProviderUri = "string",
        AuthenticationType = "string",
        QopConfiguration = new Aws.DataSync.Inputs.LocationHdfsQopConfigurationArgs
        {
            DataTransferProtection = "string",
            RpcProtection = "string",
        },
        ReplicationFactor = 0,
        SimpleUser = "string",
        Subdirectory = "string",
        Tags = 
        {
            { "string", "string" },
        },
    });
    
    example, err := datasync.NewLocationHdfs(ctx, "locationHdfsResource", &datasync.LocationHdfsArgs{
    	AgentArns: pulumi.StringArray{
    		pulumi.String("string"),
    	},
    	NameNodes: datasync.LocationHdfsNameNodeArray{
    		&datasync.LocationHdfsNameNodeArgs{
    			Hostname: pulumi.String("string"),
    			Port:     pulumi.Int(0),
    		},
    	},
    	KerberosKeytab:         pulumi.String("string"),
    	BlockSize:              pulumi.Int(0),
    	KerberosKeytabBase64:   pulumi.String("string"),
    	KerberosKrb5Conf:       pulumi.String("string"),
    	KerberosKrb5ConfBase64: pulumi.String("string"),
    	KerberosPrincipal:      pulumi.String("string"),
    	KmsKeyProviderUri:      pulumi.String("string"),
    	AuthenticationType:     pulumi.String("string"),
    	QopConfiguration: &datasync.LocationHdfsQopConfigurationArgs{
    		DataTransferProtection: pulumi.String("string"),
    		RpcProtection:          pulumi.String("string"),
    	},
    	ReplicationFactor: pulumi.Int(0),
    	SimpleUser:        pulumi.String("string"),
    	Subdirectory:      pulumi.String("string"),
    	Tags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    })
    
    var locationHdfsResource = new LocationHdfs("locationHdfsResource", LocationHdfsArgs.builder()
        .agentArns("string")
        .nameNodes(LocationHdfsNameNodeArgs.builder()
            .hostname("string")
            .port(0)
            .build())
        .kerberosKeytab("string")
        .blockSize(0)
        .kerberosKeytabBase64("string")
        .kerberosKrb5Conf("string")
        .kerberosKrb5ConfBase64("string")
        .kerberosPrincipal("string")
        .kmsKeyProviderUri("string")
        .authenticationType("string")
        .qopConfiguration(LocationHdfsQopConfigurationArgs.builder()
            .dataTransferProtection("string")
            .rpcProtection("string")
            .build())
        .replicationFactor(0)
        .simpleUser("string")
        .subdirectory("string")
        .tags(Map.of("string", "string"))
        .build());
    
    location_hdfs_resource = aws.datasync.LocationHdfs("locationHdfsResource",
        agent_arns=["string"],
        name_nodes=[{
            "hostname": "string",
            "port": 0,
        }],
        kerberos_keytab="string",
        block_size=0,
        kerberos_keytab_base64="string",
        kerberos_krb5_conf="string",
        kerberos_krb5_conf_base64="string",
        kerberos_principal="string",
        kms_key_provider_uri="string",
        authentication_type="string",
        qop_configuration={
            "data_transfer_protection": "string",
            "rpc_protection": "string",
        },
        replication_factor=0,
        simple_user="string",
        subdirectory="string",
        tags={
            "string": "string",
        })
    
    const locationHdfsResource = new aws.datasync.LocationHdfs("locationHdfsResource", {
        agentArns: ["string"],
        nameNodes: [{
            hostname: "string",
            port: 0,
        }],
        kerberosKeytab: "string",
        blockSize: 0,
        kerberosKeytabBase64: "string",
        kerberosKrb5Conf: "string",
        kerberosKrb5ConfBase64: "string",
        kerberosPrincipal: "string",
        kmsKeyProviderUri: "string",
        authenticationType: "string",
        qopConfiguration: {
            dataTransferProtection: "string",
            rpcProtection: "string",
        },
        replicationFactor: 0,
        simpleUser: "string",
        subdirectory: "string",
        tags: {
            string: "string",
        },
    });
    
    type: aws:datasync:LocationHdfs
    properties:
        agentArns:
            - string
        authenticationType: string
        blockSize: 0
        kerberosKeytab: string
        kerberosKeytabBase64: string
        kerberosKrb5Conf: string
        kerberosKrb5ConfBase64: string
        kerberosPrincipal: string
        kmsKeyProviderUri: string
        nameNodes:
            - hostname: string
              port: 0
        qopConfiguration:
            dataTransferProtection: string
            rpcProtection: string
        replicationFactor: 0
        simpleUser: string
        subdirectory: string
        tags:
            string: string
    

    LocationHdfs Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The LocationHdfs resource accepts the following input properties:

    AgentArns List<string>
    A list of DataSync Agent ARNs with which this location will be associated.
    NameNodes List<LocationHdfsNameNode>
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    AuthenticationType string
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    BlockSize int
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    KerberosKeytab string
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    KerberosKeytabBase64 string
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    KerberosKrb5Conf string
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    KerberosKrb5ConfBase64 string
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    KerberosPrincipal string
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    KmsKeyProviderUri string
    The URI of the HDFS cluster's Key Management Server (KMS).
    QopConfiguration LocationHdfsQopConfiguration
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
    ReplicationFactor int
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    SimpleUser string
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    Subdirectory string
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    Tags Dictionary<string, string>
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    AgentArns []string
    A list of DataSync Agent ARNs with which this location will be associated.
    NameNodes []LocationHdfsNameNodeArgs
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    AuthenticationType string
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    BlockSize int
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    KerberosKeytab string
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    KerberosKeytabBase64 string
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    KerberosKrb5Conf string
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    KerberosKrb5ConfBase64 string
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    KerberosPrincipal string
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    KmsKeyProviderUri string
    The URI of the HDFS cluster's Key Management Server (KMS).
    QopConfiguration LocationHdfsQopConfigurationArgs
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
    ReplicationFactor int
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    SimpleUser string
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    Subdirectory string
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    Tags map[string]string
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    agentArns List<String>
    A list of DataSync Agent ARNs with which this location will be associated.
    nameNodes List<LocationHdfsNameNode>
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    authenticationType String
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    blockSize Integer
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    kerberosKeytab String
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    kerberosKeytabBase64 String
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    kerberosKrb5Conf String
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    kerberosKrb5ConfBase64 String
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    kerberosPrincipal String
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    kmsKeyProviderUri String
    The URI of the HDFS cluster's Key Management Server (KMS).
    qopConfiguration LocationHdfsQopConfiguration
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
    replicationFactor Integer
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    simpleUser String
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    subdirectory String
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    tags Map<String,String>
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    agentArns string[]
    A list of DataSync Agent ARNs with which this location will be associated.
    nameNodes LocationHdfsNameNode[]
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    authenticationType string
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    blockSize number
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    kerberosKeytab string
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    kerberosKeytabBase64 string
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    kerberosKrb5Conf string
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    kerberosKrb5ConfBase64 string
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    kerberosPrincipal string
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    kmsKeyProviderUri string
    The URI of the HDFS cluster's Key Management Server (KMS).
    qopConfiguration LocationHdfsQopConfiguration
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
    replicationFactor number
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    simpleUser string
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    subdirectory string
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    tags {[key: string]: string}
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    agent_arns Sequence[str]
    A list of DataSync Agent ARNs with which this location will be associated.
    name_nodes Sequence[LocationHdfsNameNodeArgs]
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    authentication_type str
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    block_size int
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    kerberos_keytab str
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    kerberos_keytab_base64 str
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    kerberos_krb5_conf str
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    kerberos_krb5_conf_base64 str
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    kerberos_principal str
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    kms_key_provider_uri str
    The URI of the HDFS cluster's Key Management Server (KMS).
    qop_configuration LocationHdfsQopConfigurationArgs
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
    replication_factor int
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    simple_user str
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    subdirectory str
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    tags Mapping[str, str]
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    agentArns List<String>
    A list of DataSync Agent ARNs with which this location will be associated.
    nameNodes List<Property Map>
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    authenticationType String
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    blockSize Number
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    kerberosKeytab String
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    kerberosKeytabBase64 String
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    kerberosKrb5Conf String
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    kerberosKrb5ConfBase64 String
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    kerberosPrincipal String
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    kmsKeyProviderUri String
    The URI of the HDFS cluster's Key Management Server (KMS).
    qopConfiguration Property Map
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
    replicationFactor Number
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    simpleUser String
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    subdirectory String
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    tags Map<String>
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the LocationHdfs resource produces the following output properties:

    Arn string
    Amazon Resource Name (ARN) of the DataSync Location.
    Id string
    The provider-assigned unique ID for this managed resource.
    TagsAll Dictionary<string, string>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Uri string
    The URI of the HDFS cluster location.
    Arn string
    Amazon Resource Name (ARN) of the DataSync Location.
    Id string
    The provider-assigned unique ID for this managed resource.
    TagsAll map[string]string
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Uri string
    The URI of the HDFS cluster location.
    arn String
    Amazon Resource Name (ARN) of the DataSync Location.
    id String
    The provider-assigned unique ID for this managed resource.
    tagsAll Map<String,String>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    uri String
    The URI of the HDFS cluster location.
    arn string
    Amazon Resource Name (ARN) of the DataSync Location.
    id string
    The provider-assigned unique ID for this managed resource.
    tagsAll {[key: string]: string}
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    uri string
    The URI of the HDFS cluster location.
    arn str
    Amazon Resource Name (ARN) of the DataSync Location.
    id str
    The provider-assigned unique ID for this managed resource.
    tags_all Mapping[str, str]
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    uri str
    The URI of the HDFS cluster location.
    arn String
    Amazon Resource Name (ARN) of the DataSync Location.
    id String
    The provider-assigned unique ID for this managed resource.
    tagsAll Map<String>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    uri String
    The URI of the HDFS cluster location.

    Look up Existing LocationHdfs Resource

    Get an existing LocationHdfs resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: LocationHdfsState, opts?: CustomResourceOptions): LocationHdfs
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            agent_arns: Optional[Sequence[str]] = None,
            arn: Optional[str] = None,
            authentication_type: Optional[str] = None,
            block_size: Optional[int] = None,
            kerberos_keytab: Optional[str] = None,
            kerberos_keytab_base64: Optional[str] = None,
            kerberos_krb5_conf: Optional[str] = None,
            kerberos_krb5_conf_base64: Optional[str] = None,
            kerberos_principal: Optional[str] = None,
            kms_key_provider_uri: Optional[str] = None,
            name_nodes: Optional[Sequence[LocationHdfsNameNodeArgs]] = None,
            qop_configuration: Optional[LocationHdfsQopConfigurationArgs] = None,
            replication_factor: Optional[int] = None,
            simple_user: Optional[str] = None,
            subdirectory: Optional[str] = None,
            tags: Optional[Mapping[str, str]] = None,
            tags_all: Optional[Mapping[str, str]] = None,
            uri: Optional[str] = None) -> LocationHdfs
    func GetLocationHdfs(ctx *Context, name string, id IDInput, state *LocationHdfsState, opts ...ResourceOption) (*LocationHdfs, error)
    public static LocationHdfs Get(string name, Input<string> id, LocationHdfsState? state, CustomResourceOptions? opts = null)
    public static LocationHdfs get(String name, Output<String> id, LocationHdfsState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AgentArns List<string>
    A list of DataSync Agent ARNs with which this location will be associated.
    Arn string
    Amazon Resource Name (ARN) of the DataSync Location.
    AuthenticationType string
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    BlockSize int
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    KerberosKeytab string
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    KerberosKeytabBase64 string
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    KerberosKrb5Conf string
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    KerberosKrb5ConfBase64 string
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    KerberosPrincipal string
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    KmsKeyProviderUri string
    The URI of the HDFS cluster's Key Management Server (KMS).
    NameNodes List<LocationHdfsNameNode>
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    QopConfiguration LocationHdfsQopConfiguration
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
    ReplicationFactor int
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    SimpleUser string
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    Subdirectory string
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    Tags Dictionary<string, string>
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    TagsAll Dictionary<string, string>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Uri string
    The URI of the HDFS cluster location.
    AgentArns []string
    A list of DataSync Agent ARNs with which this location will be associated.
    Arn string
    Amazon Resource Name (ARN) of the DataSync Location.
    AuthenticationType string
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    BlockSize int
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    KerberosKeytab string
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    KerberosKeytabBase64 string
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    KerberosKrb5Conf string
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    KerberosKrb5ConfBase64 string
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    KerberosPrincipal string
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    KmsKeyProviderUri string
    The URI of the HDFS cluster's Key Management Server (KMS).
    NameNodes []LocationHdfsNameNodeArgs
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    QopConfiguration LocationHdfsQopConfigurationArgs
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
    ReplicationFactor int
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    SimpleUser string
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    Subdirectory string
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    Tags map[string]string
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    TagsAll map[string]string
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Uri string
    The URI of the HDFS cluster location.
    agentArns List<String>
    A list of DataSync Agent ARNs with which this location will be associated.
    arn String
    Amazon Resource Name (ARN) of the DataSync Location.
    authenticationType String
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    blockSize Integer
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    kerberosKeytab String
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    kerberosKeytabBase64 String
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    kerberosKrb5Conf String
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    kerberosKrb5ConfBase64 String
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    kerberosPrincipal String
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    kmsKeyProviderUri String
    The URI of the HDFS cluster's Key Management Server (KMS).
    nameNodes List<LocationHdfsNameNode>
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    qopConfiguration LocationHdfsQopConfiguration
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
    replicationFactor Integer
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    simpleUser String
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    subdirectory String
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    tags Map<String,String>
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tagsAll Map<String,String>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    uri String
    The URI of the HDFS cluster location.
    agentArns string[]
    A list of DataSync Agent ARNs with which this location will be associated.
    arn string
    Amazon Resource Name (ARN) of the DataSync Location.
    authenticationType string
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    blockSize number
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    kerberosKeytab string
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    kerberosKeytabBase64 string
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    kerberosKrb5Conf string
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    kerberosKrb5ConfBase64 string
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    kerberosPrincipal string
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    kmsKeyProviderUri string
    The URI of the HDFS cluster's Key Management Server (KMS).
    nameNodes LocationHdfsNameNode[]
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    qopConfiguration LocationHdfsQopConfiguration
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
    replicationFactor number
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    simpleUser string
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    subdirectory string
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    tags {[key: string]: string}
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tagsAll {[key: string]: string}
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    uri string
    The URI of the HDFS cluster location.
    agent_arns Sequence[str]
    A list of DataSync Agent ARNs with which this location will be associated.
    arn str
    Amazon Resource Name (ARN) of the DataSync Location.
    authentication_type str
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    block_size int
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    kerberos_keytab str
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    kerberos_keytab_base64 str
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    kerberos_krb5_conf str
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    kerberos_krb5_conf_base64 str
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    kerberos_principal str
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    kms_key_provider_uri str
    The URI of the HDFS cluster's Key Management Server (KMS).
    name_nodes Sequence[LocationHdfsNameNodeArgs]
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    qop_configuration LocationHdfsQopConfigurationArgs
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
    replication_factor int
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    simple_user str
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    subdirectory str
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    tags Mapping[str, str]
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tags_all Mapping[str, str]
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    uri str
    The URI of the HDFS cluster location.
    agentArns List<String>
    A list of DataSync Agent ARNs with which this location will be associated.
    arn String
    Amazon Resource Name (ARN) of the DataSync Location.
    authenticationType String
    The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
    blockSize Number
    The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
    kerberosKeytab String
    The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
    kerberosKeytabBase64 String
    Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
    kerberosKrb5Conf String
    The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
    kerberosKrb5ConfBase64 String
    Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
    kerberosPrincipal String
    The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
    kmsKeyProviderUri String
    The URI of the HDFS cluster's Key Management Server (KMS).
    nameNodes List<Property Map>
    The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
    qopConfiguration Property Map
    The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set rpc_protection or data_transfer_protection, the other parameter assumes the same value. See configuration below.
    replicationFactor Number
    The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
    simpleUser String
    The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
    subdirectory String
    A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
    tags Map<String>
    Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tagsAll Map<String>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    uri String
    The URI of the HDFS cluster location.

    Supporting Types

    LocationHdfsNameNode, LocationHdfsNameNodeArgs

    Hostname string
    The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
    Port int
    The port that the NameNode uses to listen to client requests.
    Hostname string
    The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
    Port int
    The port that the NameNode uses to listen to client requests.
    hostname String
    The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
    port Integer
    The port that the NameNode uses to listen to client requests.
    hostname string
    The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
    port number
    The port that the NameNode uses to listen to client requests.
    hostname str
    The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
    port int
    The port that the NameNode uses to listen to client requests.
    hostname String
    The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
    port Number
    The port that the NameNode uses to listen to client requests.

    LocationHdfsQopConfiguration, LocationHdfsQopConfigurationArgs

    DataTransferProtection string
    The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
    RpcProtection string
    The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
    DataTransferProtection string
    The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
    RpcProtection string
    The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
    dataTransferProtection String
    The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
    rpcProtection String
    The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
    dataTransferProtection string
    The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
    rpcProtection string
    The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
    data_transfer_protection str
    The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
    rpc_protection str
    The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
    dataTransferProtection String
    The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
    rpcProtection String
    The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.

    Import

    Using pulumi import, import aws_datasync_location_hdfs using the Amazon Resource Name (ARN). For example:

    $ pulumi import aws:datasync/locationHdfs:LocationHdfs example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    AWS Classic pulumi/pulumi-aws
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the aws Terraform Provider.
    aws logo
    AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi