aws.datasync.LocationHdfs
Explore with Pulumi AI
Manages an HDFS Location within AWS DataSync.
NOTE: The DataSync Agents must be available before creating this resource.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.datasync.LocationHdfs("example", {
agentArns: [exampleAwsDatasyncAgent.arn],
authenticationType: "SIMPLE",
simpleUser: "example",
nameNodes: [{
hostname: exampleAwsInstance.privateDns,
port: 80,
}],
});
import pulumi
import pulumi_aws as aws
example = aws.datasync.LocationHdfs("example",
agent_arns=[example_aws_datasync_agent["arn"]],
authentication_type="SIMPLE",
simple_user="example",
name_nodes=[{
"hostname": example_aws_instance["privateDns"],
"port": 80,
}])
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/datasync"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := datasync.NewLocationHdfs(ctx, "example", &datasync.LocationHdfsArgs{
AgentArns: pulumi.StringArray{
exampleAwsDatasyncAgent.Arn,
},
AuthenticationType: pulumi.String("SIMPLE"),
SimpleUser: pulumi.String("example"),
NameNodes: datasync.LocationHdfsNameNodeArray{
&datasync.LocationHdfsNameNodeArgs{
Hostname: pulumi.Any(exampleAwsInstance.PrivateDns),
Port: pulumi.Int(80),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var example = new Aws.DataSync.LocationHdfs("example", new()
{
AgentArns = new[]
{
exampleAwsDatasyncAgent.Arn,
},
AuthenticationType = "SIMPLE",
SimpleUser = "example",
NameNodes = new[]
{
new Aws.DataSync.Inputs.LocationHdfsNameNodeArgs
{
Hostname = exampleAwsInstance.PrivateDns,
Port = 80,
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.datasync.LocationHdfs;
import com.pulumi.aws.datasync.LocationHdfsArgs;
import com.pulumi.aws.datasync.inputs.LocationHdfsNameNodeArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new LocationHdfs("example", LocationHdfsArgs.builder()
.agentArns(exampleAwsDatasyncAgent.arn())
.authenticationType("SIMPLE")
.simpleUser("example")
.nameNodes(LocationHdfsNameNodeArgs.builder()
.hostname(exampleAwsInstance.privateDns())
.port(80)
.build())
.build());
}
}
resources:
example:
type: aws:datasync:LocationHdfs
properties:
agentArns:
- ${exampleAwsDatasyncAgent.arn}
authenticationType: SIMPLE
simpleUser: example
nameNodes:
- hostname: ${exampleAwsInstance.privateDns}
port: 80
Kerberos Authentication
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
import * as std from "@pulumi/std";
const example = new aws.datasync.LocationHdfs("example", {
agentArns: [exampleAwsDatasyncAgent.arn],
authenticationType: "KERBEROS",
nameNodes: [{
hostname: exampleAwsInstance.privateDns,
port: 80,
}],
kerberosPrincipal: "user@example.com",
kerberosKeytabBase64: std.filebase64({
input: "user.keytab",
}).then(invoke => invoke.result),
kerberosKrb5Conf: std.file({
input: "krb5.conf",
}).then(invoke => invoke.result),
});
import pulumi
import pulumi_aws as aws
import pulumi_std as std
example = aws.datasync.LocationHdfs("example",
agent_arns=[example_aws_datasync_agent["arn"]],
authentication_type="KERBEROS",
name_nodes=[{
"hostname": example_aws_instance["privateDns"],
"port": 80,
}],
kerberos_principal="user@example.com",
kerberos_keytab_base64=std.filebase64(input="user.keytab").result,
kerberos_krb5_conf=std.file(input="krb5.conf").result)
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/datasync"
"github.com/pulumi/pulumi-std/sdk/go/std"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
invokeFilebase64, err := std.Filebase64(ctx, &std.Filebase64Args{
Input: "user.keytab",
}, nil)
if err != nil {
return err
}
invokeFile1, err := std.File(ctx, &std.FileArgs{
Input: "krb5.conf",
}, nil)
if err != nil {
return err
}
_, err = datasync.NewLocationHdfs(ctx, "example", &datasync.LocationHdfsArgs{
AgentArns: pulumi.StringArray{
exampleAwsDatasyncAgent.Arn,
},
AuthenticationType: pulumi.String("KERBEROS"),
NameNodes: datasync.LocationHdfsNameNodeArray{
&datasync.LocationHdfsNameNodeArgs{
Hostname: pulumi.Any(exampleAwsInstance.PrivateDns),
Port: pulumi.Int(80),
},
},
KerberosPrincipal: pulumi.String("user@example.com"),
KerberosKeytabBase64: pulumi.String(invokeFilebase64.Result),
KerberosKrb5Conf: pulumi.String(invokeFile1.Result),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
using Std = Pulumi.Std;
return await Deployment.RunAsync(() =>
{
var example = new Aws.DataSync.LocationHdfs("example", new()
{
AgentArns = new[]
{
exampleAwsDatasyncAgent.Arn,
},
AuthenticationType = "KERBEROS",
NameNodes = new[]
{
new Aws.DataSync.Inputs.LocationHdfsNameNodeArgs
{
Hostname = exampleAwsInstance.PrivateDns,
Port = 80,
},
},
KerberosPrincipal = "user@example.com",
KerberosKeytabBase64 = Std.Filebase64.Invoke(new()
{
Input = "user.keytab",
}).Apply(invoke => invoke.Result),
KerberosKrb5Conf = Std.File.Invoke(new()
{
Input = "krb5.conf",
}).Apply(invoke => invoke.Result),
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.datasync.LocationHdfs;
import com.pulumi.aws.datasync.LocationHdfsArgs;
import com.pulumi.aws.datasync.inputs.LocationHdfsNameNodeArgs;
import com.pulumi.std.StdFunctions;
import com.pulumi.std.inputs.Filebase64Args;
import com.pulumi.std.inputs.FileArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new LocationHdfs("example", LocationHdfsArgs.builder()
.agentArns(exampleAwsDatasyncAgent.arn())
.authenticationType("KERBEROS")
.nameNodes(LocationHdfsNameNodeArgs.builder()
.hostname(exampleAwsInstance.privateDns())
.port(80)
.build())
.kerberosPrincipal("user@example.com")
.kerberosKeytabBase64(StdFunctions.filebase64(Filebase64Args.builder()
.input("user.keytab")
.build()).result())
.kerberosKrb5Conf(StdFunctions.file(FileArgs.builder()
.input("krb5.conf")
.build()).result())
.build());
}
}
resources:
example:
type: aws:datasync:LocationHdfs
properties:
agentArns:
- ${exampleAwsDatasyncAgent.arn}
authenticationType: KERBEROS
nameNodes:
- hostname: ${exampleAwsInstance.privateDns}
port: 80
kerberosPrincipal: user@example.com
kerberosKeytabBase64:
fn::invoke:
Function: std:filebase64
Arguments:
input: user.keytab
Return: result
kerberosKrb5Conf:
fn::invoke:
Function: std:file
Arguments:
input: krb5.conf
Return: result
Create LocationHdfs Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new LocationHdfs(name: string, args: LocationHdfsArgs, opts?: CustomResourceOptions);
@overload
def LocationHdfs(resource_name: str,
args: LocationHdfsArgs,
opts: Optional[ResourceOptions] = None)
@overload
def LocationHdfs(resource_name: str,
opts: Optional[ResourceOptions] = None,
agent_arns: Optional[Sequence[str]] = None,
name_nodes: Optional[Sequence[LocationHdfsNameNodeArgs]] = None,
kerberos_keytab: Optional[str] = None,
block_size: Optional[int] = None,
kerberos_keytab_base64: Optional[str] = None,
kerberos_krb5_conf: Optional[str] = None,
kerberos_krb5_conf_base64: Optional[str] = None,
kerberos_principal: Optional[str] = None,
kms_key_provider_uri: Optional[str] = None,
authentication_type: Optional[str] = None,
qop_configuration: Optional[LocationHdfsQopConfigurationArgs] = None,
replication_factor: Optional[int] = None,
simple_user: Optional[str] = None,
subdirectory: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None)
func NewLocationHdfs(ctx *Context, name string, args LocationHdfsArgs, opts ...ResourceOption) (*LocationHdfs, error)
public LocationHdfs(string name, LocationHdfsArgs args, CustomResourceOptions? opts = null)
public LocationHdfs(String name, LocationHdfsArgs args)
public LocationHdfs(String name, LocationHdfsArgs args, CustomResourceOptions options)
type: aws:datasync:LocationHdfs
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args LocationHdfsArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args LocationHdfsArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args LocationHdfsArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args LocationHdfsArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args LocationHdfsArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var locationHdfsResource = new Aws.DataSync.LocationHdfs("locationHdfsResource", new()
{
AgentArns = new[]
{
"string",
},
NameNodes = new[]
{
new Aws.DataSync.Inputs.LocationHdfsNameNodeArgs
{
Hostname = "string",
Port = 0,
},
},
KerberosKeytab = "string",
BlockSize = 0,
KerberosKeytabBase64 = "string",
KerberosKrb5Conf = "string",
KerberosKrb5ConfBase64 = "string",
KerberosPrincipal = "string",
KmsKeyProviderUri = "string",
AuthenticationType = "string",
QopConfiguration = new Aws.DataSync.Inputs.LocationHdfsQopConfigurationArgs
{
DataTransferProtection = "string",
RpcProtection = "string",
},
ReplicationFactor = 0,
SimpleUser = "string",
Subdirectory = "string",
Tags =
{
{ "string", "string" },
},
});
example, err := datasync.NewLocationHdfs(ctx, "locationHdfsResource", &datasync.LocationHdfsArgs{
AgentArns: pulumi.StringArray{
pulumi.String("string"),
},
NameNodes: datasync.LocationHdfsNameNodeArray{
&datasync.LocationHdfsNameNodeArgs{
Hostname: pulumi.String("string"),
Port: pulumi.Int(0),
},
},
KerberosKeytab: pulumi.String("string"),
BlockSize: pulumi.Int(0),
KerberosKeytabBase64: pulumi.String("string"),
KerberosKrb5Conf: pulumi.String("string"),
KerberosKrb5ConfBase64: pulumi.String("string"),
KerberosPrincipal: pulumi.String("string"),
KmsKeyProviderUri: pulumi.String("string"),
AuthenticationType: pulumi.String("string"),
QopConfiguration: &datasync.LocationHdfsQopConfigurationArgs{
DataTransferProtection: pulumi.String("string"),
RpcProtection: pulumi.String("string"),
},
ReplicationFactor: pulumi.Int(0),
SimpleUser: pulumi.String("string"),
Subdirectory: pulumi.String("string"),
Tags: pulumi.StringMap{
"string": pulumi.String("string"),
},
})
var locationHdfsResource = new LocationHdfs("locationHdfsResource", LocationHdfsArgs.builder()
.agentArns("string")
.nameNodes(LocationHdfsNameNodeArgs.builder()
.hostname("string")
.port(0)
.build())
.kerberosKeytab("string")
.blockSize(0)
.kerberosKeytabBase64("string")
.kerberosKrb5Conf("string")
.kerberosKrb5ConfBase64("string")
.kerberosPrincipal("string")
.kmsKeyProviderUri("string")
.authenticationType("string")
.qopConfiguration(LocationHdfsQopConfigurationArgs.builder()
.dataTransferProtection("string")
.rpcProtection("string")
.build())
.replicationFactor(0)
.simpleUser("string")
.subdirectory("string")
.tags(Map.of("string", "string"))
.build());
location_hdfs_resource = aws.datasync.LocationHdfs("locationHdfsResource",
agent_arns=["string"],
name_nodes=[{
"hostname": "string",
"port": 0,
}],
kerberos_keytab="string",
block_size=0,
kerberos_keytab_base64="string",
kerberos_krb5_conf="string",
kerberos_krb5_conf_base64="string",
kerberos_principal="string",
kms_key_provider_uri="string",
authentication_type="string",
qop_configuration={
"data_transfer_protection": "string",
"rpc_protection": "string",
},
replication_factor=0,
simple_user="string",
subdirectory="string",
tags={
"string": "string",
})
const locationHdfsResource = new aws.datasync.LocationHdfs("locationHdfsResource", {
agentArns: ["string"],
nameNodes: [{
hostname: "string",
port: 0,
}],
kerberosKeytab: "string",
blockSize: 0,
kerberosKeytabBase64: "string",
kerberosKrb5Conf: "string",
kerberosKrb5ConfBase64: "string",
kerberosPrincipal: "string",
kmsKeyProviderUri: "string",
authenticationType: "string",
qopConfiguration: {
dataTransferProtection: "string",
rpcProtection: "string",
},
replicationFactor: 0,
simpleUser: "string",
subdirectory: "string",
tags: {
string: "string",
},
});
type: aws:datasync:LocationHdfs
properties:
agentArns:
- string
authenticationType: string
blockSize: 0
kerberosKeytab: string
kerberosKeytabBase64: string
kerberosKrb5Conf: string
kerberosKrb5ConfBase64: string
kerberosPrincipal: string
kmsKeyProviderUri: string
nameNodes:
- hostname: string
port: 0
qopConfiguration:
dataTransferProtection: string
rpcProtection: string
replicationFactor: 0
simpleUser: string
subdirectory: string
tags:
string: string
LocationHdfs Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The LocationHdfs resource accepts the following input properties:
- AgentArns List<string> - A list of DataSync Agent ARNs with which this location will be associated.
- NameNodes List<LocationHdfsNameNode> - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- AuthenticationType string - The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`.
- BlockSize int - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- KerberosKeytab string - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required.
- KerberosKeytabBase64 string - Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required.
- KerberosKrb5Conf string - The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required.
- KerberosKrb5ConfBase64 string - Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required.
- KerberosPrincipal string - The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
- KmsKeyProviderUri string - The URI of the HDFS cluster's Key Management Server (KMS).
- QopConfiguration LocationHdfsQopConfiguration - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- ReplicationFactor int - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- SimpleUser string - The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required.
- Subdirectory string - A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to `/`.
- Tags Dictionary<string, string> - Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- AgentArns []string - A list of DataSync Agent ARNs with which this location will be associated.
- NameNodes []LocationHdfsNameNodeArgs - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- AuthenticationType string - The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`.
- BlockSize int - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- KerberosKeytab string - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required.
- KerberosKeytabBase64 string - Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required.
- KerberosKrb5Conf string - The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required.
- KerberosKrb5ConfBase64 string - Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required.
- KerberosPrincipal string - The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
- KmsKeyProviderUri string - The URI of the HDFS cluster's Key Management Server (KMS).
- QopConfiguration LocationHdfsQopConfigurationArgs - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- ReplicationFactor int - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- SimpleUser string - The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required.
- Subdirectory string - A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to `/`.
- Tags map[string]string - Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- agentArns List<String> - A list of DataSync Agent ARNs with which this location will be associated.
- nameNodes List<LocationHdfsNameNode> - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- authenticationType String - The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`.
- blockSize Integer - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberosKeytab String - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required.
- kerberosKeytabBase64 String - Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required.
- kerberosKrb5Conf String - The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required.
- kerberosKrb5ConfBase64 String - Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required.
- kerberosPrincipal String - The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
- kmsKeyProviderUri String - The URI of the HDFS cluster's Key Management Server (KMS).
- qopConfiguration LocationHdfsQopConfiguration - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- replicationFactor Integer - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simpleUser String - The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required.
- subdirectory String - A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to `/`.
- tags Map<String,String> - Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- agentArns string[] - A list of DataSync Agent ARNs with which this location will be associated.
- nameNodes LocationHdfsNameNode[] - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- authenticationType string - The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`.
- blockSize number - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberosKeytab string - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required.
- kerberosKeytabBase64 string - Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required.
- kerberosKrb5Conf string - The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required.
- kerberosKrb5ConfBase64 string - Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required.
- kerberosPrincipal string - The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
- kmsKeyProviderUri string - The URI of the HDFS cluster's Key Management Server (KMS).
- qopConfiguration LocationHdfsQopConfiguration - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- replicationFactor number - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simpleUser string - The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required.
- subdirectory string - A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to `/`.
- tags {[key: string]: string} - Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- agent_arns Sequence[str] - A list of DataSync Agent ARNs with which this location will be associated.
- name_nodes Sequence[LocationHdfsNameNodeArgs] - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- authentication_type str - The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`.
- block_size int - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberos_keytab str - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required.
- kerberos_keytab_base64 str - Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required.
- kerberos_krb5_conf str - The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required.
- kerberos_krb5_conf_base64 str - Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required.
- kerberos_principal str - The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
- kms_key_provider_uri str - The URI of the HDFS cluster's Key Management Server (KMS).
- qop_configuration LocationHdfsQopConfigurationArgs - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- replication_factor int - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simple_user str - The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required.
- subdirectory str - A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to `/`.
- tags Mapping[str, str] - Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- agentArns List<String> - A list of DataSync Agent ARNs with which this location will be associated.
- nameNodes List<Property Map> - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- authenticationType String - The type of authentication used to determine the identity of the user. Valid values are `SIMPLE` and `KERBEROS`.
- blockSize Number - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberosKeytab String - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use `kerberos_keytab_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab_base64`) is required.
- kerberosKeytabBase64 String - Use instead of `kerberos_keytab` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_keytab`) is required.
- kerberosKrb5Conf String - The krb5.conf file that contains the Kerberos configuration information. Use `kerberos_krb5_conf_base64` instead whenever the value is not a valid UTF-8 string. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf_base64`) is required.
- kerberosKrb5ConfBase64 String - Use instead of `kerberos_krb5_conf` to pass base64-encoded binary data directly. If `KERBEROS` is specified for `authentication_type`, this parameter (or `kerberos_krb5_conf`) is required.
- kerberosPrincipal String - The Kerberos principal with access to the files and folders on the HDFS cluster. If `KERBEROS` is specified for `authentication_type`, this parameter is required.
- kmsKeyProviderUri String - The URI of the HDFS cluster's Key Management Server (KMS).
- qopConfiguration Property Map - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If `qop_configuration` isn't specified, `rpc_protection` and `data_transfer_protection` default to `PRIVACY`. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- replicationFactor Number - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simpleUser String - The user name used to identify the client on the host operating system. If `SIMPLE` is specified for `authentication_type`, this parameter is required.
- subdirectory String - A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to `/`.
- tags Map<String> - Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
Outputs
All input properties are implicitly available as output properties. Additionally, the LocationHdfs resource produces the following output properties: `id` (the provider-assigned unique ID for this managed resource), `arn` (the Amazon Resource Name of the DataSync location), `uri` (the URI of the HDFS location), and `tags_all` (a map of tags assigned to the resource, including those inherited from the provider `default_tags` configuration block). (Note: the detailed outputs table appears to have been lost in extraction; this list is reconstructed from the state parameters of the `get`/lookup signature below.)
Look up Existing LocationHdfs Resource
Get an existing LocationHdfs resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: LocationHdfsState, opts?: CustomResourceOptions): LocationHdfs
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
agent_arns: Optional[Sequence[str]] = None,
arn: Optional[str] = None,
authentication_type: Optional[str] = None,
block_size: Optional[int] = None,
kerberos_keytab: Optional[str] = None,
kerberos_keytab_base64: Optional[str] = None,
kerberos_krb5_conf: Optional[str] = None,
kerberos_krb5_conf_base64: Optional[str] = None,
kerberos_principal: Optional[str] = None,
kms_key_provider_uri: Optional[str] = None,
name_nodes: Optional[Sequence[LocationHdfsNameNodeArgs]] = None,
qop_configuration: Optional[LocationHdfsQopConfigurationArgs] = None,
replication_factor: Optional[int] = None,
simple_user: Optional[str] = None,
subdirectory: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None,
tags_all: Optional[Mapping[str, str]] = None,
uri: Optional[str] = None) -> LocationHdfs
func GetLocationHdfs(ctx *Context, name string, id IDInput, state *LocationHdfsState, opts ...ResourceOption) (*LocationHdfs, error)
public static LocationHdfs Get(string name, Input<string> id, LocationHdfsState? state, CustomResourceOptions? opts = null)
public static LocationHdfs get(String name, Output<String> id, LocationHdfsState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Agent
Arns List<string> - A list of DataSync Agent ARNs with which this location will be associated.
- Arn string
- Amazon Resource Name (ARN) of the DataSync Location.
- Authentication
Type string - The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS. - Block
Size int - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- Kerberos
Keytab string - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required. - Kerberos
Keytab Base64 string - Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required. - Kerberos
Krb5Conf string - The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required. - Kerberos
Krb5Conf Base64 string - Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required. - Kerberos
Principal string - The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required. - Kms
Key Provider Uri string - The URI of the HDFS cluster's Key Management Server (KMS).
- Name
Nodes List<LocationHdfs Name Node> - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- Qop
Configuration LocationHdfs Qop Configuration - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below. - Replication
Factor int - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- Simple
User string - The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required. - Subdirectory string
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- Tags Dictionary<string, string>
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Tags All Dictionary<string, string>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block. - Uri string
- Agent
Arns []string - A list of DataSync Agent ARNs with which this location will be associated.
- Arn string
- Amazon Resource Name (ARN) of the DataSync Location.
- Authentication
Type string - The type of authentication used to determine the identity of the user. Valid values are
SIMPLE
andKERBEROS
. - Block
Size int - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- Kerberos
Keytab string - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use
kerberos_keytab_base64
instead whenever the value is not a valid UTF-8 string. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_keytab_base64
) is required. - Kerberos
Keytab stringBase64 - Use instead of
kerberos_keytab
to pass base64-encoded binary data directly. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_keytab
) is required. - Kerberos
Krb5Conf string - The krb5.conf file that contains the Kerberos configuration information. Use
kerberos_krb5_conf_base64
instead whenever the value is not a valid UTF-8 string. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_krb5_conf_base64
) is required. - Kerberos
Krb5Conf stringBase64 - Use instead of
kerberos_krb5_conf
to pass base64-encoded binary data directly. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_krb5_conf
) is required. - Kerberos
Principal string - The Kerberos principal with access to the files and folders on the HDFS cluster. If
KERBEROS
is specified forauthentication_type
, this parameter is required. - Kms
Key stringProvider Uri - The URI of the HDFS cluster's Key Management Server (KMS).
- Name
Nodes []LocationHdfs Name Node Args - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- Qop
Configuration LocationHdfs Qop Configuration Args - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If
qop_configuration
isn't specified,rpc_protection
anddata_transfer_protection
default toPRIVACY
. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below. - Replication
Factor int - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- Simple
User string - The user name used to identify the client on the host operating system. If
SIMPLE
is specified forauthentication_type
, this parameter is required. - Subdirectory string
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- map[string]string
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - map[string]string
- A map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - Uri string
- agent
Arns List<String> - A list of DataSync Agent ARNs with which this location will be associated.
- arn String
- Amazon Resource Name (ARN) of the DataSync Location.
- authentication
Type String - The type of authentication used to determine the identity of the user. Valid values are
SIMPLE
andKERBEROS
. - block
Size Integer - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberos
Keytab String - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use
kerberos_keytab_base64
instead whenever the value is not a valid UTF-8 string. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_keytab_base64
) is required. - kerberos
Keytab StringBase64 - Use instead of
kerberos_keytab
to pass base64-encoded binary data directly. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_keytab
) is required. - kerberos
Krb5Conf String - The krb5.conf file that contains the Kerberos configuration information. Use
kerberos_krb5_conf_base64
instead whenever the value is not a valid UTF-8 string. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_krb5_conf_base64
) is required. - kerberos
Krb5Conf StringBase64 - Use instead of
kerberos_krb5_conf
to pass base64-encoded binary data directly. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_krb5_conf
) is required. - kerberos
Principal String - The Kerberos principal with access to the files and folders on the HDFS cluster. If
KERBEROS
is specified forauthentication_type
, this parameter is required. - kms
Key StringProvider Uri - The URI of the HDFS cluster's Key Management Server (KMS).
- name
Nodes List<LocationHdfs Name Node> - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- qop
Configuration LocationHdfs Qop Configuration - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If
qop_configuration
isn't specified,rpc_protection
anddata_transfer_protection
default toPRIVACY
. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below. - replication
Factor Integer - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simple
User String - The user name used to identify the client on the host operating system. If
SIMPLE
is specified forauthentication_type
, this parameter is required. - subdirectory String
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- Map<String,String>
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Map<String,String>
- A map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - uri String
- agent
Arns string[] - A list of DataSync Agent ARNs with which this location will be associated.
- arn string
- Amazon Resource Name (ARN) of the DataSync Location.
- authentication
Type string - The type of authentication used to determine the identity of the user. Valid values are
SIMPLE
andKERBEROS
. - block
Size number - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberos
Keytab string - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use
kerberos_keytab_base64
instead whenever the value is not a valid UTF-8 string. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_keytab_base64
) is required. - kerberos
Keytab stringBase64 - Use instead of
kerberos_keytab
to pass base64-encoded binary data directly. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_keytab
) is required. - kerberos
Krb5Conf string - The krb5.conf file that contains the Kerberos configuration information. Use
kerberos_krb5_conf_base64
instead whenever the value is not a valid UTF-8 string. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_krb5_conf_base64
) is required. - kerberos
Krb5Conf stringBase64 - Use instead of
kerberos_krb5_conf
to pass base64-encoded binary data directly. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_krb5_conf
) is required. - kerberos
Principal string - The Kerberos principal with access to the files and folders on the HDFS cluster. If
KERBEROS
is specified forauthentication_type
, this parameter is required. - kms
Key stringProvider Uri - The URI of the HDFS cluster's Key Management Server (KMS).
- name
Nodes LocationHdfs Name Node[] - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- qop
Configuration LocationHdfs Qop Configuration - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If
qop_configuration
isn't specified,rpc_protection
anddata_transfer_protection
default toPRIVACY
. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below. - replication
Factor number - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simple
User string - The user name used to identify the client on the host operating system. If
SIMPLE
is specified forauthentication_type
, this parameter is required. - subdirectory string
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- {[key: string]: string}
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - {[key: string]: string}
- A map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - uri string
- agent_
arns Sequence[str] - A list of DataSync Agent ARNs with which this location will be associated.
- arn str
- Amazon Resource Name (ARN) of the DataSync Location.
- authentication_
type str - The type of authentication used to determine the identity of the user. Valid values are
SIMPLE
andKERBEROS
. - block_
size int - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberos_
keytab str - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use
kerberos_keytab_base64
instead whenever the value is not a valid UTF-8 string. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_keytab_base64
) is required. - kerberos_
keytab_ strbase64 - Use instead of
kerberos_keytab
to pass base64-encoded binary data directly. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_keytab
) is required. - kerberos_
krb5_conf str - The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required. - kerberos_
krb5_conf_base64 str - Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required. - kerberos_
principal str - The Kerberos principal with access to the files and folders on the HDFS cluster. If
KERBEROS
is specified forauthentication_type
, this parameter is required. - kms_
key_provider_uri str - The URI of the HDFS cluster's Key Management Server (KMS).
- name_
nodes Sequence[LocationHdfs Name Node Args] - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- qop_
configuration LocationHdfs Qop Configuration Args - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If
qop_configuration
isn't specified,rpc_protection
anddata_transfer_protection
default toPRIVACY
. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below. - replication_
factor int - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simple_
user str - The user name used to identify the client on the host operating system. If
SIMPLE
is specified forauthentication_type
, this parameter is required. - subdirectory str
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- Mapping[str, str]
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Mapping[str, str]
- A map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - uri str
- agent
Arns List<String> - A list of DataSync Agent ARNs with which this location will be associated.
- arn String
- Amazon Resource Name (ARN) of the DataSync Location.
- authentication
Type String - The type of authentication used to determine the identity of the user. Valid values are
SIMPLE
andKERBEROS
. - block
Size Number - The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberos
Keytab String - The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use
kerberos_keytab_base64
instead whenever the value is not a valid UTF-8 string. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_keytab_base64
) is required. - kerberos
Keytab StringBase64 - Use instead of
kerberos_keytab
to pass base64-encoded binary data directly. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_keytab
) is required. - kerberos
Krb5Conf String - The krb5.conf file that contains the Kerberos configuration information. Use
kerberos_krb5_conf_base64
instead whenever the value is not a valid UTF-8 string. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_krb5_conf_base64
) is required. - kerberos
Krb5Conf StringBase64 - Use instead of
kerberos_krb5_conf
to pass base64-encoded binary data directly. IfKERBEROS
is specified forauthentication_type
, this parameter (orkerberos_krb5_conf
) is required. - kerberos
Principal String - The Kerberos principal with access to the files and folders on the HDFS cluster. If
KERBEROS
is specified forauthentication_type
, this parameter is required. - kms
Key StringProvider Uri - The URI of the HDFS cluster's Key Management Server (KMS).
- name
Nodes List<Property Map> - The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- qop
Configuration Property Map - The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If
qop_configuration
isn't specified,rpc_protection
anddata_transfer_protection
default toPRIVACY
. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below. - replication
Factor Number - The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simple
User String - The user name used to identify the client on the host operating system. If
SIMPLE
is specified forauthentication_type
, this parameter is required. - subdirectory String
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- Map<String>
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider
default_tags
configuration block present, tags with matching keys will overwrite those defined at the provider-level. - Map<String>
- A map of tags assigned to the resource, including those inherited from the provider
default_tags
configuration block. - uri String
Supporting Types
LocationHdfsNameNode, LocationHdfsNameNodeArgs
- Hostname string
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- Port int
- The port that the NameNode uses to listen to client requests.
- Hostname string
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- Port int
- The port that the NameNode uses to listen to client requests.
- hostname String
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- port Integer
- The port that the NameNode uses to listen to client requests.
- hostname string
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- port number
- The port that the NameNode uses to listen to client requests.
- hostname str
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- port int
- The port that the NameNode uses to listen to client requests.
- hostname String
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- port Number
- The port that the NameNode uses to listen to client requests.
LocationHdfsQopConfiguration, LocationHdfsQopConfigurationArgs
- Data
Transfer stringProtection - The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are
DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY. - Rpc
Protection string - The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are
DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- Data
Transfer stringProtection - The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are
DISABLED
,AUTHENTICATION
,INTEGRITY
andPRIVACY
. - Rpc
Protection string - The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are
DISABLED
,AUTHENTICATION
,INTEGRITY
andPRIVACY
.
- data
Transfer StringProtection - The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are
DISABLED
,AUTHENTICATION
,INTEGRITY
andPRIVACY
. - rpc
Protection String - The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are
DISABLED
,AUTHENTICATION
,INTEGRITY
andPRIVACY
.
- data
Transfer stringProtection - The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are
DISABLED
,AUTHENTICATION
,INTEGRITY
andPRIVACY
. - rpc
Protection string - The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are
DISABLED
,AUTHENTICATION
,INTEGRITY
andPRIVACY
.
- data_
transfer_protection str - The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are
DISABLED
,AUTHENTICATION
,INTEGRITY
andPRIVACY
. - rpc_
protection str - The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are
DISABLED
,AUTHENTICATION
,INTEGRITY
andPRIVACY
.
- data
Transfer StringProtection - The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are
DISABLED
,AUTHENTICATION
,INTEGRITY
andPRIVACY
. - rpc
Protection String - The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are
DISABLED
,AUTHENTICATION
,INTEGRITY
andPRIVACY
.
Import
Using pulumi import
, import aws_datasync_location_hdfs
using the Amazon Resource Name (ARN). For example:
$ pulumi import aws:datasync/locationHdfs:LocationHdfs example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
aws
Terraform Provider.