We recommend using the Azure Native provider where an equivalent resource exists; this page documents the classic Azure provider.
azure.hdinsight.SparkCluster
Explore with Pulumi AI
Manages an HDInsight Spark Cluster.
Example Usage
// Example: deploy an HDInsight Spark cluster with an Azure Storage
// account/container as its default cluster storage.
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";

// Resource group that contains every resource in this example.
const example = new azure.core.ResourceGroup("example", {
    name: "example-resources",
    location: "West Europe",
});
// Storage account backing the cluster's default storage.
const exampleAccount = new azure.storage.Account("example", {
    name: "hdinsightstor",
    resourceGroupName: example.name,
    location: example.location,
    accountTier: "Standard",
    accountReplicationType: "LRS",
});
// Private blob container used as the cluster's default storage container.
const exampleContainer = new azure.storage.Container("example", {
    name: "hdinsight",
    storageAccountName: exampleAccount.name,
    containerAccessType: "private",
});
// The Spark cluster itself: component versions, gateway credentials,
// default storage, and per-role node sizing/credentials.
const exampleSparkCluster = new azure.hdinsight.SparkCluster("example", {
    name: "example-hdicluster",
    resourceGroupName: example.name,
    location: example.location,
    clusterVersion: "3.6",
    tier: "Standard",
    componentVersion: {
        spark: "2.3",
    },
    // Login credentials for the cluster gateway endpoint.
    gateway: {
        username: "acctestusrgw",
        password: "Password123!",
    },
    // Wire the container above in as the default storage account.
    storageAccounts: [{
        storageContainerId: exampleContainer.id,
        storageAccountKey: exampleAccount.primaryAccessKey,
        isDefault: true,
    }],
    // Head, worker, and Zookeeper node sizes and login credentials.
    roles: {
        headNode: {
            vmSize: "Standard_A3",
            username: "acctestusrvm",
            password: "AccTestvdSC4daf986!",
        },
        workerNode: {
            vmSize: "Standard_A3",
            username: "acctestusrvm",
            password: "AccTestvdSC4daf986!",
            targetInstanceCount: 3,
        },
        zookeeperNode: {
            vmSize: "Medium",
            username: "acctestusrvm",
            password: "AccTestvdSC4daf986!",
        },
    },
});
# Example: deploy an HDInsight Spark cluster with an Azure Storage
# account/container as its default cluster storage.
import pulumi
import pulumi_azure as azure

# Resource group that contains every resource in this example.
example = azure.core.ResourceGroup("example",
    name="example-resources",
    location="West Europe")
# Storage account backing the cluster's default storage.
example_account = azure.storage.Account("example",
    name="hdinsightstor",
    resource_group_name=example.name,
    location=example.location,
    account_tier="Standard",
    account_replication_type="LRS")
# Private blob container used as the cluster's default storage container.
example_container = azure.storage.Container("example",
    name="hdinsight",
    storage_account_name=example_account.name,
    container_access_type="private")
# The Spark cluster itself: component versions, gateway credentials,
# default storage, and per-role node sizing/credentials.
example_spark_cluster = azure.hdinsight.SparkCluster("example",
    name="example-hdicluster",
    resource_group_name=example.name,
    location=example.location,
    cluster_version="3.6",
    tier="Standard",
    component_version={
        "spark": "2.3",
    },
    # Login credentials for the cluster gateway endpoint.
    gateway={
        "username": "acctestusrgw",
        "password": "Password123!",
    },
    # Wire the container above in as the default storage account.
    storage_accounts=[{
        "storage_container_id": example_container.id,
        "storage_account_key": example_account.primary_access_key,
        "is_default": True,
    }],
    # Head, worker, and Zookeeper node sizes and login credentials.
    roles={
        "head_node": {
            "vm_size": "Standard_A3",
            "username": "acctestusrvm",
            "password": "AccTestvdSC4daf986!",
        },
        "worker_node": {
            "vm_size": "Standard_A3",
            "username": "acctestusrvm",
            "password": "AccTestvdSC4daf986!",
            "target_instance_count": 3,
        },
        "zookeeper_node": {
            "vm_size": "Medium",
            "username": "acctestusrvm",
            "password": "AccTestvdSC4daf986!",
        },
    })
// Example: deploy an HDInsight Spark cluster with an Azure Storage
// account/container as its default cluster storage.
package main

import (
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/hdinsight"
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/storage"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Resource group that contains every resource in this example.
		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
			Name:     pulumi.String("example-resources"),
			Location: pulumi.String("West Europe"),
		})
		if err != nil {
			return err
		}
		// Storage account backing the cluster's default storage.
		exampleAccount, err := storage.NewAccount(ctx, "example", &storage.AccountArgs{
			Name:                   pulumi.String("hdinsightstor"),
			ResourceGroupName:      example.Name,
			Location:               example.Location,
			AccountTier:            pulumi.String("Standard"),
			AccountReplicationType: pulumi.String("LRS"),
		})
		if err != nil {
			return err
		}
		// Private blob container used as the cluster's default storage container.
		exampleContainer, err := storage.NewContainer(ctx, "example", &storage.ContainerArgs{
			Name:                pulumi.String("hdinsight"),
			StorageAccountName:  exampleAccount.Name,
			ContainerAccessType: pulumi.String("private"),
		})
		if err != nil {
			return err
		}
		// The Spark cluster itself: component versions, gateway credentials,
		// default storage, and per-role node sizing/credentials.
		_, err = hdinsight.NewSparkCluster(ctx, "example", &hdinsight.SparkClusterArgs{
			Name:              pulumi.String("example-hdicluster"),
			ResourceGroupName: example.Name,
			Location:          example.Location,
			ClusterVersion:    pulumi.String("3.6"),
			Tier:              pulumi.String("Standard"),
			ComponentVersion: &hdinsight.SparkClusterComponentVersionArgs{
				Spark: pulumi.String("2.3"),
			},
			// Login credentials for the cluster gateway endpoint.
			Gateway: &hdinsight.SparkClusterGatewayArgs{
				Username: pulumi.String("acctestusrgw"),
				Password: pulumi.String("Password123!"),
			},
			// Wire the container above in as the default storage account.
			StorageAccounts: hdinsight.SparkClusterStorageAccountArray{
				&hdinsight.SparkClusterStorageAccountArgs{
					StorageContainerId: exampleContainer.ID(),
					StorageAccountKey:  exampleAccount.PrimaryAccessKey,
					IsDefault:          pulumi.Bool(true),
				},
			},
			// Head, worker, and Zookeeper node sizes and login credentials.
			Roles: &hdinsight.SparkClusterRolesArgs{
				HeadNode: &hdinsight.SparkClusterRolesHeadNodeArgs{
					VmSize:   pulumi.String("Standard_A3"),
					Username: pulumi.String("acctestusrvm"),
					Password: pulumi.String("AccTestvdSC4daf986!"),
				},
				WorkerNode: &hdinsight.SparkClusterRolesWorkerNodeArgs{
					VmSize:              pulumi.String("Standard_A3"),
					Username:            pulumi.String("acctestusrvm"),
					Password:            pulumi.String("AccTestvdSC4daf986!"),
					TargetInstanceCount: pulumi.Int(3),
				},
				ZookeeperNode: &hdinsight.SparkClusterRolesZookeeperNodeArgs{
					VmSize:   pulumi.String("Medium"),
					Username: pulumi.String("acctestusrvm"),
					Password: pulumi.String("AccTestvdSC4daf986!"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// Example: deploy an HDInsight Spark cluster with an Azure Storage
// account/container as its default cluster storage.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;

return await Deployment.RunAsync(() =>
{
    // Resource group that contains every resource in this example.
    var example = new Azure.Core.ResourceGroup("example", new()
    {
        Name = "example-resources",
        Location = "West Europe",
    });
    // Storage account backing the cluster's default storage.
    var exampleAccount = new Azure.Storage.Account("example", new()
    {
        Name = "hdinsightstor",
        ResourceGroupName = example.Name,
        Location = example.Location,
        AccountTier = "Standard",
        AccountReplicationType = "LRS",
    });
    // Private blob container used as the cluster's default storage container.
    var exampleContainer = new Azure.Storage.Container("example", new()
    {
        Name = "hdinsight",
        StorageAccountName = exampleAccount.Name,
        ContainerAccessType = "private",
    });
    // The Spark cluster itself: component versions, gateway credentials,
    // default storage, and per-role node sizing/credentials.
    var exampleSparkCluster = new Azure.HDInsight.SparkCluster("example", new()
    {
        Name = "example-hdicluster",
        ResourceGroupName = example.Name,
        Location = example.Location,
        ClusterVersion = "3.6",
        Tier = "Standard",
        ComponentVersion = new Azure.HDInsight.Inputs.SparkClusterComponentVersionArgs
        {
            Spark = "2.3",
        },
        // Login credentials for the cluster gateway endpoint.
        Gateway = new Azure.HDInsight.Inputs.SparkClusterGatewayArgs
        {
            Username = "acctestusrgw",
            Password = "Password123!",
        },
        // Wire the container above in as the default storage account.
        StorageAccounts = new[]
        {
            new Azure.HDInsight.Inputs.SparkClusterStorageAccountArgs
            {
                StorageContainerId = exampleContainer.Id,
                StorageAccountKey = exampleAccount.PrimaryAccessKey,
                IsDefault = true,
            },
        },
        // Head, worker, and Zookeeper node sizes and login credentials.
        Roles = new Azure.HDInsight.Inputs.SparkClusterRolesArgs
        {
            HeadNode = new Azure.HDInsight.Inputs.SparkClusterRolesHeadNodeArgs
            {
                VmSize = "Standard_A3",
                Username = "acctestusrvm",
                Password = "AccTestvdSC4daf986!",
            },
            WorkerNode = new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeArgs
            {
                VmSize = "Standard_A3",
                Username = "acctestusrvm",
                Password = "AccTestvdSC4daf986!",
                TargetInstanceCount = 3,
            },
            ZookeeperNode = new Azure.HDInsight.Inputs.SparkClusterRolesZookeeperNodeArgs
            {
                VmSize = "Medium",
                Username = "acctestusrvm",
                Password = "AccTestvdSC4daf986!",
            },
        },
    });
});
// Example: deploy an HDInsight Spark cluster with an Azure Storage
// account/container as its default cluster storage.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.storage.Account;
import com.pulumi.azure.storage.AccountArgs;
import com.pulumi.azure.storage.Container;
import com.pulumi.azure.storage.ContainerArgs;
import com.pulumi.azure.hdinsight.SparkCluster;
import com.pulumi.azure.hdinsight.SparkClusterArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterComponentVersionArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterGatewayArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterStorageAccountArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesHeadNodeArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesWorkerNodeArgs;
import com.pulumi.azure.hdinsight.inputs.SparkClusterRolesZookeeperNodeArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Resource group that contains every resource in this example.
        var example = new ResourceGroup("example", ResourceGroupArgs.builder()
            .name("example-resources")
            .location("West Europe")
            .build());
        // Storage account backing the cluster's default storage.
        var exampleAccount = new Account("exampleAccount", AccountArgs.builder()
            .name("hdinsightstor")
            .resourceGroupName(example.name())
            .location(example.location())
            .accountTier("Standard")
            .accountReplicationType("LRS")
            .build());
        // Private blob container used as the cluster's default storage container.
        var exampleContainer = new Container("exampleContainer", ContainerArgs.builder()
            .name("hdinsight")
            .storageAccountName(exampleAccount.name())
            .containerAccessType("private")
            .build());
        // The Spark cluster itself: component versions, gateway credentials,
        // default storage, and per-role node sizing/credentials.
        var exampleSparkCluster = new SparkCluster("exampleSparkCluster", SparkClusterArgs.builder()
            .name("example-hdicluster")
            .resourceGroupName(example.name())
            .location(example.location())
            .clusterVersion("3.6")
            .tier("Standard")
            .componentVersion(SparkClusterComponentVersionArgs.builder()
                .spark("2.3")
                .build())
            // Login credentials for the cluster gateway endpoint.
            .gateway(SparkClusterGatewayArgs.builder()
                .username("acctestusrgw")
                .password("Password123!")
                .build())
            // Wire the container above in as the default storage account.
            .storageAccounts(SparkClusterStorageAccountArgs.builder()
                .storageContainerId(exampleContainer.id())
                .storageAccountKey(exampleAccount.primaryAccessKey())
                .isDefault(true)
                .build())
            // Head, worker, and Zookeeper node sizes and login credentials.
            .roles(SparkClusterRolesArgs.builder()
                .headNode(SparkClusterRolesHeadNodeArgs.builder()
                    .vmSize("Standard_A3")
                    .username("acctestusrvm")
                    .password("AccTestvdSC4daf986!")
                    .build())
                .workerNode(SparkClusterRolesWorkerNodeArgs.builder()
                    .vmSize("Standard_A3")
                    .username("acctestusrvm")
                    .password("AccTestvdSC4daf986!")
                    .targetInstanceCount(3)
                    .build())
                .zookeeperNode(SparkClusterRolesZookeeperNodeArgs.builder()
                    .vmSize("Medium")
                    .username("acctestusrvm")
                    .password("AccTestvdSC4daf986!")
                    .build())
                .build())
            .build());
    }
}
# Example: deploy an HDInsight Spark cluster with an Azure Storage
# account/container as its default cluster storage.
resources:
  # Resource group that contains every resource in this example.
  example:
    type: azure:core:ResourceGroup
    properties:
      name: example-resources
      location: West Europe
  # Storage account backing the cluster's default storage.
  exampleAccount:
    type: azure:storage:Account
    name: example
    properties:
      name: hdinsightstor
      resourceGroupName: ${example.name}
      location: ${example.location}
      accountTier: Standard
      accountReplicationType: LRS
  # Private blob container used as the cluster's default storage container.
  exampleContainer:
    type: azure:storage:Container
    name: example
    properties:
      name: hdinsight
      storageAccountName: ${exampleAccount.name}
      containerAccessType: private
  # The Spark cluster itself: component versions, gateway credentials,
  # default storage, and per-role node sizing/credentials.
  exampleSparkCluster:
    type: azure:hdinsight:SparkCluster
    name: example
    properties:
      name: example-hdicluster
      resourceGroupName: ${example.name}
      location: ${example.location}
      clusterVersion: '3.6'
      tier: Standard
      componentVersion:
        spark: '2.3'
      # Login credentials for the cluster gateway endpoint.
      gateway:
        username: acctestusrgw
        password: Password123!
      # Wire the container above in as the default storage account.
      storageAccounts:
        - storageContainerId: ${exampleContainer.id}
          storageAccountKey: ${exampleAccount.primaryAccessKey}
          isDefault: true
      # Head, worker, and Zookeeper node sizes and login credentials.
      roles:
        headNode:
          vmSize: Standard_A3
          username: acctestusrvm
          password: AccTestvdSC4daf986!
        workerNode:
          vmSize: Standard_A3
          username: acctestusrvm
          password: AccTestvdSC4daf986!
          targetInstanceCount: 3
        zookeeperNode:
          vmSize: Medium
          username: acctestusrvm
          password: AccTestvdSC4daf986!
Create SparkCluster Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new SparkCluster(name: string, args: SparkClusterArgs, opts?: CustomResourceOptions);
@overload
def SparkCluster(resource_name: str,
args: SparkClusterArgs,
opts: Optional[ResourceOptions] = None)
@overload
def SparkCluster(resource_name: str,
opts: Optional[ResourceOptions] = None,
gateway: Optional[SparkClusterGatewayArgs] = None,
component_version: Optional[SparkClusterComponentVersionArgs] = None,
tier: Optional[str] = None,
roles: Optional[SparkClusterRolesArgs] = None,
resource_group_name: Optional[str] = None,
cluster_version: Optional[str] = None,
name: Optional[str] = None,
encryption_in_transit_enabled: Optional[bool] = None,
metastores: Optional[SparkClusterMetastoresArgs] = None,
monitor: Optional[SparkClusterMonitorArgs] = None,
extension: Optional[SparkClusterExtensionArgs] = None,
network: Optional[SparkClusterNetworkArgs] = None,
private_link_configuration: Optional[SparkClusterPrivateLinkConfigurationArgs] = None,
location: Optional[str] = None,
disk_encryptions: Optional[Sequence[SparkClusterDiskEncryptionArgs]] = None,
security_profile: Optional[SparkClusterSecurityProfileArgs] = None,
storage_account_gen2: Optional[SparkClusterStorageAccountGen2Args] = None,
storage_accounts: Optional[Sequence[SparkClusterStorageAccountArgs]] = None,
tags: Optional[Mapping[str, str]] = None,
compute_isolation: Optional[SparkClusterComputeIsolationArgs] = None,
tls_min_version: Optional[str] = None)
func NewSparkCluster(ctx *Context, name string, args SparkClusterArgs, opts ...ResourceOption) (*SparkCluster, error)
public SparkCluster(string name, SparkClusterArgs args, CustomResourceOptions? opts = null)
public SparkCluster(String name, SparkClusterArgs args)
public SparkCluster(String name, SparkClusterArgs args, CustomResourceOptions options)
type: azure:hdinsight:SparkCluster
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args SparkClusterArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args SparkClusterArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args SparkClusterArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args SparkClusterArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args SparkClusterArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var sparkClusterResource = new Azure.HDInsight.SparkCluster("sparkClusterResource", new()
{
Gateway = new Azure.HDInsight.Inputs.SparkClusterGatewayArgs
{
Password = "string",
Username = "string",
},
ComponentVersion = new Azure.HDInsight.Inputs.SparkClusterComponentVersionArgs
{
Spark = "string",
},
Tier = "string",
Roles = new Azure.HDInsight.Inputs.SparkClusterRolesArgs
{
HeadNode = new Azure.HDInsight.Inputs.SparkClusterRolesHeadNodeArgs
{
Username = "string",
VmSize = "string",
Password = "string",
ScriptActions = new[]
{
new Azure.HDInsight.Inputs.SparkClusterRolesHeadNodeScriptActionArgs
{
Name = "string",
Uri = "string",
Parameters = "string",
},
},
SshKeys = new[]
{
"string",
},
SubnetId = "string",
VirtualNetworkId = "string",
},
WorkerNode = new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeArgs
{
TargetInstanceCount = 0,
Username = "string",
VmSize = "string",
Autoscale = new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeAutoscaleArgs
{
Capacity = new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeAutoscaleCapacityArgs
{
MaxInstanceCount = 0,
MinInstanceCount = 0,
},
Recurrence = new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeAutoscaleRecurrenceArgs
{
Schedules = new[]
{
new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeAutoscaleRecurrenceScheduleArgs
{
Days = new[]
{
"string",
},
TargetInstanceCount = 0,
Time = "string",
},
},
Timezone = "string",
},
},
Password = "string",
ScriptActions = new[]
{
new Azure.HDInsight.Inputs.SparkClusterRolesWorkerNodeScriptActionArgs
{
Name = "string",
Uri = "string",
Parameters = "string",
},
},
SshKeys = new[]
{
"string",
},
SubnetId = "string",
VirtualNetworkId = "string",
},
ZookeeperNode = new Azure.HDInsight.Inputs.SparkClusterRolesZookeeperNodeArgs
{
Username = "string",
VmSize = "string",
Password = "string",
ScriptActions = new[]
{
new Azure.HDInsight.Inputs.SparkClusterRolesZookeeperNodeScriptActionArgs
{
Name = "string",
Uri = "string",
Parameters = "string",
},
},
SshKeys = new[]
{
"string",
},
SubnetId = "string",
VirtualNetworkId = "string",
},
},
ResourceGroupName = "string",
ClusterVersion = "string",
Name = "string",
EncryptionInTransitEnabled = false,
Metastores = new Azure.HDInsight.Inputs.SparkClusterMetastoresArgs
{
Ambari = new Azure.HDInsight.Inputs.SparkClusterMetastoresAmbariArgs
{
DatabaseName = "string",
Password = "string",
Server = "string",
Username = "string",
},
Hive = new Azure.HDInsight.Inputs.SparkClusterMetastoresHiveArgs
{
DatabaseName = "string",
Password = "string",
Server = "string",
Username = "string",
},
Oozie = new Azure.HDInsight.Inputs.SparkClusterMetastoresOozieArgs
{
DatabaseName = "string",
Password = "string",
Server = "string",
Username = "string",
},
},
Monitor = new Azure.HDInsight.Inputs.SparkClusterMonitorArgs
{
LogAnalyticsWorkspaceId = "string",
PrimaryKey = "string",
},
Extension = new Azure.HDInsight.Inputs.SparkClusterExtensionArgs
{
LogAnalyticsWorkspaceId = "string",
PrimaryKey = "string",
},
Network = new Azure.HDInsight.Inputs.SparkClusterNetworkArgs
{
ConnectionDirection = "string",
PrivateLinkEnabled = false,
},
PrivateLinkConfiguration = new Azure.HDInsight.Inputs.SparkClusterPrivateLinkConfigurationArgs
{
GroupId = "string",
IpConfiguration = new Azure.HDInsight.Inputs.SparkClusterPrivateLinkConfigurationIpConfigurationArgs
{
Name = "string",
Primary = false,
PrivateIpAddress = "string",
PrivateIpAllocationMethod = "string",
SubnetId = "string",
},
Name = "string",
},
Location = "string",
DiskEncryptions = new[]
{
new Azure.HDInsight.Inputs.SparkClusterDiskEncryptionArgs
{
EncryptionAlgorithm = "string",
EncryptionAtHostEnabled = false,
KeyVaultKeyId = "string",
KeyVaultManagedIdentityId = "string",
},
},
SecurityProfile = new Azure.HDInsight.Inputs.SparkClusterSecurityProfileArgs
{
AaddsResourceId = "string",
DomainName = "string",
DomainUserPassword = "string",
DomainUsername = "string",
LdapsUrls = new[]
{
"string",
},
MsiResourceId = "string",
ClusterUsersGroupDns = new[]
{
"string",
},
},
StorageAccountGen2 = new Azure.HDInsight.Inputs.SparkClusterStorageAccountGen2Args
{
FilesystemId = "string",
IsDefault = false,
ManagedIdentityResourceId = "string",
StorageResourceId = "string",
},
StorageAccounts = new[]
{
new Azure.HDInsight.Inputs.SparkClusterStorageAccountArgs
{
IsDefault = false,
StorageAccountKey = "string",
StorageContainerId = "string",
StorageResourceId = "string",
},
},
Tags =
{
{ "string", "string" },
},
ComputeIsolation = new Azure.HDInsight.Inputs.SparkClusterComputeIsolationArgs
{
ComputeIsolationEnabled = false,
HostSku = "string",
},
TlsMinVersion = "string",
});
example, err := hdinsight.NewSparkCluster(ctx, "sparkClusterResource", &hdinsight.SparkClusterArgs{
Gateway: &hdinsight.SparkClusterGatewayArgs{
Password: pulumi.String("string"),
Username: pulumi.String("string"),
},
ComponentVersion: &hdinsight.SparkClusterComponentVersionArgs{
Spark: pulumi.String("string"),
},
Tier: pulumi.String("string"),
Roles: &hdinsight.SparkClusterRolesArgs{
HeadNode: &hdinsight.SparkClusterRolesHeadNodeArgs{
Username: pulumi.String("string"),
VmSize: pulumi.String("string"),
Password: pulumi.String("string"),
ScriptActions: hdinsight.SparkClusterRolesHeadNodeScriptActionArray{
&hdinsight.SparkClusterRolesHeadNodeScriptActionArgs{
Name: pulumi.String("string"),
Uri: pulumi.String("string"),
Parameters: pulumi.String("string"),
},
},
SshKeys: pulumi.StringArray{
pulumi.String("string"),
},
SubnetId: pulumi.String("string"),
VirtualNetworkId: pulumi.String("string"),
},
WorkerNode: &hdinsight.SparkClusterRolesWorkerNodeArgs{
TargetInstanceCount: pulumi.Int(0),
Username: pulumi.String("string"),
VmSize: pulumi.String("string"),
Autoscale: &hdinsight.SparkClusterRolesWorkerNodeAutoscaleArgs{
Capacity: &hdinsight.SparkClusterRolesWorkerNodeAutoscaleCapacityArgs{
MaxInstanceCount: pulumi.Int(0),
MinInstanceCount: pulumi.Int(0),
},
Recurrence: &hdinsight.SparkClusterRolesWorkerNodeAutoscaleRecurrenceArgs{
Schedules: hdinsight.SparkClusterRolesWorkerNodeAutoscaleRecurrenceScheduleArray{
&hdinsight.SparkClusterRolesWorkerNodeAutoscaleRecurrenceScheduleArgs{
Days: pulumi.StringArray{
pulumi.String("string"),
},
TargetInstanceCount: pulumi.Int(0),
Time: pulumi.String("string"),
},
},
Timezone: pulumi.String("string"),
},
},
Password: pulumi.String("string"),
ScriptActions: hdinsight.SparkClusterRolesWorkerNodeScriptActionArray{
&hdinsight.SparkClusterRolesWorkerNodeScriptActionArgs{
Name: pulumi.String("string"),
Uri: pulumi.String("string"),
Parameters: pulumi.String("string"),
},
},
SshKeys: pulumi.StringArray{
pulumi.String("string"),
},
SubnetId: pulumi.String("string"),
VirtualNetworkId: pulumi.String("string"),
},
ZookeeperNode: &hdinsight.SparkClusterRolesZookeeperNodeArgs{
Username: pulumi.String("string"),
VmSize: pulumi.String("string"),
Password: pulumi.String("string"),
ScriptActions: hdinsight.SparkClusterRolesZookeeperNodeScriptActionArray{
&hdinsight.SparkClusterRolesZookeeperNodeScriptActionArgs{
Name: pulumi.String("string"),
Uri: pulumi.String("string"),
Parameters: pulumi.String("string"),
},
},
SshKeys: pulumi.StringArray{
pulumi.String("string"),
},
SubnetId: pulumi.String("string"),
VirtualNetworkId: pulumi.String("string"),
},
},
ResourceGroupName: pulumi.String("string"),
ClusterVersion: pulumi.String("string"),
Name: pulumi.String("string"),
EncryptionInTransitEnabled: pulumi.Bool(false),
Metastores: &hdinsight.SparkClusterMetastoresArgs{
Ambari: &hdinsight.SparkClusterMetastoresAmbariArgs{
DatabaseName: pulumi.String("string"),
Password: pulumi.String("string"),
Server: pulumi.String("string"),
Username: pulumi.String("string"),
},
Hive: &hdinsight.SparkClusterMetastoresHiveArgs{
DatabaseName: pulumi.String("string"),
Password: pulumi.String("string"),
Server: pulumi.String("string"),
Username: pulumi.String("string"),
},
Oozie: &hdinsight.SparkClusterMetastoresOozieArgs{
DatabaseName: pulumi.String("string"),
Password: pulumi.String("string"),
Server: pulumi.String("string"),
Username: pulumi.String("string"),
},
},
Monitor: &hdinsight.SparkClusterMonitorArgs{
LogAnalyticsWorkspaceId: pulumi.String("string"),
PrimaryKey: pulumi.String("string"),
},
Extension: &hdinsight.SparkClusterExtensionArgs{
LogAnalyticsWorkspaceId: pulumi.String("string"),
PrimaryKey: pulumi.String("string"),
},
Network: &hdinsight.SparkClusterNetworkArgs{
ConnectionDirection: pulumi.String("string"),
PrivateLinkEnabled: pulumi.Bool(false),
},
PrivateLinkConfiguration: &hdinsight.SparkClusterPrivateLinkConfigurationArgs{
GroupId: pulumi.String("string"),
IpConfiguration: &hdinsight.SparkClusterPrivateLinkConfigurationIpConfigurationArgs{
Name: pulumi.String("string"),
Primary: pulumi.Bool(false),
PrivateIpAddress: pulumi.String("string"),
PrivateIpAllocationMethod: pulumi.String("string"),
SubnetId: pulumi.String("string"),
},
Name: pulumi.String("string"),
},
Location: pulumi.String("string"),
DiskEncryptions: hdinsight.SparkClusterDiskEncryptionArray{
&hdinsight.SparkClusterDiskEncryptionArgs{
EncryptionAlgorithm: pulumi.String("string"),
EncryptionAtHostEnabled: pulumi.Bool(false),
KeyVaultKeyId: pulumi.String("string"),
KeyVaultManagedIdentityId: pulumi.String("string"),
},
},
SecurityProfile: &hdinsight.SparkClusterSecurityProfileArgs{
AaddsResourceId: pulumi.String("string"),
DomainName: pulumi.String("string"),
DomainUserPassword: pulumi.String("string"),
DomainUsername: pulumi.String("string"),
LdapsUrls: pulumi.StringArray{
pulumi.String("string"),
},
MsiResourceId: pulumi.String("string"),
ClusterUsersGroupDns: pulumi.StringArray{
pulumi.String("string"),
},
},
StorageAccountGen2: &hdinsight.SparkClusterStorageAccountGen2Args{
FilesystemId: pulumi.String("string"),
IsDefault: pulumi.Bool(false),
ManagedIdentityResourceId: pulumi.String("string"),
StorageResourceId: pulumi.String("string"),
},
StorageAccounts: hdinsight.SparkClusterStorageAccountArray{
&hdinsight.SparkClusterStorageAccountArgs{
IsDefault: pulumi.Bool(false),
StorageAccountKey: pulumi.String("string"),
StorageContainerId: pulumi.String("string"),
StorageResourceId: pulumi.String("string"),
},
},
Tags: pulumi.StringMap{
"string": pulumi.String("string"),
},
ComputeIsolation: &hdinsight.SparkClusterComputeIsolationArgs{
ComputeIsolationEnabled: pulumi.Bool(false),
HostSku: pulumi.String("string"),
},
TlsMinVersion: pulumi.String("string"),
})
var sparkClusterResource = new SparkCluster("sparkClusterResource", SparkClusterArgs.builder()
.gateway(SparkClusterGatewayArgs.builder()
.password("string")
.username("string")
.build())
.componentVersion(SparkClusterComponentVersionArgs.builder()
.spark("string")
.build())
.tier("string")
.roles(SparkClusterRolesArgs.builder()
.headNode(SparkClusterRolesHeadNodeArgs.builder()
.username("string")
.vmSize("string")
.password("string")
.scriptActions(SparkClusterRolesHeadNodeScriptActionArgs.builder()
.name("string")
.uri("string")
.parameters("string")
.build())
.sshKeys("string")
.subnetId("string")
.virtualNetworkId("string")
.build())
.workerNode(SparkClusterRolesWorkerNodeArgs.builder()
.targetInstanceCount(0)
.username("string")
.vmSize("string")
.autoscale(SparkClusterRolesWorkerNodeAutoscaleArgs.builder()
.capacity(SparkClusterRolesWorkerNodeAutoscaleCapacityArgs.builder()
.maxInstanceCount(0)
.minInstanceCount(0)
.build())
.recurrence(SparkClusterRolesWorkerNodeAutoscaleRecurrenceArgs.builder()
.schedules(SparkClusterRolesWorkerNodeAutoscaleRecurrenceScheduleArgs.builder()
.days("string")
.targetInstanceCount(0)
.time("string")
.build())
.timezone("string")
.build())
.build())
.password("string")
.scriptActions(SparkClusterRolesWorkerNodeScriptActionArgs.builder()
.name("string")
.uri("string")
.parameters("string")
.build())
.sshKeys("string")
.subnetId("string")
.virtualNetworkId("string")
.build())
.zookeeperNode(SparkClusterRolesZookeeperNodeArgs.builder()
.username("string")
.vmSize("string")
.password("string")
.scriptActions(SparkClusterRolesZookeeperNodeScriptActionArgs.builder()
.name("string")
.uri("string")
.parameters("string")
.build())
.sshKeys("string")
.subnetId("string")
.virtualNetworkId("string")
.build())
.build())
.resourceGroupName("string")
.clusterVersion("string")
.name("string")
.encryptionInTransitEnabled(false)
.metastores(SparkClusterMetastoresArgs.builder()
.ambari(SparkClusterMetastoresAmbariArgs.builder()
.databaseName("string")
.password("string")
.server("string")
.username("string")
.build())
.hive(SparkClusterMetastoresHiveArgs.builder()
.databaseName("string")
.password("string")
.server("string")
.username("string")
.build())
.oozie(SparkClusterMetastoresOozieArgs.builder()
.databaseName("string")
.password("string")
.server("string")
.username("string")
.build())
.build())
.monitor(SparkClusterMonitorArgs.builder()
.logAnalyticsWorkspaceId("string")
.primaryKey("string")
.build())
.extension(SparkClusterExtensionArgs.builder()
.logAnalyticsWorkspaceId("string")
.primaryKey("string")
.build())
.network(SparkClusterNetworkArgs.builder()
.connectionDirection("string")
.privateLinkEnabled(false)
.build())
.privateLinkConfiguration(SparkClusterPrivateLinkConfigurationArgs.builder()
.groupId("string")
.ipConfiguration(SparkClusterPrivateLinkConfigurationIpConfigurationArgs.builder()
.name("string")
.primary(false)
.privateIpAddress("string")
.privateIpAllocationMethod("string")
.subnetId("string")
.build())
.name("string")
.build())
.location("string")
.diskEncryptions(SparkClusterDiskEncryptionArgs.builder()
.encryptionAlgorithm("string")
.encryptionAtHostEnabled(false)
.keyVaultKeyId("string")
.keyVaultManagedIdentityId("string")
.build())
.securityProfile(SparkClusterSecurityProfileArgs.builder()
.aaddsResourceId("string")
.domainName("string")
.domainUserPassword("string")
.domainUsername("string")
.ldapsUrls("string")
.msiResourceId("string")
.clusterUsersGroupDns("string")
.build())
.storageAccountGen2(SparkClusterStorageAccountGen2Args.builder()
.filesystemId("string")
.isDefault(false)
.managedIdentityResourceId("string")
.storageResourceId("string")
.build())
.storageAccounts(SparkClusterStorageAccountArgs.builder()
.isDefault(false)
.storageAccountKey("string")
.storageContainerId("string")
.storageResourceId("string")
.build())
.tags(Map.of("string", "string"))
.computeIsolation(SparkClusterComputeIsolationArgs.builder()
.computeIsolationEnabled(false)
.hostSku("string")
.build())
.tlsMinVersion("string")
.build());
spark_cluster_resource = azure.hdinsight.SparkCluster("sparkClusterResource",
gateway={
"password": "string",
"username": "string",
},
component_version={
"spark": "string",
},
tier="string",
roles={
"head_node": {
"username": "string",
"vm_size": "string",
"password": "string",
"script_actions": [{
"name": "string",
"uri": "string",
"parameters": "string",
}],
"ssh_keys": ["string"],
"subnet_id": "string",
"virtual_network_id": "string",
},
"worker_node": {
"target_instance_count": 0,
"username": "string",
"vm_size": "string",
"autoscale": {
"capacity": {
"max_instance_count": 0,
"min_instance_count": 0,
},
"recurrence": {
"schedules": [{
"days": ["string"],
"target_instance_count": 0,
"time": "string",
}],
"timezone": "string",
},
},
"password": "string",
"script_actions": [{
"name": "string",
"uri": "string",
"parameters": "string",
}],
"ssh_keys": ["string"],
"subnet_id": "string",
"virtual_network_id": "string",
},
"zookeeper_node": {
"username": "string",
"vm_size": "string",
"password": "string",
"script_actions": [{
"name": "string",
"uri": "string",
"parameters": "string",
}],
"ssh_keys": ["string"],
"subnet_id": "string",
"virtual_network_id": "string",
},
},
resource_group_name="string",
cluster_version="string",
name="string",
encryption_in_transit_enabled=False,
metastores={
"ambari": {
"database_name": "string",
"password": "string",
"server": "string",
"username": "string",
},
"hive": {
"database_name": "string",
"password": "string",
"server": "string",
"username": "string",
},
"oozie": {
"database_name": "string",
"password": "string",
"server": "string",
"username": "string",
},
},
monitor={
"log_analytics_workspace_id": "string",
"primary_key": "string",
},
extension={
"log_analytics_workspace_id": "string",
"primary_key": "string",
},
network={
"connection_direction": "string",
"private_link_enabled": False,
},
private_link_configuration={
"group_id": "string",
"ip_configuration": {
"name": "string",
"primary": False,
"private_ip_address": "string",
"private_ip_allocation_method": "string",
"subnet_id": "string",
},
"name": "string",
},
location="string",
disk_encryptions=[{
"encryption_algorithm": "string",
"encryption_at_host_enabled": False,
"key_vault_key_id": "string",
"key_vault_managed_identity_id": "string",
}],
security_profile={
"aadds_resource_id": "string",
"domain_name": "string",
"domain_user_password": "string",
"domain_username": "string",
"ldaps_urls": ["string"],
"msi_resource_id": "string",
"cluster_users_group_dns": ["string"],
},
storage_account_gen2={
"filesystem_id": "string",
"is_default": False,
"managed_identity_resource_id": "string",
"storage_resource_id": "string",
},
storage_accounts=[{
"is_default": False,
"storage_account_key": "string",
"storage_container_id": "string",
"storage_resource_id": "string",
}],
tags={
"string": "string",
},
compute_isolation={
"compute_isolation_enabled": False,
"host_sku": "string",
},
tls_min_version="string")
// Reference skeleton: constructs an azure.hdinsight.SparkCluster showing every
// supported input property. "string", 0 and false are placeholder values to be
// replaced with real values; see the property descriptions below for semantics.
const sparkClusterResource = new azure.hdinsight.SparkCluster("sparkClusterResource", {
// Credentials for the cluster's HTTPS gateway (a `gateway` block).
gateway: {
password: "string",
username: "string",
},
// Spark version to install (a `component_version` block).
componentVersion: {
spark: "string",
},
// Cluster tier; per the docs below, "Standard" or "Premium". Forces a new resource.
tier: "string",
// Node roles for the cluster: head, worker and ZooKeeper nodes (a `roles` block).
roles: {
headNode: {
username: "string",
vmSize: "string",
password: "string",
scriptActions: [{
name: "string",
uri: "string",
parameters: "string",
}],
sshKeys: ["string"],
subnetId: "string",
virtualNetworkId: "string",
},
workerNode: {
targetInstanceCount: 0,
username: "string",
vmSize: "string",
// Autoscale settings: a capacity range and/or a recurrence schedule.
autoscale: {
capacity: {
maxInstanceCount: 0,
minInstanceCount: 0,
},
recurrence: {
schedules: [{
days: ["string"],
targetInstanceCount: 0,
time: "string",
}],
timezone: "string",
},
},
password: "string",
scriptActions: [{
name: "string",
uri: "string",
parameters: "string",
}],
sshKeys: ["string"],
subnetId: "string",
virtualNetworkId: "string",
},
zookeeperNode: {
username: "string",
vmSize: "string",
password: "string",
scriptActions: [{
name: "string",
uri: "string",
parameters: "string",
}],
sshKeys: ["string"],
subnetId: "string",
virtualNetworkId: "string",
},
},
// Resource group hosting the cluster. Forces a new resource.
resourceGroupName: "string",
// HDInsight version, e.g. "3.6". Forces a new resource.
clusterVersion: "string",
name: "string",
encryptionInTransitEnabled: false,
// External metastores (Ambari, Hive, Oozie), each with its own database credentials.
metastores: {
ambari: {
databaseName: "string",
password: "string",
server: "string",
username: "string",
},
hive: {
databaseName: "string",
password: "string",
server: "string",
username: "string",
},
oozie: {
databaseName: "string",
password: "string",
server: "string",
username: "string",
},
},
// Log Analytics integration (`monitor` and `extension` blocks).
monitor: {
logAnalyticsWorkspaceId: "string",
primaryKey: "string",
},
extension: {
logAnalyticsWorkspaceId: "string",
primaryKey: "string",
},
network: {
connectionDirection: "string",
privateLinkEnabled: false,
},
privateLinkConfiguration: {
groupId: "string",
ipConfiguration: {
name: "string",
primary: false,
privateIpAddress: "string",
privateIpAllocationMethod: "string",
subnetId: "string",
},
name: "string",
},
location: "string",
// One or more `disk_encryption` blocks.
diskEncryptions: [{
encryptionAlgorithm: "string",
encryptionAtHostEnabled: false,
keyVaultKeyId: "string",
keyVaultManagedIdentityId: "string",
}],
// Security profile (domain join settings). Per the docs below, forces a new resource.
securityProfile: {
aaddsResourceId: "string",
domainName: "string",
domainUserPassword: "string",
domainUsername: "string",
ldapsUrls: ["string"],
msiResourceId: "string",
clusterUsersGroupDns: ["string"],
},
// Gen2 storage (`storage_account_gen2`) and/or one or more `storage_account` blocks.
storageAccountGen2: {
filesystemId: "string",
isDefault: false,
managedIdentityResourceId: "string",
storageResourceId: "string",
},
storageAccounts: [{
isDefault: false,
storageAccountKey: "string",
storageContainerId: "string",
storageResourceId: "string",
}],
tags: {
string: "string",
},
computeIsolation: {
computeIsolationEnabled: false,
hostSku: "string",
},
// Minimal supported TLS version; per the docs below, "1.0", "1.1" or "1.2".
tlsMinVersion: "string",
});
# Pulumi YAML reference skeleton for an azure:hdinsight:SparkCluster resource.
# Every supported input property is shown with a placeholder value ("string",
# 0, false); replace placeholders with real values before use.
# NOTE(review): indentation appears flattened by extraction — confirm nesting
# against the typed examples above before copying.
type: azure:hdinsight:SparkCluster
properties:
clusterVersion: string
componentVersion:
spark: string
computeIsolation:
computeIsolationEnabled: false
hostSku: string
diskEncryptions:
- encryptionAlgorithm: string
encryptionAtHostEnabled: false
keyVaultKeyId: string
keyVaultManagedIdentityId: string
encryptionInTransitEnabled: false
extension:
logAnalyticsWorkspaceId: string
primaryKey: string
gateway:
password: string
username: string
location: string
metastores:
ambari:
databaseName: string
password: string
server: string
username: string
hive:
databaseName: string
password: string
server: string
username: string
oozie:
databaseName: string
password: string
server: string
username: string
monitor:
logAnalyticsWorkspaceId: string
primaryKey: string
name: string
network:
connectionDirection: string
privateLinkEnabled: false
privateLinkConfiguration:
groupId: string
ipConfiguration:
name: string
primary: false
privateIpAddress: string
privateIpAllocationMethod: string
subnetId: string
name: string
resourceGroupName: string
# Node roles: head, worker (optionally autoscaled) and ZooKeeper nodes.
roles:
headNode:
password: string
scriptActions:
- name: string
parameters: string
uri: string
sshKeys:
- string
subnetId: string
username: string
virtualNetworkId: string
workerNode:
autoscale:
capacity:
maxInstanceCount: 0
minInstanceCount: 0
recurrence:
schedules:
- days:
- string
targetInstanceCount: 0
time: string
timezone: string
password: string
scriptActions:
- name: string
parameters: string
uri: string
sshKeys:
- string
subnetId: string
targetInstanceCount: 0
username: string
virtualNetworkId: string
vmSize: string
zookeeperNode:
password: string
scriptActions:
- name: string
parameters: string
uri: string
sshKeys:
- string
subnetId: string
username: string
virtualNetworkId: string
vmSize: string
securityProfile:
aaddsResourceId: string
clusterUsersGroupDns:
- string
domainName: string
domainUserPassword: string
domainUsername: string
ldapsUrls:
- string
msiResourceId: string
# Gen2 storage and/or classic storage-account attachments.
storageAccountGen2:
filesystemId: string
isDefault: false
managedIdentityResourceId: string
storageResourceId: string
storageAccounts:
- isDefault: false
storageAccountKey: string
storageContainerId: string
storageResourceId: string
tags:
string: string
tier: string
tlsMinVersion: string
SparkCluster Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The SparkCluster resource accepts the following input properties:
- Cluster
Version string - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- Component
Version SparkCluster Component Version - A
component_version
block as defined below. - Gateway
Spark
Cluster Gateway - A
gateway
block as defined below. - Resource
Group Name string - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- Roles
Spark
Cluster Roles - A
roles
block as defined below. - Tier string
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - Compute
Isolation SparkCluster Compute Isolation - A
compute_isolation
block as defined below. - Disk
Encryptions List<SparkCluster Disk Encryption> - One or more
disk_encryption
block as defined below. - Encryption
In Transit Enabled bool - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- Extension
Spark
Cluster Extension - An
extension
block as defined below. - Location string
- Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- Metastores
Spark
Cluster Metastores - A
metastores
block as defined below. - Monitor
Spark
Cluster Monitor - A
monitor
block as defined below. - Name string
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- Network
Spark
Cluster Network - A
network
block as defined below. - Private
Link Configuration SparkClusterPrivateLinkConfiguration - A
private_link_configuration
block as defined below. - Security
Profile SparkCluster Security Profile - A
security_profile
block as defined below. Changing this forces a new resource to be created. - Storage
Account Gen2 SparkClusterStorageAccountGen2 - A
storage_account_gen2
block as defined below. - Storage
Accounts List<SparkCluster Storage Account> - One or more
storage_account
block as defined below. - Tags Dictionary&lt;string, string&gt;
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- Tls
Min Version string - The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
- Cluster
Version string - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- Component
Version SparkCluster Component Version Args - A
component_version
block as defined below. - Gateway
Spark
Cluster Gateway Args - A
gateway
block as defined below. - Resource
Group stringName - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- Roles
Spark
Cluster Roles Args - A
roles
block as defined below. - Tier string
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - Compute
Isolation SparkCluster Compute Isolation Args - A
compute_isolation
block as defined below. - Disk
Encryptions []SparkCluster Disk Encryption Args - One or more
disk_encryption
block as defined below. - Encryption
In boolTransit Enabled - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- Extension
Spark
Cluster Extension Args - An
extension
block as defined below. - Location string
- Specifies the Azure Region which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- Metastores
Spark
Cluster Metastores Args - A
metastores
block as defined below. - Monitor
Spark
Cluster Monitor Args - A
monitor
block as defined below. - Name string
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- Network
Spark
Cluster Network Args - A
network
block as defined below. - Private
Link SparkConfiguration Cluster Private Link Configuration Args - A
private_link_configuration
block as defined below. - Security
Profile SparkCluster Security Profile Args - A
security_profile
block as defined below. Changing this forces a new resource to be created. - Storage
Account SparkGen2 Cluster Storage Account Gen2Args - A
storage_account_gen2
block as defined below. - Storage
Accounts []SparkCluster Storage Account Args - One or more
storage_account
block as defined below. - Tags map[string]string
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- Tls
Min stringVersion The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
- cluster
Version String - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- component
Version SparkCluster Component Version - A
component_version
block as defined below. - gateway
Spark
Cluster Gateway - A
gateway
block as defined below. - resource
Group StringName - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- roles
Spark
Cluster Roles - A
roles
block as defined below. - tier String
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - compute
Isolation SparkCluster Compute Isolation - A
compute_isolation
block as defined below. - disk
Encryptions List<SparkCluster Disk Encryption> - One or more
disk_encryption
block as defined below. - encryption
In BooleanTransit Enabled - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- extension
Spark
Cluster Extension - An
extension
block as defined below. - location String
- Specifies the Azure Region which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- metastores
Spark
Cluster Metastores - A
metastores
block as defined below. - monitor
Spark
Cluster Monitor - A
monitor
block as defined below. - name String
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- network
Spark
Cluster Network - A
network
block as defined below. - private
Link SparkConfiguration Cluster Private Link Configuration - A
private_link_configuration
block as defined below. - security
Profile SparkCluster Security Profile - A
security_profile
block as defined below. Changing this forces a new resource to be created. - storage
Account SparkGen2 Cluster Storage Account Gen2 - A
storage_account_gen2
block as defined below. - storage
Accounts List<SparkCluster Storage Account> - One or more
storage_account
block as defined below. - tags Map&lt;String,String&gt;
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- tls
Min StringVersion The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
- cluster
Version string - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- component
Version SparkCluster Component Version - A
component_version
block as defined below. - gateway
Spark
Cluster Gateway - A
gateway
block as defined below. - resource
Group stringName - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- roles
Spark
Cluster Roles - A
roles
block as defined below. - tier string
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - compute
Isolation SparkCluster Compute Isolation - A
compute_isolation
block as defined below. - disk
Encryptions SparkCluster Disk Encryption[] - One or more
disk_encryption
block as defined below. - encryption
In booleanTransit Enabled - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- extension
Spark
Cluster Extension - An
extension
block as defined below. - location string
- Specifies the Azure Region which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- metastores
Spark
Cluster Metastores - A
metastores
block as defined below. - monitor
Spark
Cluster Monitor - A
monitor
block as defined below. - name string
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- network
Spark
Cluster Network - A
network
block as defined below. - private
Link SparkConfiguration Cluster Private Link Configuration - A
private_link_configuration
block as defined below. - security
Profile SparkCluster Security Profile - A
security_profile
block as defined below. Changing this forces a new resource to be created. - storage
Account SparkGen2 Cluster Storage Account Gen2 - A
storage_account_gen2
block as defined below. - storage
Accounts SparkCluster Storage Account[] - One or more
storage_account
block as defined below. - tags {[key: string]: string}
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- tls
Min stringVersion The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
- cluster_
version str - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- component_
version SparkCluster Component Version Args - A
component_version
block as defined below. - gateway
Spark
Cluster Gateway Args - A
gateway
block as defined below. - resource_
group_ strname - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- roles
Spark
Cluster Roles Args - A
roles
block as defined below. - tier str
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - compute_
isolation SparkCluster Compute Isolation Args - A
compute_isolation
block as defined below. - disk_
encryptions Sequence[SparkCluster Disk Encryption Args] - One or more
disk_encryption
block as defined below. - encryption_
in_ booltransit_ enabled - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- extension
Spark
Cluster Extension Args - An
extension
block as defined below. - location str
- Specifies the Azure Region which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- metastores
Spark
Cluster Metastores Args - A
metastores
block as defined below. - monitor
Spark
Cluster Monitor Args - A
monitor
block as defined below. - name str
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- network
Spark
Cluster Network Args - A
network
block as defined below. - private_
link_ Sparkconfiguration Cluster Private Link Configuration Args - A
private_link_configuration
block as defined below. - security_
profile SparkCluster Security Profile Args - A
security_profile
block as defined below. Changing this forces a new resource to be created. - storage_
account_ Sparkgen2 Cluster Storage Account Gen2Args - A
storage_account_gen2
block as defined below. - storage_
accounts Sequence[SparkCluster Storage Account Args] - One or more
storage_account
block as defined below. - tags Mapping[str, str]
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- tls_
min_ strversion The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
- cluster
Version String - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- component
Version Property Map - A
component_version
block as defined below. - gateway Property Map
- A
gateway
block as defined below. - resource
Group StringName - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- roles Property Map
- A
roles
block as defined below. - tier String
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - compute
Isolation Property Map - A
compute_isolation
block as defined below. - disk
Encryptions List<Property Map> - One or more
disk_encryption
block as defined below. - encryption
In BooleanTransit Enabled - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- extension Property Map
- An
extension
block as defined below. - location String
- Specifies the Azure Region which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- metastores Property Map
- A
metastores
block as defined below. - monitor Property Map
- A
monitor
block as defined below. - name String
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- network Property Map
- A
network
block as defined below. - private
Link Property MapConfiguration - A
private_link_configuration
block as defined below. - security
Profile Property Map - A
security_profile
block as defined below. Changing this forces a new resource to be created. - storage
Account Property MapGen2 - A
storage_account_gen2
block as defined below. - storage
Accounts List<Property Map> - One or more
storage_account
block as defined below. - tags Map&lt;String&gt;
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- tls
Min StringVersion The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
Outputs
All input properties are implicitly available as output properties. Additionally, the SparkCluster resource produces the following output properties:
- Https
Endpoint string - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- Id string
- The provider-assigned unique ID for this managed resource.
- Ssh
Endpoint string - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
- Https
Endpoint string - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- Id string
- The provider-assigned unique ID for this managed resource.
- Ssh
Endpoint string - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
- https
Endpoint String - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- id String
- The provider-assigned unique ID for this managed resource.
- ssh
Endpoint String - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
- https
Endpoint string - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- id string
- The provider-assigned unique ID for this managed resource.
- ssh
Endpoint string - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
- https_
endpoint str - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- id str
- The provider-assigned unique ID for this managed resource.
- ssh_
endpoint str - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
- https
Endpoint String - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- id String
- The provider-assigned unique ID for this managed resource.
- ssh
Endpoint String - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
Look up Existing SparkCluster Resource
Get an existing SparkCluster resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: SparkClusterState, opts?: CustomResourceOptions): SparkCluster
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
cluster_version: Optional[str] = None,
component_version: Optional[SparkClusterComponentVersionArgs] = None,
compute_isolation: Optional[SparkClusterComputeIsolationArgs] = None,
disk_encryptions: Optional[Sequence[SparkClusterDiskEncryptionArgs]] = None,
encryption_in_transit_enabled: Optional[bool] = None,
extension: Optional[SparkClusterExtensionArgs] = None,
gateway: Optional[SparkClusterGatewayArgs] = None,
https_endpoint: Optional[str] = None,
location: Optional[str] = None,
metastores: Optional[SparkClusterMetastoresArgs] = None,
monitor: Optional[SparkClusterMonitorArgs] = None,
name: Optional[str] = None,
network: Optional[SparkClusterNetworkArgs] = None,
private_link_configuration: Optional[SparkClusterPrivateLinkConfigurationArgs] = None,
resource_group_name: Optional[str] = None,
roles: Optional[SparkClusterRolesArgs] = None,
security_profile: Optional[SparkClusterSecurityProfileArgs] = None,
ssh_endpoint: Optional[str] = None,
storage_account_gen2: Optional[SparkClusterStorageAccountGen2Args] = None,
storage_accounts: Optional[Sequence[SparkClusterStorageAccountArgs]] = None,
tags: Optional[Mapping[str, str]] = None,
tier: Optional[str] = None,
tls_min_version: Optional[str] = None) -> SparkCluster
func GetSparkCluster(ctx *Context, name string, id IDInput, state *SparkClusterState, opts ...ResourceOption) (*SparkCluster, error)
public static SparkCluster Get(string name, Input<string> id, SparkClusterState? state, CustomResourceOptions? opts = null)
public static SparkCluster get(String name, Output<String> id, SparkClusterState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Cluster
Version string - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- Component
Version SparkCluster Component Version - A
component_version
block as defined below. - Compute
Isolation SparkCluster Compute Isolation - A
compute_isolation
block as defined below. - Disk
Encryptions List<SparkCluster Disk Encryption> - One or more
disk_encryption
block as defined below. - Encryption
In boolTransit Enabled - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- Extension
Spark
Cluster Extension - An
extension
block as defined below. - Gateway
Spark
Cluster Gateway - A
gateway
block as defined below. - Https
Endpoint string - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- Location string
- Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- Metastores
Spark
Cluster Metastores - A
metastores
block as defined below. - Monitor
Spark
Cluster Monitor - A
monitor
block as defined below. - Name string
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- Network
Spark
Cluster Network - A
network
block as defined below. - Private
Link SparkConfiguration Cluster Private Link Configuration - A
private_link_configuration
block as defined below. - Resource
Group stringName - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- Roles
Spark
Cluster Roles - A
roles
block as defined below. - Security
Profile SparkCluster Security Profile - A
security_profile
block as defined below. Changing this forces a new resource to be created. - Ssh
Endpoint string - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
- Storage
Account SparkGen2 Cluster Storage Account Gen2 - A
storage_account_gen2
block as defined below. - Storage
Accounts List<SparkCluster Storage Account> - One or more
storage_account
block as defined below. - Dictionary<string, string>
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- Tier string
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - Tls
Min stringVersion The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
- Cluster
Version string - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- Component
Version SparkCluster Component Version Args - A
component_version
block as defined below. - Compute
Isolation SparkCluster Compute Isolation Args - A
compute_isolation
block as defined below. - Disk
Encryptions []SparkCluster Disk Encryption Args - One or more
disk_encryption
block as defined below. - Encryption
In boolTransit Enabled - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- Extension
Spark
Cluster Extension Args - An
extension
block as defined below. - Gateway
Spark
Cluster Gateway Args - A
gateway
block as defined below. - Https
Endpoint string - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- Location string
- Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- Metastores
Spark
Cluster Metastores Args - A
metastores
block as defined below. - Monitor
Spark
Cluster Monitor Args - A
monitor
block as defined below. - Name string
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- Network
Spark
Cluster Network Args - A
network
block as defined below. - Private
Link SparkConfiguration Cluster Private Link Configuration Args - A
private_link_configuration
block as defined below. - Resource
Group stringName - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- Roles
Spark
Cluster Roles Args - A
roles
block as defined below. - Security
Profile SparkCluster Security Profile Args - A
security_profile
block as defined below. Changing this forces a new resource to be created. - Ssh
Endpoint string - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
- Storage
Account SparkGen2 Cluster Storage Account Gen2Args - A
storage_account_gen2
block as defined below. - Storage
Accounts []SparkCluster Storage Account Args - One or more
storage_account
block as defined below. - map[string]string
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- Tier string
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - Tls
Min stringVersion The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
- cluster
Version String - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- component
Version SparkCluster Component Version - A
component_version
block as defined below. - compute
Isolation SparkCluster Compute Isolation - A
compute_isolation
block as defined below. - disk
Encryptions List<SparkCluster Disk Encryption> - One or more
disk_encryption
block as defined below. - encryption
In BooleanTransit Enabled - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- extension
Spark
Cluster Extension - An
extension
block as defined below. - gateway
Spark
Cluster Gateway - A
gateway
block as defined below. - https
Endpoint String - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- location String
- Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- metastores
Spark
Cluster Metastores - A
metastores
block as defined below. - monitor
Spark
Cluster Monitor - A
monitor
block as defined below. - name String
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- network
Spark
Cluster Network - A
network
block as defined below. - private
Link SparkConfiguration Cluster Private Link Configuration - A
private_link_configuration
block as defined below. - resource
Group StringName - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- roles
Spark
Cluster Roles - A
roles
block as defined below. - security
Profile SparkCluster Security Profile - A
security_profile
block as defined below. Changing this forces a new resource to be created. - ssh
Endpoint String - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
- storage
Account SparkGen2 Cluster Storage Account Gen2 - A
storage_account_gen2
block as defined below. - storage
Accounts List<SparkCluster Storage Account> - One or more
storage_account
block as defined below. - Map<String,String>
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- tier String
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - tls
Min StringVersion The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
- cluster
Version string - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- component
Version SparkCluster Component Version - A
component_version
block as defined below. - compute
Isolation SparkCluster Compute Isolation - A
compute_isolation
block as defined below. - disk
Encryptions SparkCluster Disk Encryption[] - One or more
disk_encryption
block as defined below. - encryption
In booleanTransit Enabled - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- extension
Spark
Cluster Extension - An
extension
block as defined below. - gateway
Spark
Cluster Gateway - A
gateway
block as defined below. - https
Endpoint string - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- location string
- Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- metastores
Spark
Cluster Metastores - A
metastores
block as defined below. - monitor
Spark
Cluster Monitor - A
monitor
block as defined below. - name string
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- network
Spark
Cluster Network - A
network
block as defined below. - private
Link SparkConfiguration Cluster Private Link Configuration - A
private_link_configuration
block as defined below. - resource
Group stringName - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- roles
Spark
Cluster Roles - A
roles
block as defined below. - security
Profile SparkCluster Security Profile - A
security_profile
block as defined below. Changing this forces a new resource to be created. - ssh
Endpoint string - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
- storage
Account SparkGen2 Cluster Storage Account Gen2 - A
storage_account_gen2
block as defined below. - storage
Accounts SparkCluster Storage Account[] - One or more
storage_account
block as defined below. - {[key: string]: string}
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- tier string
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - tls
Min stringVersion The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
- cluster_
version str - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- component_
version SparkCluster Component Version Args - A
component_version
block as defined below. - compute_
isolation SparkCluster Compute Isolation Args - A
compute_isolation
block as defined below. - disk_
encryptions Sequence[SparkCluster Disk Encryption Args] - One or more
disk_encryption
block as defined below. - encryption_
in_ booltransit_ enabled - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- extension
Spark
Cluster Extension Args - An
extension
block as defined below. - gateway
Spark
Cluster Gateway Args - A
gateway
block as defined below. - https_
endpoint str - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- location str
- Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- metastores
Spark
Cluster Metastores Args - A
metastores
block as defined below. - monitor
Spark
Cluster Monitor Args - A
monitor
block as defined below. - name str
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- network
Spark
Cluster Network Args - A
network
block as defined below. - private_
link_ Sparkconfiguration Cluster Private Link Configuration Args - A
private_link_configuration
block as defined below. - resource_
group_ strname - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- roles
Spark
Cluster Roles Args - A
roles
block as defined below. - security_
profile SparkCluster Security Profile Args - A
security_profile
block as defined below. Changing this forces a new resource to be created. - ssh_
endpoint str - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
- storage_
account_ Sparkgen2 Cluster Storage Account Gen2Args - A
storage_account_gen2
block as defined below. - storage_
accounts Sequence[SparkCluster Storage Account Args] - One or more
storage_account
block as defined below. - Mapping[str, str]
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- tier str
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - tls_
min_ strversion The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
- cluster
Version String - Specifies the Version of HDInsights which should be used for this Cluster. Changing this forces a new resource to be created.
- component
Version Property Map - A
component_version
block as defined below. - compute
Isolation Property Map - A
compute_isolation
block as defined below. - disk
Encryptions List<Property Map> - One or more
disk_encryption
block as defined below. - encryption
In BooleanTransit Enabled - Whether encryption in transit is enabled for this Cluster. Changing this forces a new resource to be created.
- extension Property Map
- An
extension
block as defined below. - gateway Property Map
- A
gateway
block as defined below. - https
Endpoint String - The HTTPS Connectivity Endpoint for this HDInsight Spark Cluster.
- location String
- Specifies the Azure Region in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- metastores Property Map
- A
metastores
block as defined below. - monitor Property Map
- A
monitor
block as defined below. - name String
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- network Property Map
- A
network
block as defined below. - private
Link Property MapConfiguration - A
private_link_configuration
block as defined below. - resource
Group StringName - Specifies the name of the Resource Group in which this HDInsight Spark Cluster should exist. Changing this forces a new resource to be created.
- roles Property Map
- A
roles
block as defined below. - security
Profile Property Map - A
security_profile
block as defined below. Changing this forces a new resource to be created. - ssh
Endpoint String - The SSH Connectivity Endpoint for this HDInsight Spark Cluster.
- storage
Account Property MapGen2 - A
storage_account_gen2
block as defined below. - storage
Accounts List<Property Map> - One or more
storage_account
block as defined below. - Map<String>
- A map of Tags which should be assigned to this HDInsight Spark Cluster.
- tier String
- Specifies the Tier which should be used for this HDInsight Spark Cluster. Possible values are
Standard
or Premium
. Changing this forces a new resource to be created. - tls
Min StringVersion The minimal supported TLS version. Possible values are 1.0, 1.1 or 1.2. Changing this forces a new resource to be created.
NOTE: Starting on June 30, 2020, Azure HDInsight will enforce TLS 1.2 or later versions for all HTTPS connections. For more information, see Azure HDInsight TLS 1.2 Enforcement.
Supporting Types
SparkClusterComponentVersion, SparkClusterComponentVersionArgs
- Spark string
- The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- Spark string
- The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- spark String
- The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- spark string
- The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- spark str
- The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- spark String
- The version of Spark which should be used for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
SparkClusterComputeIsolation, SparkClusterComputeIsolationArgs
- Compute
Isolation boolEnabled - This field indicates whether to enable compute isolation. Possible values are
true
or false
. - Host
Sku string - The name of the host SKU.
- Compute
Isolation boolEnabled - This field indicates whether to enable compute isolation. Possible values are
true
or false
. - Host
Sku string - The name of the host SKU.
- compute
Isolation BooleanEnabled - This field indicates whether to enable compute isolation. Possible values are
true
or false
. - host
Sku String - The name of the host SKU.
- compute
Isolation booleanEnabled - This field indicates whether to enable compute isolation. Possible values are
true
or false
. - host
Sku string - The name of the host SKU.
- compute_
isolation_ boolenabled - This field indicates whether to enable compute isolation. Possible values are
true
or false
. - host_
sku str - The name of the host SKU.
- compute
Isolation BooleanEnabled - This field indicates whether to enable compute isolation. Possible values are
true
or false
. - host
Sku String - The name of the host SKU.
SparkClusterDiskEncryption, SparkClusterDiskEncryptionArgs
- Encryption
Algorithm string - This is an algorithm identifier for encryption. Possible values are
RSA1_5
,RSA-OAEP
,RSA-OAEP-256
. - Encryption
At boolHost Enabled - This is an indicator showing whether resource disk encryption is enabled.
- Key
Vault stringKey Id - The ID of the key vault key.
- Key
Vault stringManaged Identity Id - This is the resource ID of Managed Identity used to access the key vault.
- Encryption
Algorithm string - This is an algorithm identifier for encryption. Possible values are
RSA1_5
,RSA-OAEP
,RSA-OAEP-256
. - Encryption
At boolHost Enabled - This is an indicator showing whether resource disk encryption is enabled.
- Key
Vault stringKey Id - The ID of the key vault key.
- Key
Vault stringManaged Identity Id - This is the resource ID of Managed Identity used to access the key vault.
- encryption
Algorithm String - This is an algorithm identifier for encryption. Possible values are
RSA1_5
,RSA-OAEP
,RSA-OAEP-256
. - encryption
At BooleanHost Enabled - This is an indicator showing whether resource disk encryption is enabled.
- key
Vault StringKey Id - The ID of the key vault key.
- key
Vault StringManaged Identity Id - This is the resource ID of Managed Identity used to access the key vault.
- encryption
Algorithm string - This is an algorithm identifier for encryption. Possible values are
RSA1_5
,RSA-OAEP
,RSA-OAEP-256
. - encryption
At booleanHost Enabled - This is an indicator showing whether resource disk encryption is enabled.
- key
Vault stringKey Id - The ID of the key vault key.
- key
Vault stringManaged Identity Id - This is the resource ID of Managed Identity used to access the key vault.
- encryption_
algorithm str - This is an algorithm identifier for encryption. Possible values are
RSA1_5
,RSA-OAEP
,RSA-OAEP-256
. - encryption_
at_ boolhost_ enabled - This is an indicator showing whether resource disk encryption is enabled.
- key_
vault_ strkey_ id - The ID of the key vault key.
- key_
vault_ strmanaged_ identity_ id - This is the resource ID of Managed Identity used to access the key vault.
- encryption
Algorithm String - This is an algorithm identifier for encryption. Possible values are
RSA1_5
,RSA-OAEP
,RSA-OAEP-256
. - encryption
At BooleanHost Enabled - This is an indicator showing whether resource disk encryption is enabled.
- key
Vault StringKey Id - The ID of the key vault key.
- key
Vault StringManaged Identity Id - This is the resource ID of Managed Identity used to access the key vault.
SparkClusterExtension, SparkClusterExtensionArgs
- Log
Analytics stringWorkspace Id - The workspace ID of the log analytics extension.
- Primary
Key string - The workspace key of the log analytics extension.
- Log
Analytics stringWorkspace Id - The workspace ID of the log analytics extension.
- Primary
Key string - The workspace key of the log analytics extension.
- log
Analytics StringWorkspace Id - The workspace ID of the log analytics extension.
- primary
Key String - The workspace key of the log analytics extension.
- log
Analytics stringWorkspace Id - The workspace ID of the log analytics extension.
- primary
Key string - The workspace key of the log analytics extension.
- log_
analytics_ strworkspace_ id - The workspace ID of the log analytics extension.
- primary_
key str - The workspace key of the log analytics extension.
- log
Analytics StringWorkspace Id - The workspace ID of the log analytics extension.
- primary
Key String - The workspace key of the log analytics extension.
SparkClusterGateway, SparkClusterGatewayArgs
SparkClusterMetastores, SparkClusterMetastoresArgs
- Ambari
Spark
Cluster Metastores Ambari - An
ambari
block as defined below. - Hive
Spark
Cluster Metastores Hive - A
hive
block as defined below. - Oozie
Spark
Cluster Metastores Oozie - An
oozie
block as defined below.
- Ambari
Spark
Cluster Metastores Ambari - An
ambari
block as defined below. - Hive
Spark
Cluster Metastores Hive - A
hive
block as defined below. - Oozie
Spark
Cluster Metastores Oozie - An
oozie
block as defined below.
- ambari
Spark
Cluster Metastores Ambari - An
ambari
block as defined below. - hive
Spark
Cluster Metastores Hive - A
hive
block as defined below. - oozie
Spark
Cluster Metastores Oozie - An
oozie
block as defined below.
- ambari
Spark
Cluster Metastores Ambari - An
ambari
block as defined below. - hive
Spark
Cluster Metastores Hive - A
hive
block as defined below. - oozie
Spark
Cluster Metastores Oozie - An
oozie
block as defined below.
- ambari
Spark
Cluster Metastores Ambari - An
ambari
block as defined below. - hive
Spark
Cluster Metastores Hive - A
hive
block as defined below. - oozie
Spark
Cluster Metastores Oozie - An
oozie
block as defined below.
- ambari Property Map
- An
ambari
block as defined below. - hive Property Map
- A
hive
block as defined below. - oozie Property Map
- An
oozie
block as defined below.
SparkClusterMetastoresAmbari, SparkClusterMetastoresAmbariArgs
- Database
Name string - The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
- Password string
- The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- Server string
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
- Username string
- The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- Database
Name string - The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
- Password string
- The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- Server string
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
- Username string
- The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database
Name String - The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
- password String
- The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server String
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
- username String
- The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database
Name string - The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
- password string
- The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server string
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
- username string
- The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database_
name str - The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
- password str
- The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server str
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
- username str
- The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database
Name String - The external Ambari metastore's existing SQL database. Changing this forces a new resource to be created.
- password String
- The external Ambari metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server String
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Ambari metastore. Changing this forces a new resource to be created.
- username String
- The external Ambari metastore's existing SQL server admin username. Changing this forces a new resource to be created.
SparkClusterMetastoresHive, SparkClusterMetastoresHiveArgs
- Database
Name string - The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
- Password string
- The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- Server string
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
- Username string
- The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- Database
Name string - The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
- Password string
- The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- Server string
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
- Username string
- The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database
Name String - The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
- password String
- The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server String
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
- username String
- The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database
Name string - The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
- password string
- The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server string
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
- username string
- The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database_
name str - The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
- password str
- The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server str
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
- username str
- The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database
Name String - The external Hive metastore's existing SQL database. Changing this forces a new resource to be created.
- password String
- The external Hive metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server String
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Hive metastore. Changing this forces a new resource to be created.
- username String
- The external Hive metastore's existing SQL server admin username. Changing this forces a new resource to be created.
SparkClusterMetastoresOozie, SparkClusterMetastoresOozieArgs
- Database
Name string - The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
- Password string
- The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- Server string
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
- Username string
- The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- Database
Name string - The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
- Password string
- The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- Server string
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
- Username string
- The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database
Name String - The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
- password String
- The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server String
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
- username String
- The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database
Name string - The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
- password string
- The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server string
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
- username string
- The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database_
name str - The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
- password str
- The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server str
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
- username str
- The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
- database
Name String - The external Oozie metastore's existing SQL database. Changing this forces a new resource to be created.
- password String
- The external Oozie metastore's existing SQL server admin password. Changing this forces a new resource to be created.
- server String
- The fully-qualified domain name (FQDN) of the SQL server to use for the external Oozie metastore. Changing this forces a new resource to be created.
- username String
- The external Oozie metastore's existing SQL server admin username. Changing this forces a new resource to be created.
SparkClusterMonitor, SparkClusterMonitorArgs
- Log
Analytics stringWorkspace Id - The Operations Management Suite (OMS) workspace ID.
- Primary
Key string - The Operations Management Suite (OMS) workspace key.
- Log
Analytics stringWorkspace Id - The Operations Management Suite (OMS) workspace ID.
- Primary
Key string - The Operations Management Suite (OMS) workspace key.
- log
Analytics StringWorkspace Id - The Operations Management Suite (OMS) workspace ID.
- primary
Key String - The Operations Management Suite (OMS) workspace key.
- log
Analytics stringWorkspace Id - The Operations Management Suite (OMS) workspace ID.
- primary
Key string - The Operations Management Suite (OMS) workspace key.
- log_
analytics_ strworkspace_ id - The Operations Management Suite (OMS) workspace ID.
- primary_
key str - The Operations Management Suite (OMS) workspace key.
- log
Analytics StringWorkspace Id - The Operations Management Suite (OMS) workspace ID.
- primary
Key String - The Operations Management Suite (OMS) workspace key.
SparkClusterNetwork, SparkClusterNetworkArgs
- Connection
Direction string The direction of the resource provider connection. Possible values include
Inbound
orOutbound
. Defaults toInbound
. Changing this forces a new resource to be created. NOTE: To enable the private link the
connection_direction
must be set toOutbound
.- Private
Link boolEnabled - Is the private link enabled? Possible values include
true
orfalse
. Defaults tofalse
. Changing this forces a new resource to be created.
- Connection
Direction string The direction of the resource provider connection. Possible values include
Inbound
orOutbound
. Defaults toInbound
. Changing this forces a new resource to be created. NOTE: To enable the private link the
connection_direction
must be set toOutbound
.- Private
Link boolEnabled - Is the private link enabled? Possible values include
true
orfalse
. Defaults tofalse
. Changing this forces a new resource to be created.
- connection
Direction String The direction of the resource provider connection. Possible values include
Inbound
orOutbound
. Defaults toInbound
. Changing this forces a new resource to be created. NOTE: To enable the private link the
connection_direction
must be set toOutbound
.- private
Link BooleanEnabled - Is the private link enabled? Possible values include
true
orfalse
. Defaults tofalse
. Changing this forces a new resource to be created.
- connection
Direction string The direction of the resource provider connection. Possible values include
Inbound
orOutbound
. Defaults toInbound
. Changing this forces a new resource to be created. NOTE: To enable the private link the
connection_direction
must be set toOutbound
.- private
Link booleanEnabled - Is the private link enabled? Possible values include
true
orfalse
. Defaults tofalse
. Changing this forces a new resource to be created.
- connection_
direction str The direction of the resource provider connection. Possible values include
Inbound
orOutbound
. Defaults toInbound
. Changing this forces a new resource to be created. NOTE: To enable the private link the
connection_direction
must be set toOutbound
.- private_
link_ boolenabled - Is the private link enabled? Possible values include
true
orfalse
. Defaults tofalse
. Changing this forces a new resource to be created.
- connection
Direction String The direction of the resource provider connection. Possible values include
Inbound
orOutbound
. Defaults toInbound
. Changing this forces a new resource to be created. NOTE: To enable the private link the
connection_direction
must be set toOutbound
.- private
Link BooleanEnabled - Is the private link enabled? Possible values include
true
orfalse
. Defaults tofalse
. Changing this forces a new resource to be created.
SparkClusterPrivateLinkConfiguration, SparkClusterPrivateLinkConfigurationArgs
- Group
Id string - The ID of the private link service group.
- Ip
Configuration SparkCluster Private Link Configuration Ip Configuration - Name string
- The name of the private link configuration.
- Group
Id string - The ID of the private link service group.
- Ip
Configuration SparkCluster Private Link Configuration Ip Configuration - Name string
- The name of the private link configuration.
- group
Id String - The ID of the private link service group.
- ip
Configuration SparkCluster Private Link Configuration Ip Configuration - name String
- The name of the private link configuration.
- group
Id string - The ID of the private link service group.
- ip
Configuration SparkCluster Private Link Configuration Ip Configuration - name string
- The name of the private link configuration.
- group_
id str - The ID of the private link service group.
- ip_
configuration SparkCluster Private Link Configuration Ip Configuration - name str
- The name of the private link configuration.
- group
Id String - The ID of the private link service group.
- ip
Configuration Property Map - name String
- The name of the private link configuration.
SparkClusterPrivateLinkConfigurationIpConfiguration, SparkClusterPrivateLinkConfigurationIpConfigurationArgs
- Name string
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- Primary bool
- Indicates whether this IP configuration is primary.
- Private
Ip stringAddress - The private IP address of the IP configuration.
- Private
Ip stringAllocation Method - The private IP allocation method. The only possible value now is
Dynamic
. - Subnet
Id string
- Name string
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- Primary bool
- Indicates whether this IP configuration is primary.
- Private
Ip stringAddress - The private IP address of the IP configuration.
- Private
Ip stringAllocation Method - The private IP allocation method. The only possible value now is
Dynamic
. - Subnet
Id string
- name String
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- primary Boolean
- Indicates whether this IP configuration is primary.
- private
Ip StringAddress - The private IP address of the IP configuration.
- private
Ip StringAllocation Method - The private IP allocation method. The only possible value now is
Dynamic
. - subnet
Id String
- name string
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- primary boolean
- Indicates whether this IP configuration is primary.
- private
Ip stringAddress - The private IP address of the IP configuration.
- private
Ip stringAllocation Method - The private IP allocation method. The only possible value now is
Dynamic
. - subnet
Id string
- name str
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- primary bool
- Indicates whether this IP configuration is primary.
- private_
ip_ straddress - The private IP address of the IP configuration.
- private_
ip_ strallocation_ method - The private IP allocation method. The only possible value now is
Dynamic
. - subnet_
id str
- name String
- Specifies the name for this HDInsight Spark Cluster. Changing this forces a new resource to be created.
- primary Boolean
- Indicates whether this IP configuration is primary.
- private
Ip StringAddress - The private IP address of the IP configuration.
- private
Ip StringAllocation Method - The private IP allocation method. The only possible value now is
Dynamic
. - subnet
Id String
SparkClusterRoles, SparkClusterRolesArgs
- Head
Node SparkCluster Roles Head Node - A
head_node
block as defined above. - Worker
Node SparkCluster Roles Worker Node - A
worker_node
block as defined below. - Zookeeper
Node SparkCluster Roles Zookeeper Node - A
zookeeper_node
block as defined below.
- Head
Node SparkCluster Roles Head Node - A
head_node
block as defined above. - Worker
Node SparkCluster Roles Worker Node - A
worker_node
block as defined below. - Zookeeper
Node SparkCluster Roles Zookeeper Node - A
zookeeper_node
block as defined below.
- head
Node SparkCluster Roles Head Node - A
head_node
block as defined above. - worker
Node SparkCluster Roles Worker Node - A
worker_node
block as defined below. - zookeeper
Node SparkCluster Roles Zookeeper Node - A
zookeeper_node
block as defined below.
- head
Node SparkCluster Roles Head Node - A
head_node
block as defined above. - worker
Node SparkCluster Roles Worker Node - A
worker_node
block as defined below. - zookeeper
Node SparkCluster Roles Zookeeper Node - A
zookeeper_node
block as defined below.
- head_
node SparkCluster Roles Head Node - A
head_node
block as defined above. - worker_
node SparkCluster Roles Worker Node - A
worker_node
block as defined below. - zookeeper_
node SparkCluster Roles Zookeeper Node - A
zookeeper_node
block as defined below.
- head
Node Property Map - A
head_node
block as defined above. - worker
Node Property Map - A
worker_node
block as defined below. - zookeeper
Node Property Map - A
zookeeper_node
block as defined below.
SparkClusterRolesHeadNode, SparkClusterRolesHeadNodeArgs
- Username string
- The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
- Vm
Size string - The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
andStandard_NC24
. Changing this forces a new resource to be created. - Password string
The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- Script
Actions List<SparkCluster Roles Head Node Script Action> - The script action which will run on the cluster. One or more
script_actions
blocks as defined below. - Ssh
Keys List<string> A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or moressh_keys
must be specified - but not both.- Subnet
Id string - The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
- Virtual
Network stringId - The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
- Username string
- The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
- Vm
Size string - The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
andStandard_NC24
. Changing this forces a new resource to be created. - Password string
The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- Script
Actions []SparkCluster Roles Head Node Script Action - The script action which will run on the cluster. One or more
script_actions
blocks as defined below. - Ssh
Keys []string A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or moressh_keys
must be specified - but not both.- Subnet
Id string - The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
- Virtual
Network stringId - The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
- username String
- The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
- vm
Size String - The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
andStandard_NC24
. Changing this forces a new resource to be created. - password String
The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script
Actions List<SparkCluster Roles Head Node Script Action> - The script action which will run on the cluster. One or more
script_actions
blocks as defined below. - ssh
Keys List<String> A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or moressh_keys
must be specified - but not both.- subnet
Id String - The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual
Network StringId - The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
- username string
- The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
- vm
Size string - The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
andStandard_NC24
. Changing this forces a new resource to be created. - password string
The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script
Actions SparkCluster Roles Head Node Script Action[] - The script action which will run on the cluster. One or more
script_actions
blocks as defined below. - ssh
Keys string[] A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or moressh_keys
must be specified - but not both.- subnet
Id string - The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual
Network stringId - The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
- username str
- The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
- vm_
size str - The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
andStandard_NC24
. Changing this forces a new resource to be created. - password str
The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script_
actions Sequence[SparkCluster Roles Head Node Script Action] - The script action which will run on the cluster. One or more
script_actions
blocks as defined below. - ssh_
keys Sequence[str] A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or moressh_keys
must be specified - but not both.- subnet_
id str - The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual_
network_ strid - The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
- username String
- The Username of the local administrator for the Head Nodes. Changing this forces a new resource to be created.
- vm
Size String - The Size of the Virtual Machine which should be used as the Head Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
andStandard_NC24
. Changing this forces a new resource to be created. - password String
The Password associated with the local administrator for the Head Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script
Actions List<Property Map> - The script action which will run on the cluster. One or more
script_actions
blocks as defined below. - ssh
Keys List<String> A list of SSH Keys which should be used for the local administrator on the Head Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - subnet
Id String - The ID of the Subnet within the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual
Network StringId - The ID of the Virtual Network where the Head Nodes should be provisioned within. Changing this forces a new resource to be created.
SparkClusterRolesHeadNodeScriptAction, SparkClusterRolesHeadNodeScriptActionArgs
- Name string
- The name of the script action.
- Uri string
- The URI to the script.
- Parameters string
- The parameters for the script provided.
- Name string
- The name of the script action.
- Uri string
- The URI to the script.
- Parameters string
- The parameters for the script provided.
- name String
- The name of the script action.
- uri String
- The URI to the script.
- parameters String
- The parameters for the script provided.
- name string
- The name of the script action.
- uri string
- The URI to the script.
- parameters string
- The parameters for the script provided.
- name str
- The name of the script action.
- uri str
- The URI to the script.
- parameters str
- The parameters for the script provided.
- name String
- The name of the script action.
- uri String
- The URI to the script.
- parameters String
- The parameters for the script provided.
SparkClusterRolesWorkerNode, SparkClusterRolesWorkerNodeArgs
- Target
Instance intCount - The number of instances which should be run for the Worker Nodes.
- Username string
- The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
- Vm
Size string - The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - Autoscale
Spark
Cluster Roles Worker Node Autoscale - An
autoscale
block as defined below. - Password string
The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- Script
Actions List<SparkCluster Roles Worker Node Script Action> - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - Ssh
Keys List<string> A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - Subnet
Id string - The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
- Virtual
Network stringId - The ID of the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
- Target
Instance intCount - The number of instances which should be run for the Worker Nodes.
- Username string
- The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
- Vm
Size string - The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - Autoscale
Spark
Cluster Roles Worker Node Autoscale - An
autoscale
block as defined below. - Password string
The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- Script
Actions []SparkCluster Roles Worker Node Script Action - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - Ssh
Keys []string A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - Subnet
Id string - The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
- Virtual
Network stringId - The ID of the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
- target
Instance IntegerCount - The number of instances which should be run for the Worker Nodes.
- username String
- The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
- vm
Size String - The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - autoscale
Spark
Cluster Roles Worker Node Autoscale - An
autoscale
block as defined below. - password String
The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script
Actions List<SparkCluster Roles Worker Node Script Action> - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - ssh
Keys List<String> A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - subnet
Id String - The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual
Network StringId - The ID of the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
- target
Instance numberCount - The number of instances which should be run for the Worker Nodes.
- username string
- The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
- vm
Size string - The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - autoscale
Spark
Cluster Roles Worker Node Autoscale - An
autoscale
block as defined below. - password string
The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script
Actions SparkCluster Roles Worker Node Script Action[] - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - ssh
Keys string[] A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - subnet
Id string - The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual
Network stringId - The ID of the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
- target_
instance_ intcount - The number of instances which should be run for the Worker Nodes.
- username str
- The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
- vm_
size str - The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - autoscale
Spark
Cluster Roles Worker Node Autoscale - An
autoscale
block as defined below. - password str
The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script_
actions Sequence[SparkCluster Roles Worker Node Script Action] - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - ssh_
keys Sequence[str] A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - subnet_
id str - The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual_
network_ strid - The ID of the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
- target
Instance NumberCount - The number of instances which should be run for the Worker Nodes.
- username String
- The Username of the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
- vm
Size String - The Size of the Virtual Machine which should be used as the Worker Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - autoscale Property Map
- An
autoscale
block as defined below. - password String
The Password associated with the local administrator for the Worker Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script
Actions List<Property Map> - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - ssh
Keys List<String> A list of SSH Keys which should be used for the local administrator on the Worker Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - subnet
Id String - The ID of the Subnet within the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual
Network StringId - The ID of the Virtual Network where the Worker Nodes should be provisioned within. Changing this forces a new resource to be created.
SparkClusterRolesWorkerNodeAutoscale, SparkClusterRolesWorkerNodeAutoscaleArgs
- Capacity
Spark
Cluster Roles Worker Node Autoscale Capacity - A
capacity
block as defined below. - Recurrence
Spark
Cluster Roles Worker Node Autoscale Recurrence A
recurrence
block as defined below.NOTE: Either a
capacity
or recurrence
block must be specified - but not both.
- Capacity
Spark
Cluster Roles Worker Node Autoscale Capacity - A
capacity
block as defined below. - Recurrence
Spark
Cluster Roles Worker Node Autoscale Recurrence A
recurrence
block as defined below.NOTE: Either a
capacity
or recurrence
block must be specified - but not both.
- capacity
Spark
Cluster Roles Worker Node Autoscale Capacity - A
capacity
block as defined below. - recurrence
Spark
Cluster Roles Worker Node Autoscale Recurrence A
recurrence
block as defined below.NOTE: Either a
capacity
or recurrence
block must be specified - but not both.
- capacity
Spark
Cluster Roles Worker Node Autoscale Capacity - A
capacity
block as defined below. - recurrence
Spark
Cluster Roles Worker Node Autoscale Recurrence A
recurrence
block as defined below.NOTE: Either a
capacity
or recurrence
block must be specified - but not both.
- capacity
Spark
Cluster Roles Worker Node Autoscale Capacity - A
capacity
block as defined below. - recurrence
Spark
Cluster Roles Worker Node Autoscale Recurrence A
recurrence
block as defined below.NOTE: Either a
capacity
or recurrence
block must be specified - but not both.
- capacity Property Map
- A
capacity
block as defined below. - recurrence Property Map
A
recurrence
block as defined below.NOTE: Either a
capacity
or recurrence
block must be specified - but not both.
SparkClusterRolesWorkerNodeAutoscaleCapacity, SparkClusterRolesWorkerNodeAutoscaleCapacityArgs
- Max
Instance intCount - The maximum number of worker nodes to autoscale to based on the cluster's activity.
- Min
Instance intCount - The minimum number of worker nodes to autoscale to based on the cluster's activity.
- Max
Instance intCount - The maximum number of worker nodes to autoscale to based on the cluster's activity.
- Min
Instance intCount - The minimum number of worker nodes to autoscale to based on the cluster's activity.
- max
Instance IntegerCount - The maximum number of worker nodes to autoscale to based on the cluster's activity.
- min
Instance IntegerCount - The minimum number of worker nodes to autoscale to based on the cluster's activity.
- max
Instance numberCount - The maximum number of worker nodes to autoscale to based on the cluster's activity.
- min
Instance numberCount - The minimum number of worker nodes to autoscale to based on the cluster's activity.
- max_
instance_ intcount - The maximum number of worker nodes to autoscale to based on the cluster's activity.
- min_
instance_ intcount - The minimum number of worker nodes to autoscale to based on the cluster's activity.
- max
Instance NumberCount - The maximum number of worker nodes to autoscale to based on the cluster's activity.
- min
Instance NumberCount - The minimum number of worker nodes to autoscale to based on the cluster's activity.
SparkClusterRolesWorkerNodeAutoscaleRecurrence, SparkClusterRolesWorkerNodeAutoscaleRecurrenceArgs
- Schedules
List<Spark
Cluster Roles Worker Node Autoscale Recurrence Schedule> - A list of
schedule
blocks as defined below. - Timezone string
- The time zone for the autoscale schedule times.
- Schedules
[]Spark
Cluster Roles Worker Node Autoscale Recurrence Schedule - A list of
schedule
blocks as defined below. - Timezone string
- The time zone for the autoscale schedule times.
- schedules
List<Spark
Cluster Roles Worker Node Autoscale Recurrence Schedule> - A list of
schedule
blocks as defined below. - timezone String
- The time zone for the autoscale schedule times.
- schedules
Spark
Cluster Roles Worker Node Autoscale Recurrence Schedule[] - A list of
schedule
blocks as defined below. - timezone string
- The time zone for the autoscale schedule times.
- schedules
Sequence[Spark
Cluster Roles Worker Node Autoscale Recurrence Schedule] - A list of
schedule
blocks as defined below. - timezone str
- The time zone for the autoscale schedule times.
- schedules List<Property Map>
- A list of
schedule
blocks as defined below. - timezone String
- The time zone for the autoscale schedule times.
SparkClusterRolesWorkerNodeAutoscaleRecurrenceSchedule, SparkClusterRolesWorkerNodeAutoscaleRecurrenceScheduleArgs
- Days List<string>
- The days of the week to perform autoscale. Possible values are
Monday
,Tuesday
,Wednesday
,Thursday
,Friday
,Saturday
and Sunday
. - Target
Instance intCount - The number of worker nodes to autoscale at the specified time.
- Time string
- The time of day to perform the autoscale in 24-hour format.
- Days []string
- The days of the week to perform autoscale. Possible values are
Monday
,Tuesday
,Wednesday
,Thursday
,Friday
,Saturday
and Sunday
. - Target
Instance intCount - The number of worker nodes to autoscale at the specified time.
- Time string
- The time of day to perform the autoscale in 24-hour format.
- days List<String>
- The days of the week to perform autoscale. Possible values are
Monday
,Tuesday
,Wednesday
,Thursday
,Friday
,Saturday
and Sunday
. - target
Instance IntegerCount - The number of worker nodes to autoscale at the specified time.
- time String
- The time of day to perform the autoscale in 24-hour format.
- days string[]
- The days of the week to perform autoscale. Possible values are
Monday
,Tuesday
,Wednesday
,Thursday
,Friday
,Saturday
and Sunday
. - target
Instance numberCount - The number of worker nodes to autoscale at the specified time.
- time string
- The time of day to perform the autoscale in 24-hour format.
- days Sequence[str]
- The days of the week to perform autoscale. Possible values are
Monday
,Tuesday
,Wednesday
,Thursday
,Friday
,Saturday
and Sunday
. - target_
instance_ intcount - The number of worker nodes to autoscale at the specified time.
- time str
- The time of day to perform the autoscale in 24-hour format.
- days List<String>
- The days of the week to perform autoscale. Possible values are
Monday
,Tuesday
,Wednesday
,Thursday
,Friday
,Saturday
and Sunday
. - target
Instance NumberCount - The number of worker nodes to autoscale at the specified time.
- time String
- The time of day to perform the autoscale in 24-hour format.
SparkClusterRolesWorkerNodeScriptAction, SparkClusterRolesWorkerNodeScriptActionArgs
- Name string
- The name of the script action.
- Uri string
- The URI to the script.
- Parameters string
- The parameters for the script provided.
- Name string
- The name of the script action.
- Uri string
- The URI to the script.
- Parameters string
- The parameters for the script provided.
- name String
- The name of the script action.
- uri String
- The URI to the script.
- parameters String
- The parameters for the script provided.
- name string
- The name of the script action.
- uri string
- The URI to the script.
- parameters string
- The parameters for the script provided.
- name str
- The name of the script action.
- uri str
- The URI to the script.
- parameters str
- The parameters for the script provided.
- name String
- The name of the script action.
- uri String
- The URI to the script.
- parameters String
- The parameters for the script provided.
SparkClusterRolesZookeeperNode, SparkClusterRolesZookeeperNodeArgs
- Username string
- The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
- Vm
Size string - The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - Password string
The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- Script
Actions List<SparkCluster Roles Zookeeper Node Script Action> - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - Ssh
Keys List<string> A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - Subnet
Id string - The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
- Virtual
Network stringId - The ID of the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
- Username string
- The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
- Vm
Size string - The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - Password string
The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- Script
Actions []SparkCluster Roles Zookeeper Node Script Action - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - Ssh
Keys []string A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - Subnet
Id string - The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
- Virtual
Network stringId - The ID of the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
- username String
- The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
- vm
Size String - The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - password String
The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script
Actions List<SparkCluster Roles Zookeeper Node Script Action> - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - ssh
Keys List<String> A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - subnet
Id String - The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual
Network StringId - The ID of the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
- username string
- The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
- vm
Size string - The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - password string
The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script
Actions SparkCluster Roles Zookeeper Node Script Action[] - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - ssh
Keys string[] A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - subnet
Id string - The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual
Network stringId - The ID of the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
- username str
- The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
- vm_
size str - The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - password str
The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script_
actions Sequence[SparkCluster Roles Zookeeper Node Script Action] - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - ssh_
keys Sequence[str] A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - subnet_
id str - The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual_
network_ strid - The ID of the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
- username String
- The Username of the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
- vm
Size String - The Size of the Virtual Machine which should be used as the Zookeeper Nodes. Possible values are
ExtraSmall
,Small
,Medium
,Large
,ExtraLarge
,A5
,A6
,A7
,A8
,A9
,A10
,A11
,Standard_A1_V2
,Standard_A2_V2
,Standard_A2m_V2
,Standard_A3
,Standard_A4_V2
,Standard_A4m_V2
,Standard_A8_V2
,Standard_A8m_V2
,Standard_D1
,Standard_D2
,Standard_D3
,Standard_D4
,Standard_D11
,Standard_D12
,Standard_D13
,Standard_D14
,Standard_D1_V2
,Standard_D2_V2
,Standard_D3_V2
,Standard_D4_V2
,Standard_D5_V2
,Standard_D11_V2
,Standard_D12_V2
,Standard_D13_V2
,Standard_D14_V2
,Standard_DS1_V2
,Standard_DS2_V2
,Standard_DS3_V2
,Standard_DS4_V2
,Standard_DS5_V2
,Standard_DS11_V2
,Standard_DS12_V2
,Standard_DS13_V2
,Standard_DS14_V2
,Standard_E2_V3
,Standard_E4_V3
,Standard_E8_V3
,Standard_E16_V3
,Standard_E20_V3
,Standard_E32_V3
,Standard_E64_V3
,Standard_E64i_V3
,Standard_E2s_V3
,Standard_E4s_V3
,Standard_E8s_V3
,Standard_E16s_V3
,Standard_E20s_V3
,Standard_E32s_V3
,Standard_E64s_V3
,Standard_E64is_V3
,Standard_D2a_V4
,Standard_D4a_V4
,Standard_D8a_V4
,Standard_D16a_V4
,Standard_D32a_V4
,Standard_D48a_V4
,Standard_D64a_V4
,Standard_D96a_V4
,Standard_E2a_V4
,Standard_E4a_V4
,Standard_E8a_V4
,Standard_E16a_V4
,Standard_E20a_V4
,Standard_E32a_V4
,Standard_E48a_V4
,Standard_E64a_V4
,Standard_E96a_V4
,Standard_D2ads_V5
,Standard_D4ads_V5
,Standard_D8ads_V5
,Standard_D16ads_V5
,Standard_D32ads_V5
,Standard_D48ads_V5
,Standard_D64ads_V5
,Standard_D96ads_V5
,Standard_E2ads_V5
,Standard_E4ads_V5
,Standard_E8ads_V5
,Standard_E16ads_V5
,Standard_E20ads_V5
,Standard_E32ads_V5
,Standard_E48ads_V5
,Standard_E64ads_V5
,Standard_E96ads_V5
,Standard_G1
,Standard_G2
,Standard_G3
,Standard_G4
,Standard_G5
,Standard_F2s_V2
,Standard_F4s_V2
,Standard_F8s_V2
,Standard_F16s_V2
,Standard_F32s_V2
,Standard_F64s_V2
,Standard_F72s_V2
,Standard_GS1
,Standard_GS2
,Standard_GS3
,Standard_GS4
,Standard_GS5
and Standard_NC24
. Changing this forces a new resource to be created. - password String
The Password associated with the local administrator for the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: If specified, this password must be at least 10 characters in length and must contain at least one digit, one uppercase and one lower case letter, one non-alphanumeric character (except characters ' " ` ).
- script
Actions List<Property Map> - The script action which will run on the cluster. One or more
script_actions
blocks as defined above. - ssh
Keys List<String> A list of SSH Keys which should be used for the local administrator on the Zookeeper Nodes. Changing this forces a new resource to be created.
NOTE: Either a
password
or one or more ssh_keys
must be specified - but not both. - subnet
Id String - The ID of the Subnet within the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
- virtual
Network StringId - The ID of the Virtual Network where the Zookeeper Nodes should be provisioned within. Changing this forces a new resource to be created.
SparkClusterRolesZookeeperNodeScriptAction, SparkClusterRolesZookeeperNodeScriptActionArgs
- Name string
- The name of the script action.
- Uri string
- The URI to the script.
- Parameters string
- The parameters for the script provided.
- Name string
- The name of the script action.
- Uri string
- The URI to the script.
- Parameters string
- The parameters for the script provided.
- name String
- The name of the script action.
- uri String
- The URI to the script.
- parameters String
- The parameters for the script provided.
- name string
- The name of the script action.
- uri string
- The URI to the script.
- parameters string
- The parameters for the script provided.
- name str
- The name of the script action.
- uri str
- The URI to the script.
- parameters str
- The parameters for the script provided.
- name String
- The name of the script action.
- uri String
- The URI to the script.
- parameters String
- The parameters for the script provided.
SparkClusterSecurityProfile, SparkClusterSecurityProfileArgs
- Aadds
Resource stringId - The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
- Domain
Name string - The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- Domain
User stringPassword - The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- Domain
Username string - The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- Ldaps
Urls List<string> - A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
- Msi
Resource stringId - The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
- Cluster
Users List<string>Group Dns - A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
- Aadds
Resource stringId - The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
- Domain
Name string - The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- Domain
User stringPassword - The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- Domain
Username string - The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- Ldaps
Urls []string - A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
- Msi
Resource stringId - The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
- Cluster
Users []stringGroup Dns - A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
- aadds
Resource StringId - The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
- domain
Name String - The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- domain
User StringPassword - The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- domain
Username String - The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- ldaps
Urls List<String> - A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
- msi
Resource StringId - The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
- cluster
Users List<String>Group Dns - A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
- aadds
Resource stringId - The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
- domain
Name string - The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- domain
User stringPassword - The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- domain
Username string - The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- ldaps
Urls string[] - A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
- msi
Resource stringId - The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
- cluster
Users string[]Group Dns - A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
- aadds_
resource_ strid - The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
- domain_
name str - The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- domain_
user_ strpassword - The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- domain_
username str - The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- ldaps_
urls Sequence[str] - A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
- msi_
resource_ strid - The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
- cluster_
users_ Sequence[str]group_ dns - A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
- aadds
Resource StringId - The resource ID of the Azure Active Directory Domain Service. Changing this forces a new resource to be created.
- domain
Name String - The name of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- domain
User StringPassword - The user password of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- domain
Username String - The username of the Azure Active Directory Domain. Changing this forces a new resource to be created.
- ldaps
Urls List<String> - A list of the LDAPS URLs to communicate with the Azure Active Directory. Changing this forces a new resource to be created.
- msi
Resource StringId - The User Assigned Identity for the HDInsight Cluster. Changing this forces a new resource to be created.
- cluster
Users List<String>Group Dns - A list of the distinguished names for the cluster user groups. Changing this forces a new resource to be created.
SparkClusterStorageAccount, SparkClusterStorageAccountArgs
- Is
Default bool Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - Storage
Account stringKey - The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
- Storage
Container stringId The ID of the Storage Container. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - Storage
Resource stringId - The ID of the Storage Account. Changing this forces a new resource to be created.
- Is
Default bool Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - Storage
Account stringKey - The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
- Storage
Container stringId The ID of the Storage Container. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - Storage
Resource stringId - The ID of the Storage Account. Changing this forces a new resource to be created.
- is
Default Boolean Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - storage
Account StringKey - The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
- storage
Container StringId The ID of the Storage Container. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - storage
Resource StringId - The ID of the Storage Account. Changing this forces a new resource to be created.
- is
Default boolean Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - storage
Account stringKey - The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
- storage
Container stringId The ID of the Storage Container. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - storage
Resource stringId - The ID of the Storage Account. Changing this forces a new resource to be created.
- is_
default bool Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - storage_
account_ strkey - The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
- storage_
container_ strid The ID of the Storage Container. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - storage_
resource_ strid - The ID of the Storage Account. Changing this forces a new resource to be created.
- is
Default Boolean Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - storage
Account StringKey - The Access Key which should be used to connect to the Storage Account. Changing this forces a new resource to be created.
- storage
Container StringId The ID of the Storage Container. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - storage
Resource StringId - The ID of the Storage Account. Changing this forces a new resource to be created.
SparkClusterStorageAccountGen2, SparkClusterStorageAccountGen2Args
- Filesystem
Id string - The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
- Is
Default bool Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - Managed
Identity stringResource Id The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - Storage
Resource stringId - The ID of the Storage Account. Changing this forces a new resource to be created.
- Filesystem
Id string - The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
- Is
Default bool Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - Managed
Identity stringResource Id The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - Storage
Resource stringId - The ID of the Storage Account. Changing this forces a new resource to be created.
- filesystem
Id String - The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
- is
Default Boolean Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - managed
Identity StringResource Id The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - storage
Resource StringId - The ID of the Storage Account. Changing this forces a new resource to be created.
- filesystem
Id string - The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
- is
Default boolean Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - managed
Identity stringResource Id The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - storage
Resource stringId - The ID of the Storage Account. Changing this forces a new resource to be created.
- filesystem_
id str - The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
- is_
default bool Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - managed_
identity_ strresource_ id The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - storage_
resource_ strid - The ID of the Storage Account. Changing this forces a new resource to be created.
- filesystem
Id String - The ID of the Gen2 Filesystem. Changing this forces a new resource to be created.
- is
Default Boolean Is this the Default Storage Account for the HDInsight Hadoop Cluster? Changing this forces a new resource to be created.
NOTE: One of the
storage_account
or storage_account_gen2
blocks must be marked as the default. - managed
Identity StringResource Id The ID of Managed Identity to use for accessing the Gen2 filesystem. Changing this forces a new resource to be created.
NOTE: This can be obtained from the
id
of the azure.storage.Container
resource. - storage
Resource StringId - The ID of the Storage Account. Changing this forces a new resource to be created.
Import
HDInsight Spark Clusters can be imported using the resource id
, e.g.
$ pulumi import azure:hdinsight/sparkCluster:SparkCluster example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/mygroup1/providers/Microsoft.HDInsight/clusters/cluster1
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Azure Classic pulumi/pulumi-azure
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
azurerm
Terraform Provider.