We recommend using Azure Native.
azure.datafactory.LinkedServiceAzureDatabricks
Explore with Pulumi AI
Manages a Linked Service (connection) between Azure Databricks and Azure Data Factory.
Example Usage
With Managed Identity & New Cluster
// Example: resource group + Data Factory (with a system-assigned managed
// identity) + Databricks workspace, linked via MSI so that activities run
// on a new cluster created from the newClusterConfig block.
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
const example = new azure.core.ResourceGroup("example", {
name: "example",
location: "East US",
});
//Create a Linked Service using managed identity and new cluster config
const exampleFactory = new azure.datafactory.Factory("example", {
name: "TestDtaFactory92783401247",
location: example.location,
resourceGroupName: example.name,
identity: {
type: "SystemAssigned",
},
});
//Create a databricks instance
const exampleWorkspace = new azure.databricks.Workspace("example", {
name: "databricks-test",
resourceGroupName: example.name,
location: example.location,
sku: "standard",
});
// Authenticates with the factory's managed identity (msiWorkSpaceResourceId);
// adbDomain is interpolated from the workspace's output URL.
const msiLinked = new azure.datafactory.LinkedServiceAzureDatabricks("msi_linked", {
name: "ADBLinkedServiceViaMSI",
dataFactoryId: exampleFactory.id,
description: "ADB Linked Service via MSI",
adbDomain: pulumi.interpolate`https://${exampleWorkspace.workspaceUrl}`,
msiWorkSpaceResourceId: exampleWorkspace.id,
newClusterConfig: {
nodeType: "Standard_NC12",
clusterVersion: "5.5.x-gpu-scala2.11",
minNumberOfWorkers: 1,
maxNumberOfWorkers: 5,
driverNodeType: "Standard_NC12",
logDestination: "dbfs:/logs",
customTags: {
custom_tag1: "sct_value_1",
custom_tag2: "sct_value_2",
},
sparkConfig: {
config1: "value1",
config2: "value2",
},
sparkEnvironmentVariables: {
envVar1: "value1",
envVar2: "value2",
},
initScripts: [
"init.sh",
"init2.sh",
],
},
});
# Example: resource group + Data Factory (with a system-assigned managed
# identity) + Databricks workspace, linked via MSI so that activities run
# on a new cluster created from the new_cluster_config block.
import pulumi
import pulumi_azure as azure
example = azure.core.ResourceGroup("example",
name="example",
location="East US")
#Create a Linked Service using managed identity and new cluster config
example_factory = azure.datafactory.Factory("example",
name="TestDtaFactory92783401247",
location=example.location,
resource_group_name=example.name,
identity={
"type": "SystemAssigned",
})
#Create a databricks instance
example_workspace = azure.databricks.Workspace("example",
name="databricks-test",
resource_group_name=example.name,
location=example.location,
sku="standard")
# Authenticates with the factory's managed identity (msi_work_space_resource_id);
# adb_domain is built from the workspace's output URL via .apply().
msi_linked = azure.datafactory.LinkedServiceAzureDatabricks("msi_linked",
name="ADBLinkedServiceViaMSI",
data_factory_id=example_factory.id,
description="ADB Linked Service via MSI",
adb_domain=example_workspace.workspace_url.apply(lambda workspace_url: f"https://{workspace_url}"),
msi_work_space_resource_id=example_workspace.id,
new_cluster_config={
"node_type": "Standard_NC12",
"cluster_version": "5.5.x-gpu-scala2.11",
"min_number_of_workers": 1,
"max_number_of_workers": 5,
"driver_node_type": "Standard_NC12",
"log_destination": "dbfs:/logs",
"custom_tags": {
"custom_tag1": "sct_value_1",
"custom_tag2": "sct_value_2",
},
"spark_config": {
"config1": "value1",
"config2": "value2",
},
"spark_environment_variables": {
"envVar1": "value1",
"envVar2": "value2",
},
"init_scripts": [
"init.sh",
"init2.sh",
],
})
// Example: resource group + Data Factory (with a system-assigned managed
// identity) + Databricks workspace, linked via MSI so that activities run
// on a new cluster created from the NewClusterConfig block.
package main
import (
"fmt"
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/databricks"
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/datafactory"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
Name: pulumi.String("example"),
Location: pulumi.String("East US"),
})
if err != nil {
return err
}
// Create a Linked Service using managed identity and new cluster config
exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
Name: pulumi.String("TestDtaFactory92783401247"),
Location: example.Location,
ResourceGroupName: example.Name,
Identity: &datafactory.FactoryIdentityArgs{
Type: pulumi.String("SystemAssigned"),
},
})
if err != nil {
return err
}
// Create a databricks instance
exampleWorkspace, err := databricks.NewWorkspace(ctx, "example", &databricks.WorkspaceArgs{
Name: pulumi.String("databricks-test"),
ResourceGroupName: example.Name,
Location: example.Location,
Sku: pulumi.String("standard"),
})
if err != nil {
return err
}
// Link the factory to Databricks; AdbDomain is derived from the
// workspace's output URL via ApplyT, and authentication uses the
// factory's managed identity (MsiWorkSpaceResourceId).
_, err = datafactory.NewLinkedServiceAzureDatabricks(ctx, "msi_linked", &datafactory.LinkedServiceAzureDatabricksArgs{
Name: pulumi.String("ADBLinkedServiceViaMSI"),
DataFactoryId: exampleFactory.ID(),
Description: pulumi.String("ADB Linked Service via MSI"),
AdbDomain: exampleWorkspace.WorkspaceUrl.ApplyT(func(workspaceUrl string) (string, error) {
return fmt.Sprintf("https://%v", workspaceUrl), nil
}).(pulumi.StringOutput),
MsiWorkSpaceResourceId: exampleWorkspace.ID(),
NewClusterConfig: &datafactory.LinkedServiceAzureDatabricksNewClusterConfigArgs{
NodeType: pulumi.String("Standard_NC12"),
ClusterVersion: pulumi.String("5.5.x-gpu-scala2.11"),
MinNumberOfWorkers: pulumi.Int(1),
MaxNumberOfWorkers: pulumi.Int(5),
DriverNodeType: pulumi.String("Standard_NC12"),
LogDestination: pulumi.String("dbfs:/logs"),
CustomTags: pulumi.StringMap{
"custom_tag1": pulumi.String("sct_value_1"),
"custom_tag2": pulumi.String("sct_value_2"),
},
SparkConfig: pulumi.StringMap{
"config1": pulumi.String("value1"),
"config2": pulumi.String("value2"),
},
SparkEnvironmentVariables: pulumi.StringMap{
"envVar1": pulumi.String("value1"),
"envVar2": pulumi.String("value2"),
},
InitScripts: pulumi.StringArray{
pulumi.String("init.sh"),
pulumi.String("init2.sh"),
},
},
})
if err != nil {
return err
}
return nil
})
}
// Example: resource group + Data Factory (with a system-assigned managed
// identity) + Databricks workspace, linked via MSI so that activities run
// on a new cluster created from the NewClusterConfig block.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
return await Deployment.RunAsync(() =>
{
var example = new Azure.Core.ResourceGroup("example", new()
{
Name = "example",
Location = "East US",
});
//Create a Linked Service using managed identity and new cluster config
var exampleFactory = new Azure.DataFactory.Factory("example", new()
{
Name = "TestDtaFactory92783401247",
Location = example.Location,
ResourceGroupName = example.Name,
Identity = new Azure.DataFactory.Inputs.FactoryIdentityArgs
{
Type = "SystemAssigned",
},
});
//Create a databricks instance
var exampleWorkspace = new Azure.DataBricks.Workspace("example", new()
{
Name = "databricks-test",
ResourceGroupName = example.Name,
Location = example.Location,
Sku = "standard",
});
// Authenticates with the factory's managed identity (MsiWorkSpaceResourceId);
// AdbDomain is built from the workspace's output URL via Apply.
var msiLinked = new Azure.DataFactory.LinkedServiceAzureDatabricks("msi_linked", new()
{
Name = "ADBLinkedServiceViaMSI",
DataFactoryId = exampleFactory.Id,
Description = "ADB Linked Service via MSI",
AdbDomain = exampleWorkspace.WorkspaceUrl.Apply(workspaceUrl => $"https://{workspaceUrl}"),
MsiWorkSpaceResourceId = exampleWorkspace.Id,
NewClusterConfig = new Azure.DataFactory.Inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs
{
NodeType = "Standard_NC12",
ClusterVersion = "5.5.x-gpu-scala2.11",
MinNumberOfWorkers = 1,
MaxNumberOfWorkers = 5,
DriverNodeType = "Standard_NC12",
LogDestination = "dbfs:/logs",
CustomTags =
{
{ "custom_tag1", "sct_value_1" },
{ "custom_tag2", "sct_value_2" },
},
SparkConfig =
{
{ "config1", "value1" },
{ "config2", "value2" },
},
SparkEnvironmentVariables =
{
{ "envVar1", "value1" },
{ "envVar2", "value2" },
},
InitScripts = new[]
{
"init.sh",
"init2.sh",
},
},
});
});
// Example: resource group + Data Factory (with a system-assigned managed
// identity) + Databricks workspace, linked via MSI so that activities run
// on a new cluster created from the newClusterConfig block.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.inputs.FactoryIdentityArgs;
import com.pulumi.azure.databricks.Workspace;
import com.pulumi.azure.databricks.WorkspaceArgs;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricks;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs;
import com.pulumi.azure.datafactory.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new ResourceGroup("example", ResourceGroupArgs.builder()
.name("example")
.location("East US")
.build());
//Create a Linked Service using managed identity and new cluster config
var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
.name("TestDtaFactory92783401247")
.location(example.location())
.resourceGroupName(example.name())
.identity(FactoryIdentityArgs.builder()
.type("SystemAssigned")
.build())
.build());
//Create a databricks instance
var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()
.name("databricks-test")
.resourceGroupName(example.name())
.location(example.location())
.sku("standard")
.build());
// Authenticates with the factory's managed identity (msiWorkSpaceResourceId);
// adbDomain is built from the workspace's output URL via applyValue.
var msiLinked = new LinkedServiceAzureDatabricks("msiLinked", LinkedServiceAzureDatabricksArgs.builder()
.name("ADBLinkedServiceViaMSI")
.dataFactoryId(exampleFactory.id())
.description("ADB Linked Service via MSI")
.adbDomain(exampleWorkspace.workspaceUrl().applyValue(workspaceUrl -> String.format("https://%s", workspaceUrl)))
.msiWorkSpaceResourceId(exampleWorkspace.id())
.newClusterConfig(LinkedServiceAzureDatabricksNewClusterConfigArgs.builder()
.nodeType("Standard_NC12")
.clusterVersion("5.5.x-gpu-scala2.11")
.minNumberOfWorkers(1)
.maxNumberOfWorkers(5)
.driverNodeType("Standard_NC12")
.logDestination("dbfs:/logs")
.customTags(Map.ofEntries(
Map.entry("custom_tag1", "sct_value_1"),
Map.entry("custom_tag2", "sct_value_2")
))
.sparkConfig(Map.ofEntries(
Map.entry("config1", "value1"),
Map.entry("config2", "value2")
))
.sparkEnvironmentVariables(Map.ofEntries(
Map.entry("envVar1", "value1"),
Map.entry("envVar2", "value2")
))
.initScripts(
"init.sh",
"init2.sh")
.build())
.build());
}
}
# Example: resource group + Data Factory (with a system-assigned managed
# identity) + Databricks workspace, linked via MSI so that activities run
# on a new cluster created from the newClusterConfig block.
resources:
example:
type: azure:core:ResourceGroup
properties:
name: example
location: East US
#Create a Linked Service using managed identity and new cluster config
exampleFactory:
type: azure:datafactory:Factory
name: example
properties:
name: TestDtaFactory92783401247
location: ${example.location}
resourceGroupName: ${example.name}
identity:
type: SystemAssigned
#Create a databricks instance
exampleWorkspace:
type: azure:databricks:Workspace
name: example
properties:
name: databricks-test
resourceGroupName: ${example.name}
location: ${example.location}
sku: standard
# Authenticates with the factory's managed identity (msiWorkSpaceResourceId);
# adbDomain is interpolated from the workspace's output URL.
msiLinked:
type: azure:datafactory:LinkedServiceAzureDatabricks
name: msi_linked
properties:
name: ADBLinkedServiceViaMSI
dataFactoryId: ${exampleFactory.id}
description: ADB Linked Service via MSI
adbDomain: https://${exampleWorkspace.workspaceUrl}
msiWorkSpaceResourceId: ${exampleWorkspace.id}
newClusterConfig:
nodeType: Standard_NC12
clusterVersion: 5.5.x-gpu-scala2.11
minNumberOfWorkers: 1
maxNumberOfWorkers: 5
driverNodeType: Standard_NC12
logDestination: dbfs:/logs
customTags:
custom_tag1: sct_value_1
custom_tag2: sct_value_2
sparkConfig:
config1: value1
config2: value2
sparkEnvironmentVariables:
envVar1: value1
envVar2: value2
initScripts:
- init.sh
- init2.sh
With Access Token & Existing Cluster
// Example: link a Data Factory to an EXISTING Databricks cluster,
// authenticating with a Databricks personal access token instead of MSI.
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
const example = new azure.core.ResourceGroup("example", {
name: "example",
location: "East US",
});
//Link to an existing cluster via access token
const exampleFactory = new azure.datafactory.Factory("example", {
name: "TestDtaFactory92783401247",
location: example.location,
resourceGroupName: example.name,
});
//Create a databricks instance
const exampleWorkspace = new azure.databricks.Workspace("example", {
name: "databricks-test",
resourceGroupName: example.name,
location: example.location,
sku: "standard",
});
// existingClusterId targets a pre-provisioned cluster in the workspace.
const atLinked = new azure.datafactory.LinkedServiceAzureDatabricks("at_linked", {
name: "ADBLinkedServiceViaAccessToken",
dataFactoryId: exampleFactory.id,
description: "ADB Linked Service via Access Token",
existingClusterId: "0308-201146-sly615",
accessToken: "SomeDatabricksAccessToken",
adbDomain: pulumi.interpolate`https://${exampleWorkspace.workspaceUrl}`,
});
# Example: link a Data Factory to an EXISTING Databricks cluster,
# authenticating with a Databricks personal access token instead of MSI.
import pulumi
import pulumi_azure as azure
example = azure.core.ResourceGroup("example",
name="example",
location="East US")
#Link to an existing cluster via access token
example_factory = azure.datafactory.Factory("example",
name="TestDtaFactory92783401247",
location=example.location,
resource_group_name=example.name)
#Create a databricks instance
example_workspace = azure.databricks.Workspace("example",
name="databricks-test",
resource_group_name=example.name,
location=example.location,
sku="standard")
# existing_cluster_id targets a pre-provisioned cluster in the workspace.
at_linked = azure.datafactory.LinkedServiceAzureDatabricks("at_linked",
name="ADBLinkedServiceViaAccessToken",
data_factory_id=example_factory.id,
description="ADB Linked Service via Access Token",
existing_cluster_id="0308-201146-sly615",
access_token="SomeDatabricksAccessToken",
adb_domain=example_workspace.workspace_url.apply(lambda workspace_url: f"https://{workspace_url}"))
// Example: link a Data Factory to an EXISTING Databricks cluster,
// authenticating with a Databricks personal access token instead of MSI.
package main
import (
"fmt"
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/databricks"
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/datafactory"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
Name: pulumi.String("example"),
Location: pulumi.String("East US"),
})
if err != nil {
return err
}
// Link to an existing cluster via access token
exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
Name: pulumi.String("TestDtaFactory92783401247"),
Location: example.Location,
ResourceGroupName: example.Name,
})
if err != nil {
return err
}
// Create a databricks instance
exampleWorkspace, err := databricks.NewWorkspace(ctx, "example", &databricks.WorkspaceArgs{
Name: pulumi.String("databricks-test"),
ResourceGroupName: example.Name,
Location: example.Location,
Sku: pulumi.String("standard"),
})
if err != nil {
return err
}
// ExistingClusterId targets a pre-provisioned cluster in the workspace;
// AdbDomain is derived from the workspace's output URL via ApplyT.
_, err = datafactory.NewLinkedServiceAzureDatabricks(ctx, "at_linked", &datafactory.LinkedServiceAzureDatabricksArgs{
Name: pulumi.String("ADBLinkedServiceViaAccessToken"),
DataFactoryId: exampleFactory.ID(),
Description: pulumi.String("ADB Linked Service via Access Token"),
ExistingClusterId: pulumi.String("0308-201146-sly615"),
AccessToken: pulumi.String("SomeDatabricksAccessToken"),
AdbDomain: exampleWorkspace.WorkspaceUrl.ApplyT(func(workspaceUrl string) (string, error) {
return fmt.Sprintf("https://%v", workspaceUrl), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
return nil
})
}
// Example: link a Data Factory to an EXISTING Databricks cluster,
// authenticating with a Databricks personal access token instead of MSI.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
return await Deployment.RunAsync(() =>
{
var example = new Azure.Core.ResourceGroup("example", new()
{
Name = "example",
Location = "East US",
});
//Link to an existing cluster via access token
var exampleFactory = new Azure.DataFactory.Factory("example", new()
{
Name = "TestDtaFactory92783401247",
Location = example.Location,
ResourceGroupName = example.Name,
});
//Create a databricks instance
var exampleWorkspace = new Azure.DataBricks.Workspace("example", new()
{
Name = "databricks-test",
ResourceGroupName = example.Name,
Location = example.Location,
Sku = "standard",
});
// ExistingClusterId targets a pre-provisioned cluster in the workspace.
var atLinked = new Azure.DataFactory.LinkedServiceAzureDatabricks("at_linked", new()
{
Name = "ADBLinkedServiceViaAccessToken",
DataFactoryId = exampleFactory.Id,
Description = "ADB Linked Service via Access Token",
ExistingClusterId = "0308-201146-sly615",
AccessToken = "SomeDatabricksAccessToken",
AdbDomain = exampleWorkspace.WorkspaceUrl.Apply(workspaceUrl => $"https://{workspaceUrl}"),
});
});
// Example: link a Data Factory to an EXISTING Databricks cluster,
// authenticating with a Databricks personal access token instead of MSI.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.databricks.Workspace;
import com.pulumi.azure.databricks.WorkspaceArgs;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricks;
import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new ResourceGroup("example", ResourceGroupArgs.builder()
.name("example")
.location("East US")
.build());
//Link to an existing cluster via access token
var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
.name("TestDtaFactory92783401247")
.location(example.location())
.resourceGroupName(example.name())
.build());
//Create a databricks instance
var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()
.name("databricks-test")
.resourceGroupName(example.name())
.location(example.location())
.sku("standard")
.build());
// existingClusterId targets a pre-provisioned cluster in the workspace.
var atLinked = new LinkedServiceAzureDatabricks("atLinked", LinkedServiceAzureDatabricksArgs.builder()
.name("ADBLinkedServiceViaAccessToken")
.dataFactoryId(exampleFactory.id())
.description("ADB Linked Service via Access Token")
.existingClusterId("0308-201146-sly615")
.accessToken("SomeDatabricksAccessToken")
.adbDomain(exampleWorkspace.workspaceUrl().applyValue(workspaceUrl -> String.format("https://%s", workspaceUrl)))
.build());
}
}
# Example: link a Data Factory to an EXISTING Databricks cluster,
# authenticating with a Databricks personal access token instead of MSI.
resources:
example:
type: azure:core:ResourceGroup
properties:
name: example
location: East US
#Link to an existing cluster via access token
exampleFactory:
type: azure:datafactory:Factory
name: example
properties:
name: TestDtaFactory92783401247
location: ${example.location}
resourceGroupName: ${example.name}
#Create a databricks instance
exampleWorkspace:
type: azure:databricks:Workspace
name: example
properties:
name: databricks-test
resourceGroupName: ${example.name}
location: ${example.location}
sku: standard
# existingClusterId targets a pre-provisioned cluster in the workspace.
atLinked:
type: azure:datafactory:LinkedServiceAzureDatabricks
name: at_linked
properties:
name: ADBLinkedServiceViaAccessToken
dataFactoryId: ${exampleFactory.id}
description: ADB Linked Service via Access Token
existingClusterId: 0308-201146-sly615
accessToken: SomeDatabricksAccessToken
adbDomain: https://${exampleWorkspace.workspaceUrl}
Create LinkedServiceAzureDatabricks Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new LinkedServiceAzureDatabricks(name: string, args: LinkedServiceAzureDatabricksArgs, opts?: CustomResourceOptions);
@overload
def LinkedServiceAzureDatabricks(resource_name: str,
args: LinkedServiceAzureDatabricksArgs,
opts: Optional[ResourceOptions] = None)
@overload
def LinkedServiceAzureDatabricks(resource_name: str,
opts: Optional[ResourceOptions] = None,
data_factory_id: Optional[str] = None,
adb_domain: Optional[str] = None,
existing_cluster_id: Optional[str] = None,
annotations: Optional[Sequence[str]] = None,
additional_properties: Optional[Mapping[str, str]] = None,
description: Optional[str] = None,
access_token: Optional[str] = None,
instance_pool: Optional[LinkedServiceAzureDatabricksInstancePoolArgs] = None,
integration_runtime_name: Optional[str] = None,
key_vault_password: Optional[LinkedServiceAzureDatabricksKeyVaultPasswordArgs] = None,
msi_work_space_resource_id: Optional[str] = None,
name: Optional[str] = None,
new_cluster_config: Optional[LinkedServiceAzureDatabricksNewClusterConfigArgs] = None,
parameters: Optional[Mapping[str, str]] = None)
func NewLinkedServiceAzureDatabricks(ctx *Context, name string, args LinkedServiceAzureDatabricksArgs, opts ...ResourceOption) (*LinkedServiceAzureDatabricks, error)
public LinkedServiceAzureDatabricks(string name, LinkedServiceAzureDatabricksArgs args, CustomResourceOptions? opts = null)
public LinkedServiceAzureDatabricks(String name, LinkedServiceAzureDatabricksArgs args)
public LinkedServiceAzureDatabricks(String name, LinkedServiceAzureDatabricksArgs args, CustomResourceOptions options)
type: azure:datafactory:LinkedServiceAzureDatabricks
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args LinkedServiceAzureDatabricksArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args LinkedServiceAzureDatabricksArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args LinkedServiceAzureDatabricksArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args LinkedServiceAzureDatabricksArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args LinkedServiceAzureDatabricksArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
// Reference example: every input property with placeholder values.
// Note: InstancePool, KeyVaultPassword, and NewClusterConfig are mutually
// exclusive cluster-selection options in real usage.
var linkedServiceAzureDatabricksResource = new Azure.DataFactory.LinkedServiceAzureDatabricks("linkedServiceAzureDatabricksResource", new()
{
DataFactoryId = "string",
AdbDomain = "string",
ExistingClusterId = "string",
Annotations = new[]
{
"string",
},
AdditionalProperties =
{
{ "string", "string" },
},
Description = "string",
AccessToken = "string",
InstancePool = new Azure.DataFactory.Inputs.LinkedServiceAzureDatabricksInstancePoolArgs
{
ClusterVersion = "string",
InstancePoolId = "string",
MaxNumberOfWorkers = 0,
MinNumberOfWorkers = 0,
},
IntegrationRuntimeName = "string",
KeyVaultPassword = new Azure.DataFactory.Inputs.LinkedServiceAzureDatabricksKeyVaultPasswordArgs
{
LinkedServiceName = "string",
SecretName = "string",
},
MsiWorkSpaceResourceId = "string",
Name = "string",
NewClusterConfig = new Azure.DataFactory.Inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs
{
ClusterVersion = "string",
NodeType = "string",
CustomTags =
{
{ "string", "string" },
},
DriverNodeType = "string",
InitScripts = new[]
{
"string",
},
LogDestination = "string",
MaxNumberOfWorkers = 0,
MinNumberOfWorkers = 0,
SparkConfig =
{
{ "string", "string" },
},
SparkEnvironmentVariables =
{
{ "string", "string" },
},
},
Parameters =
{
{ "string", "string" },
},
});
// Reference example: every input property with placeholder values.
// Note: InstancePool, KeyVaultPassword, and NewClusterConfig are mutually
// exclusive cluster-selection options in real usage.
example, err := datafactory.NewLinkedServiceAzureDatabricks(ctx, "linkedServiceAzureDatabricksResource", &datafactory.LinkedServiceAzureDatabricksArgs{
DataFactoryId: pulumi.String("string"),
AdbDomain: pulumi.String("string"),
ExistingClusterId: pulumi.String("string"),
Annotations: pulumi.StringArray{
pulumi.String("string"),
},
AdditionalProperties: pulumi.StringMap{
"string": pulumi.String("string"),
},
Description: pulumi.String("string"),
AccessToken: pulumi.String("string"),
InstancePool: &datafactory.LinkedServiceAzureDatabricksInstancePoolArgs{
ClusterVersion: pulumi.String("string"),
InstancePoolId: pulumi.String("string"),
MaxNumberOfWorkers: pulumi.Int(0),
MinNumberOfWorkers: pulumi.Int(0),
},
IntegrationRuntimeName: pulumi.String("string"),
KeyVaultPassword: &datafactory.LinkedServiceAzureDatabricksKeyVaultPasswordArgs{
LinkedServiceName: pulumi.String("string"),
SecretName: pulumi.String("string"),
},
MsiWorkSpaceResourceId: pulumi.String("string"),
Name: pulumi.String("string"),
NewClusterConfig: &datafactory.LinkedServiceAzureDatabricksNewClusterConfigArgs{
ClusterVersion: pulumi.String("string"),
NodeType: pulumi.String("string"),
CustomTags: pulumi.StringMap{
"string": pulumi.String("string"),
},
DriverNodeType: pulumi.String("string"),
InitScripts: pulumi.StringArray{
pulumi.String("string"),
},
LogDestination: pulumi.String("string"),
MaxNumberOfWorkers: pulumi.Int(0),
MinNumberOfWorkers: pulumi.Int(0),
SparkConfig: pulumi.StringMap{
"string": pulumi.String("string"),
},
SparkEnvironmentVariables: pulumi.StringMap{
"string": pulumi.String("string"),
},
},
Parameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
})
// Reference example: every input property with placeholder values.
// Note: instancePool, keyVaultPassword, and newClusterConfig are mutually
// exclusive cluster-selection options in real usage.
var linkedServiceAzureDatabricksResource = new LinkedServiceAzureDatabricks("linkedServiceAzureDatabricksResource", LinkedServiceAzureDatabricksArgs.builder()
.dataFactoryId("string")
.adbDomain("string")
.existingClusterId("string")
.annotations("string")
.additionalProperties(Map.of("string", "string"))
.description("string")
.accessToken("string")
.instancePool(LinkedServiceAzureDatabricksInstancePoolArgs.builder()
.clusterVersion("string")
.instancePoolId("string")
.maxNumberOfWorkers(0)
.minNumberOfWorkers(0)
.build())
.integrationRuntimeName("string")
.keyVaultPassword(LinkedServiceAzureDatabricksKeyVaultPasswordArgs.builder()
.linkedServiceName("string")
.secretName("string")
.build())
.msiWorkSpaceResourceId("string")
.name("string")
.newClusterConfig(LinkedServiceAzureDatabricksNewClusterConfigArgs.builder()
.clusterVersion("string")
.nodeType("string")
.customTags(Map.of("string", "string"))
.driverNodeType("string")
.initScripts("string")
.logDestination("string")
.maxNumberOfWorkers(0)
.minNumberOfWorkers(0)
.sparkConfig(Map.of("string", "string"))
.sparkEnvironmentVariables(Map.of("string", "string"))
.build())
.parameters(Map.of("string", "string"))
.build());
# Reference example: every input property with placeholder values.
# Note: instance_pool, key_vault_password, and new_cluster_config are mutually
# exclusive cluster-selection options in real usage.
linked_service_azure_databricks_resource = azure.datafactory.LinkedServiceAzureDatabricks("linkedServiceAzureDatabricksResource",
data_factory_id="string",
adb_domain="string",
existing_cluster_id="string",
annotations=["string"],
additional_properties={
"string": "string",
},
description="string",
access_token="string",
instance_pool={
"cluster_version": "string",
"instance_pool_id": "string",
"max_number_of_workers": 0,
"min_number_of_workers": 0,
},
integration_runtime_name="string",
key_vault_password={
"linked_service_name": "string",
"secret_name": "string",
},
msi_work_space_resource_id="string",
name="string",
new_cluster_config={
"cluster_version": "string",
"node_type": "string",
"custom_tags": {
"string": "string",
},
"driver_node_type": "string",
"init_scripts": ["string"],
"log_destination": "string",
"max_number_of_workers": 0,
"min_number_of_workers": 0,
"spark_config": {
"string": "string",
},
"spark_environment_variables": {
"string": "string",
},
},
parameters={
"string": "string",
})
// Reference example: every input property with placeholder values.
// Note: instancePool, keyVaultPassword, and newClusterConfig are mutually
// exclusive cluster-selection options in real usage.
const linkedServiceAzureDatabricksResource = new azure.datafactory.LinkedServiceAzureDatabricks("linkedServiceAzureDatabricksResource", {
dataFactoryId: "string",
adbDomain: "string",
existingClusterId: "string",
annotations: ["string"],
additionalProperties: {
string: "string",
},
description: "string",
accessToken: "string",
instancePool: {
clusterVersion: "string",
instancePoolId: "string",
maxNumberOfWorkers: 0,
minNumberOfWorkers: 0,
},
integrationRuntimeName: "string",
keyVaultPassword: {
linkedServiceName: "string",
secretName: "string",
},
msiWorkSpaceResourceId: "string",
name: "string",
newClusterConfig: {
clusterVersion: "string",
nodeType: "string",
customTags: {
string: "string",
},
driverNodeType: "string",
initScripts: ["string"],
logDestination: "string",
maxNumberOfWorkers: 0,
minNumberOfWorkers: 0,
sparkConfig: {
string: "string",
},
sparkEnvironmentVariables: {
string: "string",
},
},
parameters: {
string: "string",
},
});
# Reference example: every input property with placeholder values.
# Note: instancePool, keyVaultPassword, and newClusterConfig are mutually
# exclusive cluster-selection options in real usage.
type: azure:datafactory:LinkedServiceAzureDatabricks
properties:
accessToken: string
adbDomain: string
additionalProperties:
string: string
annotations:
- string
dataFactoryId: string
description: string
existingClusterId: string
instancePool:
clusterVersion: string
instancePoolId: string
maxNumberOfWorkers: 0
minNumberOfWorkers: 0
integrationRuntimeName: string
keyVaultPassword:
linkedServiceName: string
secretName: string
msiWorkSpaceResourceId: string
name: string
newClusterConfig:
clusterVersion: string
customTags:
string: string
driverNodeType: string
initScripts:
- string
logDestination: string
maxNumberOfWorkers: 0
minNumberOfWorkers: 0
nodeType: string
sparkConfig:
string: string
sparkEnvironmentVariables:
string: string
parameters:
string: string
LinkedServiceAzureDatabricks Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The LinkedServiceAzureDatabricks resource accepts the following input properties:
- AdbDomain string - The domain URL of the databricks instance.
- DataFactoryId string - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- AccessToken string - Authenticate to ADB via an access token.
- AdditionalProperties Dictionary&lt;string, string&gt; - A map of additional properties to associate with the Data Factory Linked Service.
- Annotations List&lt;string&gt; - List of tags that can be used for describing the Data Factory Linked Service.
- Description string - The description for the Data Factory Linked Service.
- ExistingClusterId string - The cluster_id of an existing cluster within the linked ADB instance.
- InstancePool LinkedServiceAzureDatabricksInstancePool - Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
- IntegrationRuntimeName string - The integration runtime reference to associate with the Data Factory Linked Service.
- KeyVaultPassword LinkedServiceAzureDatabricksKeyVaultPassword - Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
- MsiWorkSpaceResourceId string - Authenticate to ADB via managed service identity.
- Name string - Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- NewClusterConfig LinkedServiceAzureDatabricksNewClusterConfig - Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
- Parameters Dictionary&lt;string, string&gt; - A map of parameters to associate with the Data Factory Linked Service.
- Adb
Domain string - The domain URL of the databricks instance.
- Data
Factory stringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- Access
Token string - Authenticate to ADB via an access token.
- Additional
Properties map[string]string - A map of additional properties to associate with the Data Factory Linked Service.
- Annotations []string
- List of tags that can be used for describing the Data Factory Linked Service.
- Description string
- The description for the Data Factory Linked Service.
- Existing
Cluster stringId - The cluster_id of an existing cluster within the linked ADB instance.
- Instance
Pool LinkedService Azure Databricks Instance Pool Args - Leverages an instance pool within the linked ADB instance as one
instance_pool
block defined below. - Integration
Runtime stringName - The integration runtime reference to associate with the Data Factory Linked Service.
- Key
Vault LinkedPassword Service Azure Databricks Key Vault Password Args - Authenticate to ADB via Azure Key Vault Linked Service as defined in the
key_vault_password
block below. - Msi
Work stringSpace Resource Id - Authenticate to ADB via managed service identity.
- Name string
- Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- New
Cluster Config LinkedServiceAzureDatabricksNewClusterConfigArgs - Creates new clusters within the linked ADB instance as defined in the
new_cluster_config
block below. - Parameters map[string]string
- A map of parameters to associate with the Data Factory Linked Service.
- adb
Domain String - The domain URL of the databricks instance.
- data
Factory StringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- access
Token String - Authenticate to ADB via an access token.
- additional
Properties Map<String,String> - A map of additional properties to associate with the Data Factory Linked Service.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Linked Service.
- description String
- The description for the Data Factory Linked Service.
- existing
Cluster StringId - The cluster_id of an existing cluster within the linked ADB instance.
- instance
Pool LinkedService Azure Databricks Instance Pool - Leverages an instance pool within the linked ADB instance as one
instance_pool
block defined below. - integration
Runtime StringName - The integration runtime reference to associate with the Data Factory Linked Service.
- key
Vault LinkedPassword Service Azure Databricks Key Vault Password - Authenticate to ADB via Azure Key Vault Linked Service as defined in the
key_vault_password
block below. - msi
Work StringSpace Resource Id - Authenticate to ADB via managed service identity.
- name String
- Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- new
Cluster LinkedConfig Service Azure Databricks New Cluster Config - Creates new clusters within the linked ADB instance as defined in the
new_cluster_config
block below. - parameters Map<String,String>
- A map of parameters to associate with the Data Factory Linked Service.
- adb
Domain string - The domain URL of the databricks instance.
- data
Factory stringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- access
Token string - Authenticate to ADB via an access token.
- additional
Properties {[key: string]: string} - A map of additional properties to associate with the Data Factory Linked Service.
- annotations string[]
- List of tags that can be used for describing the Data Factory Linked Service.
- description string
- The description for the Data Factory Linked Service.
- existing
Cluster stringId - The cluster_id of an existing cluster within the linked ADB instance.
- instance
Pool LinkedService Azure Databricks Instance Pool - Leverages an instance pool within the linked ADB instance as one
instance_pool
block defined below. - integration
Runtime stringName - The integration runtime reference to associate with the Data Factory Linked Service.
- key
Vault LinkedPassword Service Azure Databricks Key Vault Password - Authenticate to ADB via Azure Key Vault Linked Service as defined in the
key_vault_password
block below. - msi
Work stringSpace Resource Id - Authenticate to ADB via managed service identity.
- name string
- Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- new
Cluster LinkedConfig Service Azure Databricks New Cluster Config - Creates new clusters within the linked ADB instance as defined in the
new_cluster_config
block below. - parameters {[key: string]: string}
- A map of parameters to associate with the Data Factory Linked Service.
- adb_
domain str - The domain URL of the databricks instance.
- data_
factory_ strid - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- access_
token str - Authenticate to ADB via an access token.
- additional_
properties Mapping[str, str] - A map of additional properties to associate with the Data Factory Linked Service.
- annotations Sequence[str]
- List of tags that can be used for describing the Data Factory Linked Service.
- description str
- The description for the Data Factory Linked Service.
- existing_
cluster_ strid - The cluster_id of an existing cluster within the linked ADB instance.
- instance_
pool LinkedService Azure Databricks Instance Pool Args - Leverages an instance pool within the linked ADB instance as one
instance_pool
block defined below. - integration_
runtime_ strname - The integration runtime reference to associate with the Data Factory Linked Service.
- key_
vault_ Linkedpassword Service Azure Databricks Key Vault Password Args - Authenticate to ADB via Azure Key Vault Linked Service as defined in the
key_vault_password
block below. - msi_
work_ strspace_ resource_ id - Authenticate to ADB via managed service identity.
- name str
- Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- new_
cluster_ Linkedconfig Service Azure Databricks New Cluster Config Args - Creates new clusters within the linked ADB instance as defined in the
new_cluster_config
block below. - parameters Mapping[str, str]
- A map of parameters to associate with the Data Factory Linked Service.
- adb
Domain String - The domain URL of the databricks instance.
- data
Factory StringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- access
Token String - Authenticate to ADB via an access token.
- additional
Properties Map<String> - A map of additional properties to associate with the Data Factory Linked Service.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Linked Service.
- description String
- The description for the Data Factory Linked Service.
- existing
Cluster StringId - The cluster_id of an existing cluster within the linked ADB instance.
- instance
Pool Property Map - Leverages an instance pool within the linked ADB instance as one
instance_pool
block defined below. - integration
Runtime StringName - The integration runtime reference to associate with the Data Factory Linked Service.
- key
Vault Property MapPassword - Authenticate to ADB via Azure Key Vault Linked Service as defined in the
key_vault_password
block below. - msi
Work StringSpace Resource Id - Authenticate to ADB via managed service identity.
- name String
- Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- new
Cluster Property MapConfig - Creates new clusters within the linked ADB instance as defined in the
new_cluster_config
block below. - parameters Map<String>
- A map of parameters to associate with the Data Factory Linked Service.
Outputs
All input properties are implicitly available as output properties. Additionally, the LinkedServiceAzureDatabricks resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing LinkedServiceAzureDatabricks Resource
Get an existing LinkedServiceAzureDatabricks resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: LinkedServiceAzureDatabricksState, opts?: CustomResourceOptions): LinkedServiceAzureDatabricks
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
access_token: Optional[str] = None,
adb_domain: Optional[str] = None,
additional_properties: Optional[Mapping[str, str]] = None,
annotations: Optional[Sequence[str]] = None,
data_factory_id: Optional[str] = None,
description: Optional[str] = None,
existing_cluster_id: Optional[str] = None,
instance_pool: Optional[LinkedServiceAzureDatabricksInstancePoolArgs] = None,
integration_runtime_name: Optional[str] = None,
key_vault_password: Optional[LinkedServiceAzureDatabricksKeyVaultPasswordArgs] = None,
msi_work_space_resource_id: Optional[str] = None,
name: Optional[str] = None,
new_cluster_config: Optional[LinkedServiceAzureDatabricksNewClusterConfigArgs] = None,
parameters: Optional[Mapping[str, str]] = None) -> LinkedServiceAzureDatabricks
func GetLinkedServiceAzureDatabricks(ctx *Context, name string, id IDInput, state *LinkedServiceAzureDatabricksState, opts ...ResourceOption) (*LinkedServiceAzureDatabricks, error)
public static LinkedServiceAzureDatabricks Get(string name, Input<string> id, LinkedServiceAzureDatabricksState? state, CustomResourceOptions? opts = null)
public static LinkedServiceAzureDatabricks get(String name, Output<String> id, LinkedServiceAzureDatabricksState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Access
Token string - Authenticate to ADB via an access token.
- Adb
Domain string - The domain URL of the databricks instance.
- Additional
Properties Dictionary<string, string> - A map of additional properties to associate with the Data Factory Linked Service.
- Annotations List<string>
- List of tags that can be used for describing the Data Factory Linked Service.
- Data
Factory stringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- Description string
- The description for the Data Factory Linked Service.
- Existing
Cluster stringId - The cluster_id of an existing cluster within the linked ADB instance.
- Instance
Pool LinkedService Azure Databricks Instance Pool - Leverages an instance pool within the linked ADB instance as one
instance_pool
block defined below. - Integration
Runtime stringName - The integration runtime reference to associate with the Data Factory Linked Service.
- Key
Vault LinkedPassword Service Azure Databricks Key Vault Password - Authenticate to ADB via Azure Key Vault Linked Service as defined in the
key_vault_password
block below. - Msi
Work stringSpace Resource Id - Authenticate to ADB via managed service identity.
- Name string
- Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- New
Cluster LinkedConfig Service Azure Databricks New Cluster Config - Creates new clusters within the linked ADB instance as defined in the
new_cluster_config
block below. - Parameters Dictionary<string, string>
- A map of parameters to associate with the Data Factory Linked Service.
- Access
Token string - Authenticate to ADB via an access token.
- Adb
Domain string - The domain URL of the databricks instance.
- Additional
Properties map[string]string - A map of additional properties to associate with the Data Factory Linked Service.
- Annotations []string
- List of tags that can be used for describing the Data Factory Linked Service.
- Data
Factory stringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- Description string
- The description for the Data Factory Linked Service.
- Existing
Cluster stringId - The cluster_id of an existing cluster within the linked ADB instance.
- Instance
Pool LinkedService Azure Databricks Instance Pool Args - Leverages an instance pool within the linked ADB instance as one
instance_pool
block defined below. - Integration
Runtime stringName - The integration runtime reference to associate with the Data Factory Linked Service.
- Key
Vault LinkedPassword Service Azure Databricks Key Vault Password Args - Authenticate to ADB via Azure Key Vault Linked Service as defined in the
key_vault_password
block below. - Msi
Work stringSpace Resource Id - Authenticate to ADB via managed service identity.
- Name string
- Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- New
Cluster LinkedConfig Service Azure Databricks New Cluster Config Args - Creates new clusters within the linked ADB instance as defined in the
new_cluster_config
block below. - Parameters map[string]string
- A map of parameters to associate with the Data Factory Linked Service.
- access
Token String - Authenticate to ADB via an access token.
- adb
Domain String - The domain URL of the databricks instance.
- additional
Properties Map<String,String> - A map of additional properties to associate with the Data Factory Linked Service.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Linked Service.
- data
Factory StringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- description String
- The description for the Data Factory Linked Service.
- existing
Cluster StringId - The cluster_id of an existing cluster within the linked ADB instance.
- instance
Pool LinkedService Azure Databricks Instance Pool - Leverages an instance pool within the linked ADB instance as one
instance_pool
block defined below. - integration
Runtime StringName - The integration runtime reference to associate with the Data Factory Linked Service.
- key
Vault LinkedPassword Service Azure Databricks Key Vault Password - Authenticate to ADB via Azure Key Vault Linked Service as defined in the
key_vault_password
block below. - msi
Work StringSpace Resource Id - Authenticate to ADB via managed service identity.
- name String
- Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- new
Cluster LinkedConfig Service Azure Databricks New Cluster Config - Creates new clusters within the linked ADB instance as defined in the
new_cluster_config
block below. - parameters Map<String,String>
- A map of parameters to associate with the Data Factory Linked Service.
- access
Token string - Authenticate to ADB via an access token.
- adb
Domain string - The domain URL of the databricks instance.
- additional
Properties {[key: string]: string} - A map of additional properties to associate with the Data Factory Linked Service.
- annotations string[]
- List of tags that can be used for describing the Data Factory Linked Service.
- data
Factory stringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- description string
- The description for the Data Factory Linked Service.
- existing
Cluster stringId - The cluster_id of an existing cluster within the linked ADB instance.
- instance
Pool LinkedService Azure Databricks Instance Pool - Leverages an instance pool within the linked ADB instance as one
instance_pool
block defined below. - integration
Runtime stringName - The integration runtime reference to associate with the Data Factory Linked Service.
- key
Vault LinkedPassword Service Azure Databricks Key Vault Password - Authenticate to ADB via Azure Key Vault Linked Service as defined in the
key_vault_password
block below. - msi
Work stringSpace Resource Id - Authenticate to ADB via managed service identity.
- name string
- Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- new
Cluster LinkedConfig Service Azure Databricks New Cluster Config - Creates new clusters within the linked ADB instance as defined in the
new_cluster_config
block below. - parameters {[key: string]: string}
- A map of parameters to associate with the Data Factory Linked Service.
- access_
token str - Authenticate to ADB via an access token.
- adb_
domain str - The domain URL of the databricks instance.
- additional_
properties Mapping[str, str] - A map of additional properties to associate with the Data Factory Linked Service.
- annotations Sequence[str]
- List of tags that can be used for describing the Data Factory Linked Service.
- data_
factory_ strid - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- description str
- The description for the Data Factory Linked Service.
- existing_
cluster_ strid - The cluster_id of an existing cluster within the linked ADB instance.
- instance_
pool LinkedService Azure Databricks Instance Pool Args - Leverages an instance pool within the linked ADB instance as one
instance_pool
block defined below. - integration_
runtime_ strname - The integration runtime reference to associate with the Data Factory Linked Service.
- key_
vault_ Linkedpassword Service Azure Databricks Key Vault Password Args - Authenticate to ADB via Azure Key Vault Linked Service as defined in the
key_vault_password
block below. - msi_
work_ strspace_ resource_ id - Authenticate to ADB via managed service identity.
- name str
- Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- new_
cluster_ Linkedconfig Service Azure Databricks New Cluster Config Args - Creates new clusters within the linked ADB instance as defined in the
new_cluster_config
block below. - parameters Mapping[str, str]
- A map of parameters to associate with the Data Factory Linked Service.
- access
Token String - Authenticate to ADB via an access token.
- adb
Domain String - The domain URL of the databricks instance.
- additional
Properties Map<String> - A map of additional properties to associate with the Data Factory Linked Service.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Linked Service.
- data
Factory StringId - The Data Factory ID in which to associate the Linked Service with. Changing this forces a new resource.
- description String
- The description for the Data Factory Linked Service.
- existing
Cluster StringId - The cluster_id of an existing cluster within the linked ADB instance.
- instance
Pool Property Map - Leverages an instance pool within the linked ADB instance as one
instance_pool
block defined below. - integration
Runtime StringName - The integration runtime reference to associate with the Data Factory Linked Service.
- key
Vault Property MapPassword - Authenticate to ADB via Azure Key Vault Linked Service as defined in the
key_vault_password
block below. - msi
Work StringSpace Resource Id - Authenticate to ADB via managed service identity.
- name String
- Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
- new
Cluster Property MapConfig - Creates new clusters within the linked ADB instance as defined in the
new_cluster_config
block below. - parameters Map<String>
- A map of parameters to associate with the Data Factory Linked Service.
Supporting Types
LinkedServiceAzureDatabricksInstancePool, LinkedServiceAzureDatabricksInstancePoolArgs
- Cluster
Version string - Spark version of the cluster.
- Instance
Pool stringId - Identifier of the instance pool within the linked ADB instance.
- Max
Number intOf Workers - The max number of worker nodes. Set this value if you want to enable autoscaling between the
min_number_of_workers
and this value. Omit this value to use a fixed number of workers defined in themin_number_of_workers
property. - Min
Number intOf Workers - The minimum number of worker nodes. Defaults to
1
.
- Cluster
Version string - Spark version of the cluster.
- Instance
Pool stringId - Identifier of the instance pool within the linked ADB instance.
- Max
Number intOf Workers - The max number of worker nodes. Set this value if you want to enable autoscaling between the
min_number_of_workers
and this value. Omit this value to use a fixed number of workers defined in themin_number_of_workers
property. - Min
Number intOf Workers - The minimum number of worker nodes. Defaults to
1
.
- cluster
Version String - Spark version of the cluster.
- instance
Pool StringId - Identifier of the instance pool within the linked ADB instance.
- max
Number IntegerOf Workers - The max number of worker nodes. Set this value if you want to enable autoscaling between the
min_number_of_workers
and this value. Omit this value to use a fixed number of workers defined in themin_number_of_workers
property. - min
Number IntegerOf Workers - The minimum number of worker nodes. Defaults to
1
.
- cluster
Version string - Spark version of the cluster.
- instance
Pool stringId - Identifier of the instance pool within the linked ADB instance.
- max
Number numberOf Workers - The max number of worker nodes. Set this value if you want to enable autoscaling between the
min_number_of_workers
and this value. Omit this value to use a fixed number of workers defined in themin_number_of_workers
property. - min
Number numberOf Workers - The minimum number of worker nodes. Defaults to
1
.
- cluster_
version str - Spark version of the cluster.
- instance_
pool_ strid - Identifier of the instance pool within the linked ADB instance.
- max_
number_ intof_ workers - The max number of worker nodes. Set this value if you want to enable autoscaling between the
min_number_of_workers
and this value. Omit this value to use a fixed number of workers defined in themin_number_of_workers
property. - min_
number_ intof_ workers - The minimum number of worker nodes. Defaults to
1
.
- cluster
Version String - Spark version of the cluster.
- instance
Pool StringId - Identifier of the instance pool within the linked ADB instance.
- max
Number NumberOf Workers - The max number of worker nodes. Set this value if you want to enable autoscaling between the
min_number_of_workers
and this value. Omit this value to use a fixed number of workers defined in themin_number_of_workers
property. - min
Number NumberOf Workers - The minimum number of worker nodes. Defaults to
1
.
LinkedServiceAzureDatabricksKeyVaultPassword, LinkedServiceAzureDatabricksKeyVaultPasswordArgs
- Linked
Service stringName - Specifies the name of an existing Key Vault Data Factory Linked Service.
- Secret
Name string - Specifies the secret name in Azure Key Vault that stores ADB access token.
- Linked
Service stringName - Specifies the name of an existing Key Vault Data Factory Linked Service.
- Secret
Name string - Specifies the secret name in Azure Key Vault that stores ADB access token.
- linked
Service StringName - Specifies the name of an existing Key Vault Data Factory Linked Service.
- secret
Name String - Specifies the secret name in Azure Key Vault that stores ADB access token.
- linked
Service stringName - Specifies the name of an existing Key Vault Data Factory Linked Service.
- secret
Name string - Specifies the secret name in Azure Key Vault that stores ADB access token.
- linked_
service_ strname - Specifies the name of an existing Key Vault Data Factory Linked Service.
- secret_
name str - Specifies the secret name in Azure Key Vault that stores ADB access token.
- linked
Service StringName - Specifies the name of an existing Key Vault Data Factory Linked Service.
- secret
Name String - Specifies the secret name in Azure Key Vault that stores ADB access token.
LinkedServiceAzureDatabricksNewClusterConfig, LinkedServiceAzureDatabricksNewClusterConfigArgs
- Cluster
Version string - Spark version of the cluster.
- Node
Type string - Node type for the new cluster.
- Dictionary<string, string>
- Tags for the cluster resource.
- Driver
Node stringType - Driver node type for the cluster.
- Init
Scripts List<string> - User defined initialization scripts for the cluster.
- Log
Destination string - Location to deliver Spark driver, worker, and event logs.
- Max
Number intOf Workers - Specifies the maximum number of worker nodes. It should be between 1 and 25000.
- Min
Number intOf Workers - Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to
1
. - Spark
Config Dictionary<string, string> - User-specified Spark configuration variables key-value pairs.
- Spark
Environment Dictionary<string, string>Variables - User-specified Spark environment variables key-value pairs.
- Cluster
Version string - Spark version of the cluster.
- Node
Type string - Node type for the new cluster.
- map[string]string
- Tags for the cluster resource.
- Driver
Node stringType - Driver node type for the cluster.
- Init
Scripts []string - User defined initialization scripts for the cluster.
- Log
Destination string - Location to deliver Spark driver, worker, and event logs.
- Max
Number intOf Workers - Specifies the maximum number of worker nodes. It should be between 1 and 25000.
- Min
Number intOf Workers - Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to
1
. - Spark
Config map[string]string - User-specified Spark configuration variables key-value pairs.
- Spark
Environment map[string]stringVariables - User-specified Spark environment variables key-value pairs.
- cluster
Version String - Spark version of the cluster.
- node
Type String - Node type for the new cluster.
- Map<String,String>
- Tags for the cluster resource.
- driver
Node StringType - Driver node type for the cluster.
- init
Scripts List<String> - User defined initialization scripts for the cluster.
- log
Destination String - Location to deliver Spark driver, worker, and event logs.
- max
Number IntegerOf Workers - Specifies the maximum number of worker nodes. It should be between 1 and 25000.
- min
Number IntegerOf Workers - Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to
1
. - spark
Config Map<String,String> - User-specified Spark configuration variables key-value pairs.
- spark
Environment Map<String,String>Variables - User-specified Spark environment variables key-value pairs.
- cluster
Version string - Spark version of the cluster.
- node
Type string - Node type for the new cluster.
- {[key: string]: string}
- Tags for the cluster resource.
- driver
Node stringType - Driver node type for the cluster.
- init
Scripts string[] - User defined initialization scripts for the cluster.
- log
Destination string - Location to deliver Spark driver, worker, and event logs.
- max
Number numberOf Workers - Specifies the maximum number of worker nodes. It should be between 1 and 25000.
- min
Number numberOf Workers - Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to
1
. - spark
Config {[key: string]: string} - User-specified Spark configuration variables key-value pairs.
- spark
Environment {[key: string]: string}Variables - User-specified Spark environment variables key-value pairs.
- cluster_
version str - Spark version of the cluster.
- node_
type str - Node type for the new cluster.
- Mapping[str, str]
- Tags for the cluster resource.
- driver_
node_ strtype - Driver node type for the cluster.
- init_
scripts Sequence[str] - User defined initialization scripts for the cluster.
- log_
destination str - Location to deliver Spark driver, worker, and event logs.
- max_
number_ intof_ workers - Specifies the maximum number of worker nodes. It should be between 1 and 25000.
- min_
number_ intof_ workers - Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to
1
. - spark_
config Mapping[str, str] - User-specified Spark configuration variables key-value pairs.
- spark_
environment_ Mapping[str, str]variables - User-specified Spark environment variables key-value pairs.
- cluster
Version String - Spark version of the cluster.
- node
Type String - Node type for the new cluster.
- Map<String>
- Tags for the cluster resource.
- driver
Node StringType - Driver node type for the cluster.
- init
Scripts List<String> - User defined initialization scripts for the cluster.
- log
Destination String - Location to deliver Spark driver, worker, and event logs.
- max
Number NumberOf Workers - Specifies the maximum number of worker nodes. It should be between 1 and 25000.
- min
Number NumberOf Workers - Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to
1
. - spark
Config Map<String> - User-specified Spark configuration variables key-value pairs.
- spark
Environment Map<String>Variables - User-specified Spark environment variables key-value pairs.
Import
Data Factory Linked Services can be imported using the resource id
, e.g.
$ pulumi import azure:datafactory/linkedServiceAzureDatabricks:LinkedServiceAzureDatabricks example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/linkedservices/example
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Azure Classic pulumi/pulumi-azure
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
azurerm
Terraform Provider.