1. Packages
  2. Azure Classic
  3. API Docs
  4. datafactory
  5. LinkedServiceAzureDatabricks

We recommend using the Azure Native provider instead of Azure Classic for new workloads.

Azure v6.10.0 published on Tuesday, Nov 19, 2024 by Pulumi

azure.datafactory.LinkedServiceAzureDatabricks

Explore with Pulumi AI

azure logo

We recommend using the Azure Native provider instead of Azure Classic for new workloads.

Azure v6.10.0 published on Tuesday, Nov 19, 2024 by Pulumi

    Manages a Linked Service (connection) between Azure Databricks and Azure Data Factory.

    Example Usage

    With Managed Identity & New Cluster

    import * as pulumi from "@pulumi/pulumi";
    import * as azure from "@pulumi/azure";
    
    const example = new azure.core.ResourceGroup("example", {
        name: "example",
        location: "East US",
    });
    //Create a Linked Service using managed identity and new cluster config
    const exampleFactory = new azure.datafactory.Factory("example", {
        name: "TestDtaFactory92783401247",
        location: example.location,
        resourceGroupName: example.name,
        identity: {
            type: "SystemAssigned",
        },
    });
    //Create a databricks instance
    const exampleWorkspace = new azure.databricks.Workspace("example", {
        name: "databricks-test",
        resourceGroupName: example.name,
        location: example.location,
        sku: "standard",
    });
    const msiLinked = new azure.datafactory.LinkedServiceAzureDatabricks("msi_linked", {
        name: "ADBLinkedServiceViaMSI",
        dataFactoryId: exampleFactory.id,
        description: "ADB Linked Service via MSI",
        adbDomain: pulumi.interpolate`https://${exampleWorkspace.workspaceUrl}`,
        msiWorkSpaceResourceId: exampleWorkspace.id,
        newClusterConfig: {
            nodeType: "Standard_NC12",
            clusterVersion: "5.5.x-gpu-scala2.11",
            minNumberOfWorkers: 1,
            maxNumberOfWorkers: 5,
            driverNodeType: "Standard_NC12",
            logDestination: "dbfs:/logs",
            customTags: {
                custom_tag1: "sct_value_1",
                custom_tag2: "sct_value_2",
            },
            sparkConfig: {
                config1: "value1",
                config2: "value2",
            },
            sparkEnvironmentVariables: {
                envVar1: "value1",
                envVar2: "value2",
            },
            initScripts: [
                "init.sh",
                "init2.sh",
            ],
        },
    });
    
    import pulumi
    import pulumi_azure as azure
    
    example = azure.core.ResourceGroup("example",
        name="example",
        location="East US")
    #Create a Linked Service using managed identity and new cluster config
    example_factory = azure.datafactory.Factory("example",
        name="TestDtaFactory92783401247",
        location=example.location,
        resource_group_name=example.name,
        identity={
            "type": "SystemAssigned",
        })
    #Create a databricks instance
    example_workspace = azure.databricks.Workspace("example",
        name="databricks-test",
        resource_group_name=example.name,
        location=example.location,
        sku="standard")
    msi_linked = azure.datafactory.LinkedServiceAzureDatabricks("msi_linked",
        name="ADBLinkedServiceViaMSI",
        data_factory_id=example_factory.id,
        description="ADB Linked Service via MSI",
        adb_domain=example_workspace.workspace_url.apply(lambda workspace_url: f"https://{workspace_url}"),
        msi_work_space_resource_id=example_workspace.id,
        new_cluster_config={
            "node_type": "Standard_NC12",
            "cluster_version": "5.5.x-gpu-scala2.11",
            "min_number_of_workers": 1,
            "max_number_of_workers": 5,
            "driver_node_type": "Standard_NC12",
            "log_destination": "dbfs:/logs",
            "custom_tags": {
                "custom_tag1": "sct_value_1",
                "custom_tag2": "sct_value_2",
            },
            "spark_config": {
                "config1": "value1",
                "config2": "value2",
            },
            "spark_environment_variables": {
                "envVar1": "value1",
                "envVar2": "value2",
            },
            "init_scripts": [
                "init.sh",
                "init2.sh",
            ],
        })
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
    	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/databricks"
    	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/datafactory"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
    			Name:     pulumi.String("example"),
    			Location: pulumi.String("East US"),
    		})
    		if err != nil {
    			return err
    		}
    		// Create a Linked Service using managed identity and new cluster config
    		exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
    			Name:              pulumi.String("TestDtaFactory92783401247"),
    			Location:          example.Location,
    			ResourceGroupName: example.Name,
    			Identity: &datafactory.FactoryIdentityArgs{
    				Type: pulumi.String("SystemAssigned"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		// Create a databricks instance
    		exampleWorkspace, err := databricks.NewWorkspace(ctx, "example", &databricks.WorkspaceArgs{
    			Name:              pulumi.String("databricks-test"),
    			ResourceGroupName: example.Name,
    			Location:          example.Location,
    			Sku:               pulumi.String("standard"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = datafactory.NewLinkedServiceAzureDatabricks(ctx, "msi_linked", &datafactory.LinkedServiceAzureDatabricksArgs{
    			Name:          pulumi.String("ADBLinkedServiceViaMSI"),
    			DataFactoryId: exampleFactory.ID(),
    			Description:   pulumi.String("ADB Linked Service via MSI"),
    			AdbDomain: exampleWorkspace.WorkspaceUrl.ApplyT(func(workspaceUrl string) (string, error) {
    				return fmt.Sprintf("https://%v", workspaceUrl), nil
    			}).(pulumi.StringOutput),
    			MsiWorkSpaceResourceId: exampleWorkspace.ID(),
    			NewClusterConfig: &datafactory.LinkedServiceAzureDatabricksNewClusterConfigArgs{
    				NodeType:           pulumi.String("Standard_NC12"),
    				ClusterVersion:     pulumi.String("5.5.x-gpu-scala2.11"),
    				MinNumberOfWorkers: pulumi.Int(1),
    				MaxNumberOfWorkers: pulumi.Int(5),
    				DriverNodeType:     pulumi.String("Standard_NC12"),
    				LogDestination:     pulumi.String("dbfs:/logs"),
    				CustomTags: pulumi.StringMap{
    					"custom_tag1": pulumi.String("sct_value_1"),
    					"custom_tag2": pulumi.String("sct_value_2"),
    				},
    				SparkConfig: pulumi.StringMap{
    					"config1": pulumi.String("value1"),
    					"config2": pulumi.String("value2"),
    				},
    				SparkEnvironmentVariables: pulumi.StringMap{
    					"envVar1": pulumi.String("value1"),
    					"envVar2": pulumi.String("value2"),
    				},
    				InitScripts: pulumi.StringArray{
    					pulumi.String("init.sh"),
    					pulumi.String("init2.sh"),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Azure = Pulumi.Azure;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Azure.Core.ResourceGroup("example", new()
        {
            Name = "example",
            Location = "East US",
        });
    
        //Create a Linked Service using managed identity and new cluster config
        var exampleFactory = new Azure.DataFactory.Factory("example", new()
        {
            Name = "TestDtaFactory92783401247",
            Location = example.Location,
            ResourceGroupName = example.Name,
            Identity = new Azure.DataFactory.Inputs.FactoryIdentityArgs
            {
                Type = "SystemAssigned",
            },
        });
    
        //Create a databricks instance
        var exampleWorkspace = new Azure.DataBricks.Workspace("example", new()
        {
            Name = "databricks-test",
            ResourceGroupName = example.Name,
            Location = example.Location,
            Sku = "standard",
        });
    
        var msiLinked = new Azure.DataFactory.LinkedServiceAzureDatabricks("msi_linked", new()
        {
            Name = "ADBLinkedServiceViaMSI",
            DataFactoryId = exampleFactory.Id,
            Description = "ADB Linked Service via MSI",
            AdbDomain = exampleWorkspace.WorkspaceUrl.Apply(workspaceUrl => $"https://{workspaceUrl}"),
            MsiWorkSpaceResourceId = exampleWorkspace.Id,
            NewClusterConfig = new Azure.DataFactory.Inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs
            {
                NodeType = "Standard_NC12",
                ClusterVersion = "5.5.x-gpu-scala2.11",
                MinNumberOfWorkers = 1,
                MaxNumberOfWorkers = 5,
                DriverNodeType = "Standard_NC12",
                LogDestination = "dbfs:/logs",
                CustomTags = 
                {
                    { "custom_tag1", "sct_value_1" },
                    { "custom_tag2", "sct_value_2" },
                },
                SparkConfig = 
                {
                    { "config1", "value1" },
                    { "config2", "value2" },
                },
                SparkEnvironmentVariables = 
                {
                    { "envVar1", "value1" },
                    { "envVar2", "value2" },
                },
                InitScripts = new[]
                {
                    "init.sh",
                    "init2.sh",
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.azure.core.ResourceGroup;
    import com.pulumi.azure.core.ResourceGroupArgs;
    import com.pulumi.azure.datafactory.Factory;
    import com.pulumi.azure.datafactory.FactoryArgs;
    import com.pulumi.azure.datafactory.inputs.FactoryIdentityArgs;
    import com.pulumi.azure.databricks.Workspace;
    import com.pulumi.azure.databricks.WorkspaceArgs;
    import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricks;
    import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs;
    import com.pulumi.azure.datafactory.inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new ResourceGroup("example", ResourceGroupArgs.builder()
                .name("example")
                .location("East US")
                .build());
    
            //Create a Linked Service using managed identity and new cluster config
            var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
                .name("TestDtaFactory92783401247")
                .location(example.location())
                .resourceGroupName(example.name())
                .identity(FactoryIdentityArgs.builder()
                    .type("SystemAssigned")
                    .build())
                .build());
    
            //Create a databricks instance
            var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()
                .name("databricks-test")
                .resourceGroupName(example.name())
                .location(example.location())
                .sku("standard")
                .build());
    
            var msiLinked = new LinkedServiceAzureDatabricks("msiLinked", LinkedServiceAzureDatabricksArgs.builder()
                .name("ADBLinkedServiceViaMSI")
                .dataFactoryId(exampleFactory.id())
                .description("ADB Linked Service via MSI")
                .adbDomain(exampleWorkspace.workspaceUrl().applyValue(workspaceUrl -> String.format("https://%s", workspaceUrl)))
                .msiWorkSpaceResourceId(exampleWorkspace.id())
                .newClusterConfig(LinkedServiceAzureDatabricksNewClusterConfigArgs.builder()
                    .nodeType("Standard_NC12")
                    .clusterVersion("5.5.x-gpu-scala2.11")
                    .minNumberOfWorkers(1)
                    .maxNumberOfWorkers(5)
                    .driverNodeType("Standard_NC12")
                    .logDestination("dbfs:/logs")
                    .customTags(Map.ofEntries(
                        Map.entry("custom_tag1", "sct_value_1"),
                        Map.entry("custom_tag2", "sct_value_2")
                    ))
                    .sparkConfig(Map.ofEntries(
                        Map.entry("config1", "value1"),
                        Map.entry("config2", "value2")
                    ))
                    .sparkEnvironmentVariables(Map.ofEntries(
                        Map.entry("envVar1", "value1"),
                        Map.entry("envVar2", "value2")
                    ))
                    .initScripts(                
                        "init.sh",
                        "init2.sh")
                    .build())
                .build());
    
        }
    }
    
    resources:
      example:
        type: azure:core:ResourceGroup
        properties:
          name: example
          location: East US
      #Create a Linked Service using managed identity and new cluster config
      exampleFactory:
        type: azure:datafactory:Factory
        name: example
        properties:
          name: TestDtaFactory92783401247
          location: ${example.location}
          resourceGroupName: ${example.name}
          identity:
            type: SystemAssigned
      #Create a databricks instance
      exampleWorkspace:
        type: azure:databricks:Workspace
        name: example
        properties:
          name: databricks-test
          resourceGroupName: ${example.name}
          location: ${example.location}
          sku: standard
      msiLinked:
        type: azure:datafactory:LinkedServiceAzureDatabricks
        name: msi_linked
        properties:
          name: ADBLinkedServiceViaMSI
          dataFactoryId: ${exampleFactory.id}
          description: ADB Linked Service via MSI
          adbDomain: https://${exampleWorkspace.workspaceUrl}
          msiWorkSpaceResourceId: ${exampleWorkspace.id}
          newClusterConfig:
            nodeType: Standard_NC12
            clusterVersion: 5.5.x-gpu-scala2.11
            minNumberOfWorkers: 1
            maxNumberOfWorkers: 5
            driverNodeType: Standard_NC12
            logDestination: dbfs:/logs
            customTags:
              custom_tag1: sct_value_1
              custom_tag2: sct_value_2
            sparkConfig:
              config1: value1
              config2: value2
            sparkEnvironmentVariables:
              envVar1: value1
              envVar2: value2
            initScripts:
              - init.sh
              - init2.sh
    

    With Access Token & Existing Cluster

    import * as pulumi from "@pulumi/pulumi";
    import * as azure from "@pulumi/azure";
    
    const example = new azure.core.ResourceGroup("example", {
        name: "example",
        location: "East US",
    });
    //Link to an existing cluster via access token
    const exampleFactory = new azure.datafactory.Factory("example", {
        name: "TestDtaFactory92783401247",
        location: example.location,
        resourceGroupName: example.name,
    });
    //Create a databricks instance
    const exampleWorkspace = new azure.databricks.Workspace("example", {
        name: "databricks-test",
        resourceGroupName: example.name,
        location: example.location,
        sku: "standard",
    });
    const atLinked = new azure.datafactory.LinkedServiceAzureDatabricks("at_linked", {
        name: "ADBLinkedServiceViaAccessToken",
        dataFactoryId: exampleFactory.id,
        description: "ADB Linked Service via Access Token",
        existingClusterId: "0308-201146-sly615",
        accessToken: "SomeDatabricksAccessToken",
        adbDomain: pulumi.interpolate`https://${exampleWorkspace.workspaceUrl}`,
    });
    
    import pulumi
    import pulumi_azure as azure
    
    example = azure.core.ResourceGroup("example",
        name="example",
        location="East US")
    #Link to an existing cluster via access token
    example_factory = azure.datafactory.Factory("example",
        name="TestDtaFactory92783401247",
        location=example.location,
        resource_group_name=example.name)
    #Create a databricks instance
    example_workspace = azure.databricks.Workspace("example",
        name="databricks-test",
        resource_group_name=example.name,
        location=example.location,
        sku="standard")
    at_linked = azure.datafactory.LinkedServiceAzureDatabricks("at_linked",
        name="ADBLinkedServiceViaAccessToken",
        data_factory_id=example_factory.id,
        description="ADB Linked Service via Access Token",
        existing_cluster_id="0308-201146-sly615",
        access_token="SomeDatabricksAccessToken",
        adb_domain=example_workspace.workspace_url.apply(lambda workspace_url: f"https://{workspace_url}"))
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
    	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/databricks"
    	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/datafactory"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
    			Name:     pulumi.String("example"),
    			Location: pulumi.String("East US"),
    		})
    		if err != nil {
    			return err
    		}
    		// Link to an existing cluster via access token
    		exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
    			Name:              pulumi.String("TestDtaFactory92783401247"),
    			Location:          example.Location,
    			ResourceGroupName: example.Name,
    		})
    		if err != nil {
    			return err
    		}
    		// Create a databricks instance
    		exampleWorkspace, err := databricks.NewWorkspace(ctx, "example", &databricks.WorkspaceArgs{
    			Name:              pulumi.String("databricks-test"),
    			ResourceGroupName: example.Name,
    			Location:          example.Location,
    			Sku:               pulumi.String("standard"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = datafactory.NewLinkedServiceAzureDatabricks(ctx, "at_linked", &datafactory.LinkedServiceAzureDatabricksArgs{
    			Name:              pulumi.String("ADBLinkedServiceViaAccessToken"),
    			DataFactoryId:     exampleFactory.ID(),
    			Description:       pulumi.String("ADB Linked Service via Access Token"),
    			ExistingClusterId: pulumi.String("0308-201146-sly615"),
    			AccessToken:       pulumi.String("SomeDatabricksAccessToken"),
    			AdbDomain: exampleWorkspace.WorkspaceUrl.ApplyT(func(workspaceUrl string) (string, error) {
    				return fmt.Sprintf("https://%v", workspaceUrl), nil
    			}).(pulumi.StringOutput),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Azure = Pulumi.Azure;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Azure.Core.ResourceGroup("example", new()
        {
            Name = "example",
            Location = "East US",
        });
    
        //Link to an existing cluster via access token
        var exampleFactory = new Azure.DataFactory.Factory("example", new()
        {
            Name = "TestDtaFactory92783401247",
            Location = example.Location,
            ResourceGroupName = example.Name,
        });
    
        //Create a databricks instance
        var exampleWorkspace = new Azure.DataBricks.Workspace("example", new()
        {
            Name = "databricks-test",
            ResourceGroupName = example.Name,
            Location = example.Location,
            Sku = "standard",
        });
    
        var atLinked = new Azure.DataFactory.LinkedServiceAzureDatabricks("at_linked", new()
        {
            Name = "ADBLinkedServiceViaAccessToken",
            DataFactoryId = exampleFactory.Id,
            Description = "ADB Linked Service via Access Token",
            ExistingClusterId = "0308-201146-sly615",
            AccessToken = "SomeDatabricksAccessToken",
            AdbDomain = exampleWorkspace.WorkspaceUrl.Apply(workspaceUrl => $"https://{workspaceUrl}"),
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.azure.core.ResourceGroup;
    import com.pulumi.azure.core.ResourceGroupArgs;
    import com.pulumi.azure.datafactory.Factory;
    import com.pulumi.azure.datafactory.FactoryArgs;
    import com.pulumi.azure.databricks.Workspace;
    import com.pulumi.azure.databricks.WorkspaceArgs;
    import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricks;
    import com.pulumi.azure.datafactory.LinkedServiceAzureDatabricksArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new ResourceGroup("example", ResourceGroupArgs.builder()
                .name("example")
                .location("East US")
                .build());
    
            //Link to an existing cluster via access token
            var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
                .name("TestDtaFactory92783401247")
                .location(example.location())
                .resourceGroupName(example.name())
                .build());
    
            //Create a databricks instance
            var exampleWorkspace = new Workspace("exampleWorkspace", WorkspaceArgs.builder()
                .name("databricks-test")
                .resourceGroupName(example.name())
                .location(example.location())
                .sku("standard")
                .build());
    
            var atLinked = new LinkedServiceAzureDatabricks("atLinked", LinkedServiceAzureDatabricksArgs.builder()
                .name("ADBLinkedServiceViaAccessToken")
                .dataFactoryId(exampleFactory.id())
                .description("ADB Linked Service via Access Token")
                .existingClusterId("0308-201146-sly615")
                .accessToken("SomeDatabricksAccessToken")
                .adbDomain(exampleWorkspace.workspaceUrl().applyValue(workspaceUrl -> String.format("https://%s", workspaceUrl)))
                .build());
    
        }
    }
    
    resources:
      example:
        type: azure:core:ResourceGroup
        properties:
          name: example
          location: East US
      #Link to an existing cluster via access token
      exampleFactory:
        type: azure:datafactory:Factory
        name: example
        properties:
          name: TestDtaFactory92783401247
          location: ${example.location}
          resourceGroupName: ${example.name}
      #Create a databricks instance
      exampleWorkspace:
        type: azure:databricks:Workspace
        name: example
        properties:
          name: databricks-test
          resourceGroupName: ${example.name}
          location: ${example.location}
          sku: standard
      atLinked:
        type: azure:datafactory:LinkedServiceAzureDatabricks
        name: at_linked
        properties:
          name: ADBLinkedServiceViaAccessToken
          dataFactoryId: ${exampleFactory.id}
          description: ADB Linked Service via Access Token
          existingClusterId: 0308-201146-sly615
          accessToken: SomeDatabricksAccessToken
          adbDomain: https://${exampleWorkspace.workspaceUrl}
    

    Create LinkedServiceAzureDatabricks Resource

Resources are created by calling constructor functions. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new LinkedServiceAzureDatabricks(name: string, args: LinkedServiceAzureDatabricksArgs, opts?: CustomResourceOptions);
    @overload
    def LinkedServiceAzureDatabricks(resource_name: str,
                                     args: LinkedServiceAzureDatabricksArgs,
                                     opts: Optional[ResourceOptions] = None)
    
    @overload
    def LinkedServiceAzureDatabricks(resource_name: str,
                                     opts: Optional[ResourceOptions] = None,
                                     data_factory_id: Optional[str] = None,
                                     adb_domain: Optional[str] = None,
                                     existing_cluster_id: Optional[str] = None,
                                     annotations: Optional[Sequence[str]] = None,
                                     additional_properties: Optional[Mapping[str, str]] = None,
                                     description: Optional[str] = None,
                                     access_token: Optional[str] = None,
                                     instance_pool: Optional[LinkedServiceAzureDatabricksInstancePoolArgs] = None,
                                     integration_runtime_name: Optional[str] = None,
                                     key_vault_password: Optional[LinkedServiceAzureDatabricksKeyVaultPasswordArgs] = None,
                                     msi_work_space_resource_id: Optional[str] = None,
                                     name: Optional[str] = None,
                                     new_cluster_config: Optional[LinkedServiceAzureDatabricksNewClusterConfigArgs] = None,
                                     parameters: Optional[Mapping[str, str]] = None)
    func NewLinkedServiceAzureDatabricks(ctx *Context, name string, args LinkedServiceAzureDatabricksArgs, opts ...ResourceOption) (*LinkedServiceAzureDatabricks, error)
    public LinkedServiceAzureDatabricks(string name, LinkedServiceAzureDatabricksArgs args, CustomResourceOptions? opts = null)
    public LinkedServiceAzureDatabricks(String name, LinkedServiceAzureDatabricksArgs args)
    public LinkedServiceAzureDatabricks(String name, LinkedServiceAzureDatabricksArgs args, CustomResourceOptions options)
    
    type: azure:datafactory:LinkedServiceAzureDatabricks
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args LinkedServiceAzureDatabricksArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args LinkedServiceAzureDatabricksArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args LinkedServiceAzureDatabricksArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args LinkedServiceAzureDatabricksArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args LinkedServiceAzureDatabricksArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var linkedServiceAzureDatabricksResource = new Azure.DataFactory.LinkedServiceAzureDatabricks("linkedServiceAzureDatabricksResource", new()
    {
        DataFactoryId = "string",
        AdbDomain = "string",
        ExistingClusterId = "string",
        Annotations = new[]
        {
            "string",
        },
        AdditionalProperties = 
        {
            { "string", "string" },
        },
        Description = "string",
        AccessToken = "string",
        InstancePool = new Azure.DataFactory.Inputs.LinkedServiceAzureDatabricksInstancePoolArgs
        {
            ClusterVersion = "string",
            InstancePoolId = "string",
            MaxNumberOfWorkers = 0,
            MinNumberOfWorkers = 0,
        },
        IntegrationRuntimeName = "string",
        KeyVaultPassword = new Azure.DataFactory.Inputs.LinkedServiceAzureDatabricksKeyVaultPasswordArgs
        {
            LinkedServiceName = "string",
            SecretName = "string",
        },
        MsiWorkSpaceResourceId = "string",
        Name = "string",
        NewClusterConfig = new Azure.DataFactory.Inputs.LinkedServiceAzureDatabricksNewClusterConfigArgs
        {
            ClusterVersion = "string",
            NodeType = "string",
            CustomTags = 
            {
                { "string", "string" },
            },
            DriverNodeType = "string",
            InitScripts = new[]
            {
                "string",
            },
            LogDestination = "string",
            MaxNumberOfWorkers = 0,
            MinNumberOfWorkers = 0,
            SparkConfig = 
            {
                { "string", "string" },
            },
            SparkEnvironmentVariables = 
            {
                { "string", "string" },
            },
        },
        Parameters = 
        {
            { "string", "string" },
        },
    });
    
    example, err := datafactory.NewLinkedServiceAzureDatabricks(ctx, "linkedServiceAzureDatabricksResource", &datafactory.LinkedServiceAzureDatabricksArgs{
    	DataFactoryId:     pulumi.String("string"),
    	AdbDomain:         pulumi.String("string"),
    	ExistingClusterId: pulumi.String("string"),
    	Annotations: pulumi.StringArray{
    		pulumi.String("string"),
    	},
    	AdditionalProperties: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	Description: pulumi.String("string"),
    	AccessToken: pulumi.String("string"),
    	InstancePool: &datafactory.LinkedServiceAzureDatabricksInstancePoolArgs{
    		ClusterVersion:     pulumi.String("string"),
    		InstancePoolId:     pulumi.String("string"),
    		MaxNumberOfWorkers: pulumi.Int(0),
    		MinNumberOfWorkers: pulumi.Int(0),
    	},
    	IntegrationRuntimeName: pulumi.String("string"),
    	KeyVaultPassword: &datafactory.LinkedServiceAzureDatabricksKeyVaultPasswordArgs{
    		LinkedServiceName: pulumi.String("string"),
    		SecretName:        pulumi.String("string"),
    	},
    	MsiWorkSpaceResourceId: pulumi.String("string"),
    	Name:                   pulumi.String("string"),
    	NewClusterConfig: &datafactory.LinkedServiceAzureDatabricksNewClusterConfigArgs{
    		ClusterVersion: pulumi.String("string"),
    		NodeType:       pulumi.String("string"),
    		CustomTags: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		DriverNodeType: pulumi.String("string"),
    		InitScripts: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		LogDestination:     pulumi.String("string"),
    		MaxNumberOfWorkers: pulumi.Int(0),
    		MinNumberOfWorkers: pulumi.Int(0),
    		SparkConfig: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		SparkEnvironmentVariables: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    	},
    	Parameters: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    })
    
    var linkedServiceAzureDatabricksResource = new LinkedServiceAzureDatabricks("linkedServiceAzureDatabricksResource", LinkedServiceAzureDatabricksArgs.builder()
        .dataFactoryId("string")
        .adbDomain("string")
        .existingClusterId("string")
        .annotations("string")
        .additionalProperties(Map.of("string", "string"))
        .description("string")
        .accessToken("string")
        .instancePool(LinkedServiceAzureDatabricksInstancePoolArgs.builder()
            .clusterVersion("string")
            .instancePoolId("string")
            .maxNumberOfWorkers(0)
            .minNumberOfWorkers(0)
            .build())
        .integrationRuntimeName("string")
        .keyVaultPassword(LinkedServiceAzureDatabricksKeyVaultPasswordArgs.builder()
            .linkedServiceName("string")
            .secretName("string")
            .build())
        .msiWorkSpaceResourceId("string")
        .name("string")
        .newClusterConfig(LinkedServiceAzureDatabricksNewClusterConfigArgs.builder()
            .clusterVersion("string")
            .nodeType("string")
            .customTags(Map.of("string", "string"))
            .driverNodeType("string")
            .initScripts("string")
            .logDestination("string")
            .maxNumberOfWorkers(0)
            .minNumberOfWorkers(0)
            .sparkConfig(Map.of("string", "string"))
            .sparkEnvironmentVariables(Map.of("string", "string"))
            .build())
        .parameters(Map.of("string", "string"))
        .build());
    
    linked_service_azure_databricks_resource = azure.datafactory.LinkedServiceAzureDatabricks("linkedServiceAzureDatabricksResource",
        data_factory_id="string",
        adb_domain="string",
        existing_cluster_id="string",
        annotations=["string"],
        additional_properties={
            "string": "string",
        },
        description="string",
        access_token="string",
        instance_pool={
            "cluster_version": "string",
            "instance_pool_id": "string",
            "max_number_of_workers": 0,
            "min_number_of_workers": 0,
        },
        integration_runtime_name="string",
        key_vault_password={
            "linked_service_name": "string",
            "secret_name": "string",
        },
        msi_work_space_resource_id="string",
        name="string",
        new_cluster_config={
            "cluster_version": "string",
            "node_type": "string",
            "custom_tags": {
                "string": "string",
            },
            "driver_node_type": "string",
            "init_scripts": ["string"],
            "log_destination": "string",
            "max_number_of_workers": 0,
            "min_number_of_workers": 0,
            "spark_config": {
                "string": "string",
            },
            "spark_environment_variables": {
                "string": "string",
            },
        },
        parameters={
            "string": "string",
        })
    
    const linkedServiceAzureDatabricksResource = new azure.datafactory.LinkedServiceAzureDatabricks("linkedServiceAzureDatabricksResource", {
        dataFactoryId: "string",
        adbDomain: "string",
        existingClusterId: "string",
        annotations: ["string"],
        additionalProperties: {
            string: "string",
        },
        description: "string",
        accessToken: "string",
        instancePool: {
            clusterVersion: "string",
            instancePoolId: "string",
            maxNumberOfWorkers: 0,
            minNumberOfWorkers: 0,
        },
        integrationRuntimeName: "string",
        keyVaultPassword: {
            linkedServiceName: "string",
            secretName: "string",
        },
        msiWorkSpaceResourceId: "string",
        name: "string",
        newClusterConfig: {
            clusterVersion: "string",
            nodeType: "string",
            customTags: {
                string: "string",
            },
            driverNodeType: "string",
            initScripts: ["string"],
            logDestination: "string",
            maxNumberOfWorkers: 0,
            minNumberOfWorkers: 0,
            sparkConfig: {
                string: "string",
            },
            sparkEnvironmentVariables: {
                string: "string",
            },
        },
        parameters: {
            string: "string",
        },
    });
    
    type: azure:datafactory:LinkedServiceAzureDatabricks
    properties:
        accessToken: string
        adbDomain: string
        additionalProperties:
            string: string
        annotations:
            - string
        dataFactoryId: string
        description: string
        existingClusterId: string
        instancePool:
            clusterVersion: string
            instancePoolId: string
            maxNumberOfWorkers: 0
            minNumberOfWorkers: 0
        integrationRuntimeName: string
        keyVaultPassword:
            linkedServiceName: string
            secretName: string
        msiWorkSpaceResourceId: string
        name: string
        newClusterConfig:
            clusterVersion: string
            customTags:
                string: string
            driverNodeType: string
            initScripts:
                - string
            logDestination: string
            maxNumberOfWorkers: 0
            minNumberOfWorkers: 0
            nodeType: string
            sparkConfig:
                string: string
            sparkEnvironmentVariables:
                string: string
        parameters:
            string: string
    

    LinkedServiceAzureDatabricks Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The LinkedServiceAzureDatabricks resource accepts the following input properties:

    AdbDomain string
    The domain URL of the databricks instance.
    DataFactoryId string
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    AccessToken string
    Authenticate to ADB via an access token.
    AdditionalProperties Dictionary<string, string>
    A map of additional properties to associate with the Data Factory Linked Service.
    Annotations List<string>
    List of tags that can be used for describing the Data Factory Linked Service.
    Description string
    The description for the Data Factory Linked Service.
    ExistingClusterId string
    The cluster_id of an existing cluster within the linked ADB instance.
    InstancePool LinkedServiceAzureDatabricksInstancePool
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    IntegrationRuntimeName string
    The integration runtime reference to associate with the Data Factory Linked Service.
    KeyVaultPassword LinkedServiceAzureDatabricksKeyVaultPassword
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    MsiWorkSpaceResourceId string
    Authenticate to ADB via managed service identity.
    Name string
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    NewClusterConfig LinkedServiceAzureDatabricksNewClusterConfig
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    Parameters Dictionary<string, string>
    A map of parameters to associate with the Data Factory Linked Service.
    AdbDomain string
    The domain URL of the databricks instance.
    DataFactoryId string
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    AccessToken string
    Authenticate to ADB via an access token.
    AdditionalProperties map[string]string
    A map of additional properties to associate with the Data Factory Linked Service.
    Annotations []string
    List of tags that can be used for describing the Data Factory Linked Service.
    Description string
    The description for the Data Factory Linked Service.
    ExistingClusterId string
    The cluster_id of an existing cluster within the linked ADB instance.
    InstancePool LinkedServiceAzureDatabricksInstancePoolArgs
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    IntegrationRuntimeName string
    The integration runtime reference to associate with the Data Factory Linked Service.
    KeyVaultPassword LinkedServiceAzureDatabricksKeyVaultPasswordArgs
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    MsiWorkSpaceResourceId string
    Authenticate to ADB via managed service identity.
    Name string
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    NewClusterConfig LinkedServiceAzureDatabricksNewClusterConfigArgs
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    Parameters map[string]string
    A map of parameters to associate with the Data Factory Linked Service.
    adbDomain String
    The domain URL of the databricks instance.
    dataFactoryId String
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    accessToken String
    Authenticate to ADB via an access token.
    additionalProperties Map<String,String>
    A map of additional properties to associate with the Data Factory Linked Service.
    annotations List<String>
    List of tags that can be used for describing the Data Factory Linked Service.
    description String
    The description for the Data Factory Linked Service.
    existingClusterId String
    The cluster_id of an existing cluster within the linked ADB instance.
    instancePool LinkedServiceAzureDatabricksInstancePool
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    integrationRuntimeName String
    The integration runtime reference to associate with the Data Factory Linked Service.
    keyVaultPassword LinkedServiceAzureDatabricksKeyVaultPassword
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    msiWorkSpaceResourceId String
    Authenticate to ADB via managed service identity.
    name String
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    newClusterConfig LinkedServiceAzureDatabricksNewClusterConfig
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    parameters Map<String,String>
    A map of parameters to associate with the Data Factory Linked Service.
    adbDomain string
    The domain URL of the databricks instance.
    dataFactoryId string
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    accessToken string
    Authenticate to ADB via an access token.
    additionalProperties {[key: string]: string}
    A map of additional properties to associate with the Data Factory Linked Service.
    annotations string[]
    List of tags that can be used for describing the Data Factory Linked Service.
    description string
    The description for the Data Factory Linked Service.
    existingClusterId string
    The cluster_id of an existing cluster within the linked ADB instance.
    instancePool LinkedServiceAzureDatabricksInstancePool
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    integrationRuntimeName string
    The integration runtime reference to associate with the Data Factory Linked Service.
    keyVaultPassword LinkedServiceAzureDatabricksKeyVaultPassword
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    msiWorkSpaceResourceId string
    Authenticate to ADB via managed service identity.
    name string
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    newClusterConfig LinkedServiceAzureDatabricksNewClusterConfig
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    parameters {[key: string]: string}
    A map of parameters to associate with the Data Factory Linked Service.
    adb_domain str
    The domain URL of the databricks instance.
    data_factory_id str
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    access_token str
    Authenticate to ADB via an access token.
    additional_properties Mapping[str, str]
    A map of additional properties to associate with the Data Factory Linked Service.
    annotations Sequence[str]
    List of tags that can be used for describing the Data Factory Linked Service.
    description str
    The description for the Data Factory Linked Service.
    existing_cluster_id str
    The cluster_id of an existing cluster within the linked ADB instance.
    instance_pool LinkedServiceAzureDatabricksInstancePoolArgs
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    integration_runtime_name str
    The integration runtime reference to associate with the Data Factory Linked Service.
    key_vault_password LinkedServiceAzureDatabricksKeyVaultPasswordArgs
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    msi_work_space_resource_id str
    Authenticate to ADB via managed service identity.
    name str
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    new_cluster_config LinkedServiceAzureDatabricksNewClusterConfigArgs
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    parameters Mapping[str, str]
    A map of parameters to associate with the Data Factory Linked Service.
    adbDomain String
    The domain URL of the databricks instance.
    dataFactoryId String
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    accessToken String
    Authenticate to ADB via an access token.
    additionalProperties Map<String>
    A map of additional properties to associate with the Data Factory Linked Service.
    annotations List<String>
    List of tags that can be used for describing the Data Factory Linked Service.
    description String
    The description for the Data Factory Linked Service.
    existingClusterId String
    The cluster_id of an existing cluster within the linked ADB instance.
    instancePool Property Map
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    integrationRuntimeName String
    The integration runtime reference to associate with the Data Factory Linked Service.
    keyVaultPassword Property Map
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    msiWorkSpaceResourceId String
    Authenticate to ADB via managed service identity.
    name String
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    newClusterConfig Property Map
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    parameters Map<String>
    A map of parameters to associate with the Data Factory Linked Service.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the LinkedServiceAzureDatabricks resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing LinkedServiceAzureDatabricks Resource

    Get an existing LinkedServiceAzureDatabricks resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: LinkedServiceAzureDatabricksState, opts?: CustomResourceOptions): LinkedServiceAzureDatabricks
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            access_token: Optional[str] = None,
            adb_domain: Optional[str] = None,
            additional_properties: Optional[Mapping[str, str]] = None,
            annotations: Optional[Sequence[str]] = None,
            data_factory_id: Optional[str] = None,
            description: Optional[str] = None,
            existing_cluster_id: Optional[str] = None,
            instance_pool: Optional[LinkedServiceAzureDatabricksInstancePoolArgs] = None,
            integration_runtime_name: Optional[str] = None,
            key_vault_password: Optional[LinkedServiceAzureDatabricksKeyVaultPasswordArgs] = None,
            msi_work_space_resource_id: Optional[str] = None,
            name: Optional[str] = None,
            new_cluster_config: Optional[LinkedServiceAzureDatabricksNewClusterConfigArgs] = None,
            parameters: Optional[Mapping[str, str]] = None) -> LinkedServiceAzureDatabricks
    func GetLinkedServiceAzureDatabricks(ctx *Context, name string, id IDInput, state *LinkedServiceAzureDatabricksState, opts ...ResourceOption) (*LinkedServiceAzureDatabricks, error)
    public static LinkedServiceAzureDatabricks Get(string name, Input<string> id, LinkedServiceAzureDatabricksState? state, CustomResourceOptions? opts = null)
    public static LinkedServiceAzureDatabricks get(String name, Output<String> id, LinkedServiceAzureDatabricksState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AccessToken string
    Authenticate to ADB via an access token.
    AdbDomain string
    The domain URL of the databricks instance.
    AdditionalProperties Dictionary<string, string>
    A map of additional properties to associate with the Data Factory Linked Service.
    Annotations List<string>
    List of tags that can be used for describing the Data Factory Linked Service.
    DataFactoryId string
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    Description string
    The description for the Data Factory Linked Service.
    ExistingClusterId string
    The cluster_id of an existing cluster within the linked ADB instance.
    InstancePool LinkedServiceAzureDatabricksInstancePool
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    IntegrationRuntimeName string
    The integration runtime reference to associate with the Data Factory Linked Service.
    KeyVaultPassword LinkedServiceAzureDatabricksKeyVaultPassword
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    MsiWorkSpaceResourceId string
    Authenticate to ADB via managed service identity.
    Name string
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    NewClusterConfig LinkedServiceAzureDatabricksNewClusterConfig
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    Parameters Dictionary<string, string>
    A map of parameters to associate with the Data Factory Linked Service.
    AccessToken string
    Authenticate to ADB via an access token.
    AdbDomain string
    The domain URL of the databricks instance.
    AdditionalProperties map[string]string
    A map of additional properties to associate with the Data Factory Linked Service.
    Annotations []string
    List of tags that can be used for describing the Data Factory Linked Service.
    DataFactoryId string
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    Description string
    The description for the Data Factory Linked Service.
    ExistingClusterId string
    The cluster_id of an existing cluster within the linked ADB instance.
    InstancePool LinkedServiceAzureDatabricksInstancePoolArgs
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    IntegrationRuntimeName string
    The integration runtime reference to associate with the Data Factory Linked Service.
    KeyVaultPassword LinkedServiceAzureDatabricksKeyVaultPasswordArgs
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    MsiWorkSpaceResourceId string
    Authenticate to ADB via managed service identity.
    Name string
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    NewClusterConfig LinkedServiceAzureDatabricksNewClusterConfigArgs
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    Parameters map[string]string
    A map of parameters to associate with the Data Factory Linked Service.
    accessToken String
    Authenticate to ADB via an access token.
    adbDomain String
    The domain URL of the databricks instance.
    additionalProperties Map<String,String>
    A map of additional properties to associate with the Data Factory Linked Service.
    annotations List<String>
    List of tags that can be used for describing the Data Factory Linked Service.
    dataFactoryId String
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    description String
    The description for the Data Factory Linked Service.
    existingClusterId String
    The cluster_id of an existing cluster within the linked ADB instance.
    instancePool LinkedServiceAzureDatabricksInstancePool
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    integrationRuntimeName String
    The integration runtime reference to associate with the Data Factory Linked Service.
    keyVaultPassword LinkedServiceAzureDatabricksKeyVaultPassword
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    msiWorkSpaceResourceId String
    Authenticate to ADB via managed service identity.
    name String
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    newClusterConfig LinkedServiceAzureDatabricksNewClusterConfig
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    parameters Map<String,String>
    A map of parameters to associate with the Data Factory Linked Service.
    accessToken string
    Authenticate to ADB via an access token.
    adbDomain string
    The domain URL of the databricks instance.
    additionalProperties {[key: string]: string}
    A map of additional properties to associate with the Data Factory Linked Service.
    annotations string[]
    List of tags that can be used for describing the Data Factory Linked Service.
    dataFactoryId string
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    description string
    The description for the Data Factory Linked Service.
    existingClusterId string
    The cluster_id of an existing cluster within the linked ADB instance.
    instancePool LinkedServiceAzureDatabricksInstancePool
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    integrationRuntimeName string
    The integration runtime reference to associate with the Data Factory Linked Service.
    keyVaultPassword LinkedServiceAzureDatabricksKeyVaultPassword
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    msiWorkSpaceResourceId string
    Authenticate to ADB via managed service identity.
    name string
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    newClusterConfig LinkedServiceAzureDatabricksNewClusterConfig
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    parameters {[key: string]: string}
    A map of parameters to associate with the Data Factory Linked Service.
    access_token str
    Authenticate to ADB via an access token.
    adb_domain str
    The domain URL of the databricks instance.
    additional_properties Mapping[str, str]
    A map of additional properties to associate with the Data Factory Linked Service.
    annotations Sequence[str]
    List of tags that can be used for describing the Data Factory Linked Service.
    data_factory_id str
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    description str
    The description for the Data Factory Linked Service.
    existing_cluster_id str
    The cluster_id of an existing cluster within the linked ADB instance.
    instance_pool LinkedServiceAzureDatabricksInstancePoolArgs
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    integration_runtime_name str
    The integration runtime reference to associate with the Data Factory Linked Service.
    key_vault_password LinkedServiceAzureDatabricksKeyVaultPasswordArgs
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    msi_work_space_resource_id str
    Authenticate to ADB via managed service identity.
    name str
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    new_cluster_config LinkedServiceAzureDatabricksNewClusterConfigArgs
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    parameters Mapping[str, str]
    A map of parameters to associate with the Data Factory Linked Service.
    accessToken String
    Authenticate to ADB via an access token.
    adbDomain String
    The domain URL of the databricks instance.
    additionalProperties Map<String>
    A map of additional properties to associate with the Data Factory Linked Service.
    annotations List<String>
    List of tags that can be used for describing the Data Factory Linked Service.
    dataFactoryId String
    The ID of the Data Factory with which to associate the Linked Service. Changing this forces a new resource.
    description String
    The description for the Data Factory Linked Service.
    existingClusterId String
    The cluster_id of an existing cluster within the linked ADB instance.
    instancePool Property Map
    Leverages an instance pool within the linked ADB instance as one instance_pool block defined below.
    integrationRuntimeName String
    The integration runtime reference to associate with the Data Factory Linked Service.
    keyVaultPassword Property Map
    Authenticate to ADB via Azure Key Vault Linked Service as defined in the key_vault_password block below.
    msiWorkSpaceResourceId String
    Authenticate to ADB via managed service identity.
    name String
    Specifies the name of the Data Factory Linked Service. Changing this forces a new resource to be created. Must be unique within a data factory. See the Microsoft documentation for all restrictions.
    newClusterConfig Property Map
    Creates new clusters within the linked ADB instance as defined in the new_cluster_config block below.
    parameters Map<String>
    A map of parameters to associate with the Data Factory Linked Service.

    Supporting Types

    LinkedServiceAzureDatabricksInstancePool, LinkedServiceAzureDatabricksInstancePoolArgs

    ClusterVersion string
    Spark version of the cluster.
    InstancePoolId string
    Identifier of the instance pool within the linked ADB instance.
    MaxNumberOfWorkers int
    The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.
    MinNumberOfWorkers int
    The minimum number of worker nodes. Defaults to 1.
    ClusterVersion string
    Spark version of the cluster.
    InstancePoolId string
    Identifier of the instance pool within the linked ADB instance.
    MaxNumberOfWorkers int
    The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.
    MinNumberOfWorkers int
    The minimum number of worker nodes. Defaults to 1.
    clusterVersion String
    Spark version of the cluster.
    instancePoolId String
    Identifier of the instance pool within the linked ADB instance.
    maxNumberOfWorkers Integer
    The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.
    minNumberOfWorkers Integer
    The minimum number of worker nodes. Defaults to 1.
    clusterVersion string
    Spark version of the cluster.
    instancePoolId string
    Identifier of the instance pool within the linked ADB instance.
    maxNumberOfWorkers number
    The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.
    minNumberOfWorkers number
    The minimum number of worker nodes. Defaults to 1.
    cluster_version str
    Spark version of the cluster.
    instance_pool_id str
    Identifier of the instance pool within the linked ADB instance.
    max_number_of_workers int
    The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.
    min_number_of_workers int
    The minimum number of worker nodes. Defaults to 1.
    clusterVersion String
    Spark version of the cluster.
    instancePoolId String
    Identifier of the instance pool within the linked ADB instance.
    maxNumberOfWorkers Number
    The max number of worker nodes. Set this value if you want to enable autoscaling between the min_number_of_workers and this value. Omit this value to use a fixed number of workers defined in the min_number_of_workers property.
    minNumberOfWorkers Number
    The minimum number of worker nodes. Defaults to 1.

    LinkedServiceAzureDatabricksKeyVaultPassword, LinkedServiceAzureDatabricksKeyVaultPasswordArgs

    LinkedServiceName string
    Specifies the name of an existing Key Vault Data Factory Linked Service.
    SecretName string
    Specifies the secret name in Azure Key Vault that stores the ADB access token.
    LinkedServiceName string
    Specifies the name of an existing Key Vault Data Factory Linked Service.
    SecretName string
    Specifies the secret name in Azure Key Vault that stores the ADB access token.
    linkedServiceName String
    Specifies the name of an existing Key Vault Data Factory Linked Service.
    secretName String
    Specifies the secret name in Azure Key Vault that stores the ADB access token.
    linkedServiceName string
    Specifies the name of an existing Key Vault Data Factory Linked Service.
    secretName string
    Specifies the secret name in Azure Key Vault that stores the ADB access token.
    linked_service_name str
    Specifies the name of an existing Key Vault Data Factory Linked Service.
    secret_name str
    Specifies the secret name in Azure Key Vault that stores the ADB access token.
    linkedServiceName String
    Specifies the name of an existing Key Vault Data Factory Linked Service.
    secretName String
    Specifies the secret name in Azure Key Vault that stores the ADB access token.

    LinkedServiceAzureDatabricksNewClusterConfig, LinkedServiceAzureDatabricksNewClusterConfigArgs

    ClusterVersion string
    Spark version of the cluster.
    NodeType string
    Node type for the new cluster.
    CustomTags Dictionary<string, string>
    Tags for the cluster resource.
    DriverNodeType string
    Driver node type for the cluster.
    InitScripts List<string>
    User defined initialization scripts for the cluster.
    LogDestination string
    Location to deliver Spark driver, worker, and event logs.
    MaxNumberOfWorkers int
    Specifies the maximum number of worker nodes. It should be between 1 and 25000.
    MinNumberOfWorkers int
    Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to 1.
    SparkConfig Dictionary<string, string>
    User-specified Spark configuration variables key-value pairs.
    SparkEnvironmentVariables Dictionary<string, string>
    User-specified Spark environment variables key-value pairs.
    ClusterVersion string
    Spark version of the cluster.
    NodeType string
    Node type for the new cluster.
    CustomTags map[string]string
    Tags for the cluster resource.
    DriverNodeType string
    Driver node type for the cluster.
    InitScripts []string
    User defined initialization scripts for the cluster.
    LogDestination string
    Location to deliver Spark driver, worker, and event logs.
    MaxNumberOfWorkers int
    Specifies the maximum number of worker nodes. It should be between 1 and 25000.
    MinNumberOfWorkers int
    Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to 1.
    SparkConfig map[string]string
    User-specified Spark configuration variables key-value pairs.
    SparkEnvironmentVariables map[string]string
    User-specified Spark environment variables key-value pairs.
    clusterVersion String
    Spark version of the cluster.
    nodeType String
    Node type for the new cluster.
    customTags Map<String,String>
    Tags for the cluster resource.
    driverNodeType String
    Driver node type for the cluster.
    initScripts List<String>
    User defined initialization scripts for the cluster.
    logDestination String
    Location to deliver Spark driver, worker, and event logs.
    maxNumberOfWorkers Integer
    Specifies the maximum number of worker nodes. It should be between 1 and 25000.
    minNumberOfWorkers Integer
    Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to 1.
    sparkConfig Map<String,String>
    User-specified Spark configuration variables key-value pairs.
    sparkEnvironmentVariables Map<String,String>
    User-specified Spark environment variables key-value pairs.
    clusterVersion string
    Spark version of the cluster.
    nodeType string
    Node type for the new cluster.
    customTags {[key: string]: string}
    Tags for the cluster resource.
    driverNodeType string
    Driver node type for the cluster.
    initScripts string[]
    User defined initialization scripts for the cluster.
    logDestination string
    Location to deliver Spark driver, worker, and event logs.
    maxNumberOfWorkers number
    Specifies the maximum number of worker nodes. It should be between 1 and 25000.
    minNumberOfWorkers number
    Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to 1.
    sparkConfig {[key: string]: string}
    User-specified Spark configuration variables key-value pairs.
    sparkEnvironmentVariables {[key: string]: string}
    User-specified Spark environment variables key-value pairs.
    cluster_version str
    Spark version of the cluster.
    node_type str
    Node type for the new cluster.
    custom_tags Mapping[str, str]
    Tags for the cluster resource.
    driver_node_type str
    Driver node type for the cluster.
    init_scripts Sequence[str]
    User defined initialization scripts for the cluster.
    log_destination str
    Location to deliver Spark driver, worker, and event logs.
    max_number_of_workers int
    Specifies the maximum number of worker nodes. It should be between 1 and 25000.
    min_number_of_workers int
    Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to 1.
    spark_config Mapping[str, str]
    User-specified Spark configuration variables key-value pairs.
    spark_environment_variables Mapping[str, str]
    User-specified Spark environment variables key-value pairs.
    clusterVersion String
    Spark version of the cluster.
    nodeType String
    Node type for the new cluster.
    customTags Map<String>
    Tags for the cluster resource.
    driverNodeType String
    Driver node type for the cluster.
    initScripts List<String>
    User defined initialization scripts for the cluster.
    logDestination String
    Location to deliver Spark driver, worker, and event logs.
    maxNumberOfWorkers Number
    Specifies the maximum number of worker nodes. It should be between 1 and 25000.
    minNumberOfWorkers Number
    Specifies the minimum number of worker nodes. It should be between 1 and 25000. It defaults to 1.
    sparkConfig Map<String>
    User-specified Spark configuration variables key-value pairs.
    sparkEnvironmentVariables Map<String>
    User-specified Spark environment variables key-value pairs.

    Import

    Data Factory Linked Services can be imported using the resource id, e.g.

    $ pulumi import azure:datafactory/linkedServiceAzureDatabricks:LinkedServiceAzureDatabricks example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/linkedservices/example
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    Azure Classic pulumi/pulumi-azure
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the azurerm Terraform Provider.
    azure logo

    We recommend using Azure Native.

    Azure v6.10.0 published on Tuesday, Nov 19, 2024 by Pulumi