1. Packages
  2. Databricks
  3. API Docs
  4. StorageCredential
Databricks v1.56.0 published on Tuesday, Nov 12, 2024 by Pulumi

databricks.StorageCredential

Explore with Pulumi AI

databricks logo
Databricks v1.56.0 published on Tuesday, Nov 12, 2024 by Pulumi

    This resource can be used with an account or workspace-level provider.

    To work with external tables, Unity Catalog introduces two new objects to access and work with external cloud storage:

    • databricks.StorageCredential represents authentication methods to access cloud storage (e.g. an IAM role for Amazon S3 or a service principal/managed identity for Azure Storage). Storage credentials are access-controlled to determine which users can use the credential.
    • databricks.ExternalLocation are objects that combine a cloud storage path with a Storage Credential that can be used to access the location.

    Example Usage

    For AWS

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const external = new databricks.StorageCredential("external", {
        name: externalDataAccess.name,
        awsIamRole: {
            roleArn: externalDataAccess.arn,
        },
        comment: "Managed by TF",
    });
    const externalCreds = new databricks.Grants("external_creds", {
        storageCredential: external.id,
        grants: [{
            principal: "Data Engineers",
            privileges: ["CREATE_EXTERNAL_TABLE"],
        }],
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    external = databricks.StorageCredential("external",
        name=external_data_access["name"],
        aws_iam_role={
            "role_arn": external_data_access["arn"],
        },
        comment="Managed by TF")
    external_creds = databricks.Grants("external_creds",
        storage_credential=external.id,
        grants=[{
            "principal": "Data Engineers",
            "privileges": ["CREATE_EXTERNAL_TABLE"],
        }])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
    			Name: pulumi.Any(externalDataAccess.Name),
    			AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
    				RoleArn: pulumi.Any(externalDataAccess.Arn),
    			},
    			Comment: pulumi.String("Managed by TF"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
    			StorageCredential: external.ID(),
    			Grants: databricks.GrantsGrantArray{
    				&databricks.GrantsGrantArgs{
    					Principal: pulumi.String("Data Engineers"),
    					Privileges: pulumi.StringArray{
    						pulumi.String("CREATE_EXTERNAL_TABLE"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var external = new Databricks.StorageCredential("external", new()
        {
            Name = externalDataAccess.Name,
            AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
            {
                RoleArn = externalDataAccess.Arn,
            },
            Comment = "Managed by TF",
        });
    
        var externalCreds = new Databricks.Grants("external_creds", new()
        {
            StorageCredential = external.Id,
            GrantDetails = new[]
            {
                new Databricks.Inputs.GrantsGrantArgs
                {
                    Principal = "Data Engineers",
                    Privileges = new[]
                    {
                        "CREATE_EXTERNAL_TABLE",
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.StorageCredential;
    import com.pulumi.databricks.StorageCredentialArgs;
    import com.pulumi.databricks.inputs.StorageCredentialAwsIamRoleArgs;
    import com.pulumi.databricks.Grants;
    import com.pulumi.databricks.GrantsArgs;
    import com.pulumi.databricks.inputs.GrantsGrantArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var external = new StorageCredential("external", StorageCredentialArgs.builder()
                .name(externalDataAccess.name())
                .awsIamRole(StorageCredentialAwsIamRoleArgs.builder()
                    .roleArn(externalDataAccess.arn())
                    .build())
                .comment("Managed by TF")
                .build());
    
            var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
                .storageCredential(external.id())
                .grants(GrantsGrantArgs.builder()
                    .principal("Data Engineers")
                    .privileges("CREATE_EXTERNAL_TABLE")
                    .build())
                .build());
    
        }
    }
    
    resources:
      external:
        type: databricks:StorageCredential
        properties:
          name: ${externalDataAccess.name}
          awsIamRole:
            roleArn: ${externalDataAccess.arn}
          comment: Managed by TF
      externalCreds:
        type: databricks:Grants
        name: external_creds
        properties:
          storageCredential: ${external.id}
          grants:
            - principal: Data Engineers
              privileges:
                - CREATE_EXTERNAL_TABLE
    

    For Azure

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const externalMi = new databricks.StorageCredential("external_mi", {
        name: "mi_credential",
        azureManagedIdentity: {
            accessConnectorId: example.id,
        },
        comment: "Managed identity credential managed by TF",
    });
    const externalCreds = new databricks.Grants("external_creds", {
        storageCredential: externalMi.id,
        grants: [{
            principal: "Data Engineers",
            privileges: ["CREATE_EXTERNAL_TABLE"],
        }],
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    external_mi = databricks.StorageCredential("external_mi",
        name="mi_credential",
        azure_managed_identity={
            "access_connector_id": example["id"],
        },
        comment="Managed identity credential managed by TF")
    external_creds = databricks.Grants("external_creds",
        storage_credential=external_mi.id,
        grants=[{
            "principal": "Data Engineers",
            "privileges": ["CREATE_EXTERNAL_TABLE"],
        }])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		externalMi, err := databricks.NewStorageCredential(ctx, "external_mi", &databricks.StorageCredentialArgs{
    			Name: pulumi.String("mi_credential"),
    			AzureManagedIdentity: &databricks.StorageCredentialAzureManagedIdentityArgs{
    				AccessConnectorId: pulumi.Any(example.Id),
    			},
    			Comment: pulumi.String("Managed identity credential managed by TF"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
    			StorageCredential: externalMi.ID(),
    			Grants: databricks.GrantsGrantArray{
    				&databricks.GrantsGrantArgs{
    					Principal: pulumi.String("Data Engineers"),
    					Privileges: pulumi.StringArray{
    						pulumi.String("CREATE_EXTERNAL_TABLE"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var externalMi = new Databricks.StorageCredential("external_mi", new()
        {
            Name = "mi_credential",
            AzureManagedIdentity = new Databricks.Inputs.StorageCredentialAzureManagedIdentityArgs
            {
                AccessConnectorId = example.Id,
            },
            Comment = "Managed identity credential managed by TF",
        });
    
        var externalCreds = new Databricks.Grants("external_creds", new()
        {
        StorageCredential = externalMi.Id,
            GrantDetails = new[]
            {
                new Databricks.Inputs.GrantsGrantArgs
                {
                    Principal = "Data Engineers",
                    Privileges = new[]
                    {
                        "CREATE_EXTERNAL_TABLE",
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.StorageCredential;
    import com.pulumi.databricks.StorageCredentialArgs;
    import com.pulumi.databricks.inputs.StorageCredentialAzureManagedIdentityArgs;
    import com.pulumi.databricks.Grants;
    import com.pulumi.databricks.GrantsArgs;
    import com.pulumi.databricks.inputs.GrantsGrantArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var externalMi = new StorageCredential("externalMi", StorageCredentialArgs.builder()
                .name("mi_credential")
                .azureManagedIdentity(StorageCredentialAzureManagedIdentityArgs.builder()
                    .accessConnectorId(example.id())
                    .build())
                .comment("Managed identity credential managed by TF")
                .build());
    
            var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
                .storageCredential(externalMi.id())
                .grants(GrantsGrantArgs.builder()
                    .principal("Data Engineers")
                    .privileges("CREATE_EXTERNAL_TABLE")
                    .build())
                .build());
    
        }
    }
    
    resources:
      externalMi:
        type: databricks:StorageCredential
        name: external_mi
        properties:
          name: mi_credential
          azureManagedIdentity:
            accessConnectorId: ${example.id}
          comment: Managed identity credential managed by TF
      externalCreds:
        type: databricks:Grants
        name: external_creds
        properties:
          storageCredential: ${externalMi.id}
          grants:
            - principal: Data Engineers
              privileges:
                - CREATE_EXTERNAL_TABLE
    

    For GCP

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const external = new databricks.StorageCredential("external", {
        name: "the-creds",
        databricksGcpServiceAccount: {},
    });
    const externalCreds = new databricks.Grants("external_creds", {
        storageCredential: external.id,
        grants: [{
            principal: "Data Engineers",
            privileges: ["CREATE_EXTERNAL_TABLE"],
        }],
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    external = databricks.StorageCredential("external",
        name="the-creds",
        databricks_gcp_service_account={})
    external_creds = databricks.Grants("external_creds",
        storage_credential=external.id,
        grants=[{
            "principal": "Data Engineers",
            "privileges": ["CREATE_EXTERNAL_TABLE"],
        }])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
    			Name:                        pulumi.String("the-creds"),
    			DatabricksGcpServiceAccount: &databricks.StorageCredentialDatabricksGcpServiceAccountArgs{},
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
    			StorageCredential: external.ID(),
    			Grants: databricks.GrantsGrantArray{
    				&databricks.GrantsGrantArgs{
    					Principal: pulumi.String("Data Engineers"),
    					Privileges: pulumi.StringArray{
    						pulumi.String("CREATE_EXTERNAL_TABLE"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var external = new Databricks.StorageCredential("external", new()
        {
            Name = "the-creds",
            DatabricksGcpServiceAccount = null,
        });
    
        var externalCreds = new Databricks.Grants("external_creds", new()
        {
            StorageCredential = external.Id,
            GrantDetails = new[]
            {
                new Databricks.Inputs.GrantsGrantArgs
                {
                    Principal = "Data Engineers",
                    Privileges = new[]
                    {
                        "CREATE_EXTERNAL_TABLE",
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.StorageCredential;
    import com.pulumi.databricks.StorageCredentialArgs;
    import com.pulumi.databricks.inputs.StorageCredentialDatabricksGcpServiceAccountArgs;
    import com.pulumi.databricks.Grants;
    import com.pulumi.databricks.GrantsArgs;
    import com.pulumi.databricks.inputs.GrantsGrantArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var external = new StorageCredential("external", StorageCredentialArgs.builder()
                .name("the-creds")
                .databricksGcpServiceAccount(StorageCredentialDatabricksGcpServiceAccountArgs.builder().build())
                .build());
    
            var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
                .storageCredential(external.id())
                .grants(GrantsGrantArgs.builder()
                    .principal("Data Engineers")
                    .privileges("CREATE_EXTERNAL_TABLE")
                    .build())
                .build());
    
        }
    }
    
    resources:
      external:
        type: databricks:StorageCredential
        properties:
          name: the-creds
          databricksGcpServiceAccount: {}
      externalCreds:
        type: databricks:Grants
        name: external_creds
        properties:
          storageCredential: ${external.id}
          grants:
            - principal: Data Engineers
              privileges:
                - CREATE_EXTERNAL_TABLE
    

    Create StorageCredential Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new StorageCredential(name: string, args?: StorageCredentialArgs, opts?: CustomResourceOptions);
    @overload
    def StorageCredential(resource_name: str,
                          args: Optional[StorageCredentialArgs] = None,
                          opts: Optional[ResourceOptions] = None)
    
    @overload
    def StorageCredential(resource_name: str,
                          opts: Optional[ResourceOptions] = None,
                          aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
                          azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
                          azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
                          cloudflare_api_token: Optional[StorageCredentialCloudflareApiTokenArgs] = None,
                          comment: Optional[str] = None,
                          databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
                          force_destroy: Optional[bool] = None,
                          force_update: Optional[bool] = None,
                          gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
                          isolation_mode: Optional[str] = None,
                          metastore_id: Optional[str] = None,
                          name: Optional[str] = None,
                          owner: Optional[str] = None,
                          read_only: Optional[bool] = None,
                          skip_validation: Optional[bool] = None)
    func NewStorageCredential(ctx *Context, name string, args *StorageCredentialArgs, opts ...ResourceOption) (*StorageCredential, error)
    public StorageCredential(string name, StorageCredentialArgs? args = null, CustomResourceOptions? opts = null)
    public StorageCredential(String name, StorageCredentialArgs args)
    public StorageCredential(String name, StorageCredentialArgs args, CustomResourceOptions options)
    
    type: databricks:StorageCredential
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args StorageCredentialArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args StorageCredentialArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args StorageCredentialArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args StorageCredentialArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args StorageCredentialArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var storageCredentialResource = new Databricks.StorageCredential("storageCredentialResource", new()
    {
        AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
        {
            RoleArn = "string",
            ExternalId = "string",
            UnityCatalogIamArn = "string",
        },
        AzureManagedIdentity = new Databricks.Inputs.StorageCredentialAzureManagedIdentityArgs
        {
            AccessConnectorId = "string",
            CredentialId = "string",
            ManagedIdentityId = "string",
        },
        AzureServicePrincipal = new Databricks.Inputs.StorageCredentialAzureServicePrincipalArgs
        {
            ApplicationId = "string",
            ClientSecret = "string",
            DirectoryId = "string",
        },
        CloudflareApiToken = new Databricks.Inputs.StorageCredentialCloudflareApiTokenArgs
        {
            AccessKeyId = "string",
            AccountId = "string",
            SecretAccessKey = "string",
        },
        Comment = "string",
        DatabricksGcpServiceAccount = new Databricks.Inputs.StorageCredentialDatabricksGcpServiceAccountArgs
        {
            CredentialId = "string",
            Email = "string",
        },
        ForceDestroy = false,
        ForceUpdate = false,
        GcpServiceAccountKey = new Databricks.Inputs.StorageCredentialGcpServiceAccountKeyArgs
        {
            Email = "string",
            PrivateKey = "string",
            PrivateKeyId = "string",
        },
        IsolationMode = "string",
        MetastoreId = "string",
        Name = "string",
        Owner = "string",
        ReadOnly = false,
        SkipValidation = false,
    });
    
    example, err := databricks.NewStorageCredential(ctx, "storageCredentialResource", &databricks.StorageCredentialArgs{
    	AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
    		RoleArn:            pulumi.String("string"),
    		ExternalId:         pulumi.String("string"),
    		UnityCatalogIamArn: pulumi.String("string"),
    	},
    	AzureManagedIdentity: &databricks.StorageCredentialAzureManagedIdentityArgs{
    		AccessConnectorId: pulumi.String("string"),
    		CredentialId:      pulumi.String("string"),
    		ManagedIdentityId: pulumi.String("string"),
    	},
    	AzureServicePrincipal: &databricks.StorageCredentialAzureServicePrincipalArgs{
    		ApplicationId: pulumi.String("string"),
    		ClientSecret:  pulumi.String("string"),
    		DirectoryId:   pulumi.String("string"),
    	},
    	CloudflareApiToken: &databricks.StorageCredentialCloudflareApiTokenArgs{
    		AccessKeyId:     pulumi.String("string"),
    		AccountId:       pulumi.String("string"),
    		SecretAccessKey: pulumi.String("string"),
    	},
    	Comment: pulumi.String("string"),
    	DatabricksGcpServiceAccount: &databricks.StorageCredentialDatabricksGcpServiceAccountArgs{
    		CredentialId: pulumi.String("string"),
    		Email:        pulumi.String("string"),
    	},
    	ForceDestroy: pulumi.Bool(false),
    	ForceUpdate:  pulumi.Bool(false),
    	GcpServiceAccountKey: &databricks.StorageCredentialGcpServiceAccountKeyArgs{
    		Email:        pulumi.String("string"),
    		PrivateKey:   pulumi.String("string"),
    		PrivateKeyId: pulumi.String("string"),
    	},
    	IsolationMode:  pulumi.String("string"),
    	MetastoreId:    pulumi.String("string"),
    	Name:           pulumi.String("string"),
    	Owner:          pulumi.String("string"),
    	ReadOnly:       pulumi.Bool(false),
    	SkipValidation: pulumi.Bool(false),
    })
    
    var storageCredentialResource = new StorageCredential("storageCredentialResource", StorageCredentialArgs.builder()
        .awsIamRole(StorageCredentialAwsIamRoleArgs.builder()
            .roleArn("string")
            .externalId("string")
            .unityCatalogIamArn("string")
            .build())
        .azureManagedIdentity(StorageCredentialAzureManagedIdentityArgs.builder()
            .accessConnectorId("string")
            .credentialId("string")
            .managedIdentityId("string")
            .build())
        .azureServicePrincipal(StorageCredentialAzureServicePrincipalArgs.builder()
            .applicationId("string")
            .clientSecret("string")
            .directoryId("string")
            .build())
        .cloudflareApiToken(StorageCredentialCloudflareApiTokenArgs.builder()
            .accessKeyId("string")
            .accountId("string")
            .secretAccessKey("string")
            .build())
        .comment("string")
        .databricksGcpServiceAccount(StorageCredentialDatabricksGcpServiceAccountArgs.builder()
            .credentialId("string")
            .email("string")
            .build())
        .forceDestroy(false)
        .forceUpdate(false)
        .gcpServiceAccountKey(StorageCredentialGcpServiceAccountKeyArgs.builder()
            .email("string")
            .privateKey("string")
            .privateKeyId("string")
            .build())
        .isolationMode("string")
        .metastoreId("string")
        .name("string")
        .owner("string")
        .readOnly(false)
        .skipValidation(false)
        .build());
    
    storage_credential_resource = databricks.StorageCredential("storageCredentialResource",
        aws_iam_role={
            "role_arn": "string",
            "external_id": "string",
            "unity_catalog_iam_arn": "string",
        },
        azure_managed_identity={
            "access_connector_id": "string",
            "credential_id": "string",
            "managed_identity_id": "string",
        },
        azure_service_principal={
            "application_id": "string",
            "client_secret": "string",
            "directory_id": "string",
        },
        cloudflare_api_token={
            "access_key_id": "string",
            "account_id": "string",
            "secret_access_key": "string",
        },
        comment="string",
        databricks_gcp_service_account={
            "credential_id": "string",
            "email": "string",
        },
        force_destroy=False,
        force_update=False,
        gcp_service_account_key={
            "email": "string",
            "private_key": "string",
            "private_key_id": "string",
        },
        isolation_mode="string",
        metastore_id="string",
        name="string",
        owner="string",
        read_only=False,
        skip_validation=False)
    
    const storageCredentialResource = new databricks.StorageCredential("storageCredentialResource", {
        awsIamRole: {
            roleArn: "string",
            externalId: "string",
            unityCatalogIamArn: "string",
        },
        azureManagedIdentity: {
            accessConnectorId: "string",
            credentialId: "string",
            managedIdentityId: "string",
        },
        azureServicePrincipal: {
            applicationId: "string",
            clientSecret: "string",
            directoryId: "string",
        },
        cloudflareApiToken: {
            accessKeyId: "string",
            accountId: "string",
            secretAccessKey: "string",
        },
        comment: "string",
        databricksGcpServiceAccount: {
            credentialId: "string",
            email: "string",
        },
        forceDestroy: false,
        forceUpdate: false,
        gcpServiceAccountKey: {
            email: "string",
            privateKey: "string",
            privateKeyId: "string",
        },
        isolationMode: "string",
        metastoreId: "string",
        name: "string",
        owner: "string",
        readOnly: false,
        skipValidation: false,
    });
    
    type: databricks:StorageCredential
    properties:
        awsIamRole:
            externalId: string
            roleArn: string
            unityCatalogIamArn: string
        azureManagedIdentity:
            accessConnectorId: string
            credentialId: string
            managedIdentityId: string
        azureServicePrincipal:
            applicationId: string
            clientSecret: string
            directoryId: string
        cloudflareApiToken:
            accessKeyId: string
            accountId: string
            secretAccessKey: string
        comment: string
        databricksGcpServiceAccount:
            credentialId: string
            email: string
        forceDestroy: false
        forceUpdate: false
        gcpServiceAccountKey:
            email: string
            privateKey: string
            privateKeyId: string
        isolationMode: string
        metastoreId: string
        name: string
        owner: string
        readOnly: false
        skipValidation: false
    

    StorageCredential Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The StorageCredential resource accepts the following input properties:

    AwsIamRole StorageCredentialAwsIamRole
    AzureManagedIdentity StorageCredentialAzureManagedIdentity
    AzureServicePrincipal StorageCredentialAzureServicePrincipal
    CloudflareApiToken StorageCredentialCloudflareApiToken
    Comment string
    DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccount
    ForceDestroy bool
    Delete storage credential regardless of its dependencies.
    ForceUpdate bool
    Update storage credential regardless of its dependents.
    GcpServiceAccountKey StorageCredentialGcpServiceAccountKey
    IsolationMode string

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    The `aws_iam_role` argument is an optional configuration block holding credential details for AWS.

    MetastoreId string
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    Name string
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    Owner string
    Username/groupname/sp application_id of the storage credential owner.
    ReadOnly bool
    Indicates whether the storage credential is only usable for read operations.
    SkipValidation bool
    Suppress validation errors if any & force save the storage credential.
    AwsIamRole StorageCredentialAwsIamRoleArgs
    AzureManagedIdentity StorageCredentialAzureManagedIdentityArgs
    AzureServicePrincipal StorageCredentialAzureServicePrincipalArgs
    CloudflareApiToken StorageCredentialCloudflareApiTokenArgs
    Comment string
    DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs
    ForceDestroy bool
    Delete storage credential regardless of its dependencies.
    ForceUpdate bool
    Update storage credential regardless of its dependents.
    GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
    IsolationMode string

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    The `aws_iam_role` argument is an optional configuration block holding credential details for AWS.

    MetastoreId string
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    Name string
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    Owner string
    Username/groupname/sp application_id of the storage credential owner.
    ReadOnly bool
    Indicates whether the storage credential is only usable for read operations.
    SkipValidation bool
    Suppress validation errors if any & force save the storage credential.
    awsIamRole StorageCredentialAwsIamRole
    azureManagedIdentity StorageCredentialAzureManagedIdentity
    azureServicePrincipal StorageCredentialAzureServicePrincipal
    cloudflareApiToken StorageCredentialCloudflareApiToken
    comment String
    databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccount
    forceDestroy Boolean
    Delete storage credential regardless of its dependencies.
    forceUpdate Boolean
    Update storage credential regardless of its dependents.
    gcpServiceAccountKey StorageCredentialGcpServiceAccountKey
    isolationMode String

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    aws_iam_role optional configuration block for credential details for AWS:

    metastoreId String
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    name String
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner String
    Username/groupname/sp application_id of the storage credential owner.
    readOnly Boolean
    Indicates whether the storage credential is only usable for read operations.
    skipValidation Boolean
    Suppress validation errors if any & force save the storage credential.
    awsIamRole StorageCredentialAwsIamRole
    azureManagedIdentity StorageCredentialAzureManagedIdentity
    azureServicePrincipal StorageCredentialAzureServicePrincipal
    cloudflareApiToken StorageCredentialCloudflareApiToken
    comment string
    databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccount
    forceDestroy boolean
    Delete storage credential regardless of its dependencies.
    forceUpdate boolean
    Update storage credential regardless of its dependents.
    gcpServiceAccountKey StorageCredentialGcpServiceAccountKey
    isolationMode string

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    aws_iam_role optional configuration block for credential details for AWS:

    metastoreId string
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    name string
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner string
    Username/groupname/sp application_id of the storage credential owner.
    readOnly boolean
    Indicates whether the storage credential is only usable for read operations.
    skipValidation boolean
    Suppress validation errors if any & force save the storage credential.
    aws_iam_role StorageCredentialAwsIamRoleArgs
    azure_managed_identity StorageCredentialAzureManagedIdentityArgs
    azure_service_principal StorageCredentialAzureServicePrincipalArgs
    cloudflare_api_token StorageCredentialCloudflareApiTokenArgs
    comment str
    databricks_gcp_service_account StorageCredentialDatabricksGcpServiceAccountArgs
    force_destroy bool
    Delete storage credential regardless of its dependencies.
    force_update bool
    Update storage credential regardless of its dependents.
    gcp_service_account_key StorageCredentialGcpServiceAccountKeyArgs
    isolation_mode str

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    aws_iam_role optional configuration block for credential details for AWS:

    metastore_id str
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    name str
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner str
    Username/groupname/sp application_id of the storage credential owner.
    read_only bool
    Indicates whether the storage credential is only usable for read operations.
    skip_validation bool
    Suppress validation errors if any & force save the storage credential.
    awsIamRole Property Map
    azureManagedIdentity Property Map
    azureServicePrincipal Property Map
    cloudflareApiToken Property Map
    comment String
    databricksGcpServiceAccount Property Map
    forceDestroy Boolean
    Delete storage credential regardless of its dependencies.
    forceUpdate Boolean
    Update storage credential regardless of its dependents.
    gcpServiceAccountKey Property Map
    isolationMode String

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    aws_iam_role optional configuration block for credential details for AWS:

    metastoreId String
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    name String
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner String
    Username/groupname/sp application_id of the storage credential owner.
    readOnly Boolean
    Indicates whether the storage credential is only usable for read operations.
    skipValidation Boolean
    Suppress validation errors if any & force save the storage credential.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the StorageCredential resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    StorageCredentialId string
    Unique ID of storage credential.
    Id string
    The provider-assigned unique ID for this managed resource.
    StorageCredentialId string
    Unique ID of storage credential.
    id String
    The provider-assigned unique ID for this managed resource.
    storageCredentialId String
    Unique ID of storage credential.
    id string
    The provider-assigned unique ID for this managed resource.
    storageCredentialId string
    Unique ID of storage credential.
    id str
    The provider-assigned unique ID for this managed resource.
    storage_credential_id str
    Unique ID of storage credential.
    id String
    The provider-assigned unique ID for this managed resource.
    storageCredentialId String
    Unique ID of storage credential.

    Look up Existing StorageCredential Resource

    Get an existing StorageCredential resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: StorageCredentialState, opts?: CustomResourceOptions): StorageCredential
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
            azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
            azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
            cloudflare_api_token: Optional[StorageCredentialCloudflareApiTokenArgs] = None,
            comment: Optional[str] = None,
            databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
            force_destroy: Optional[bool] = None,
            force_update: Optional[bool] = None,
            gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
            isolation_mode: Optional[str] = None,
            metastore_id: Optional[str] = None,
            name: Optional[str] = None,
            owner: Optional[str] = None,
            read_only: Optional[bool] = None,
            skip_validation: Optional[bool] = None,
            storage_credential_id: Optional[str] = None) -> StorageCredential
    func GetStorageCredential(ctx *Context, name string, id IDInput, state *StorageCredentialState, opts ...ResourceOption) (*StorageCredential, error)
    public static StorageCredential Get(string name, Input<string> id, StorageCredentialState? state, CustomResourceOptions? opts = null)
    public static StorageCredential get(String name, Output<String> id, StorageCredentialState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AwsIamRole StorageCredentialAwsIamRole
    AzureManagedIdentity StorageCredentialAzureManagedIdentity
    AzureServicePrincipal StorageCredentialAzureServicePrincipal
    CloudflareApiToken StorageCredentialCloudflareApiToken
    Comment string
    DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccount
    ForceDestroy bool
    Delete storage credential regardless of its dependencies.
    ForceUpdate bool
    Update storage credential regardless of its dependents.
    GcpServiceAccountKey StorageCredentialGcpServiceAccountKey
    IsolationMode string

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    aws_iam_role optional configuration block for credential details for AWS:

    MetastoreId string
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    Name string
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    Owner string
    Username/groupname/sp application_id of the storage credential owner.
    ReadOnly bool
    Indicates whether the storage credential is only usable for read operations.
    SkipValidation bool
    Suppress validation errors if any & force save the storage credential.
    StorageCredentialId string
    Unique ID of storage credential.
    AwsIamRole StorageCredentialAwsIamRoleArgs
    AzureManagedIdentity StorageCredentialAzureManagedIdentityArgs
    AzureServicePrincipal StorageCredentialAzureServicePrincipalArgs
    CloudflareApiToken StorageCredentialCloudflareApiTokenArgs
    Comment string
    DatabricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccountArgs
    ForceDestroy bool
    Delete storage credential regardless of its dependencies.
    ForceUpdate bool
    Update storage credential regardless of its dependents.
    GcpServiceAccountKey StorageCredentialGcpServiceAccountKeyArgs
    IsolationMode string

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    aws_iam_role optional configuration block for credential details for AWS:

    MetastoreId string
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    Name string
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    Owner string
    Username/groupname/sp application_id of the storage credential owner.
    ReadOnly bool
    Indicates whether the storage credential is only usable for read operations.
    SkipValidation bool
    Suppress validation errors if any & force save the storage credential.
    StorageCredentialId string
    Unique ID of storage credential.
    awsIamRole StorageCredentialAwsIamRole
    azureManagedIdentity StorageCredentialAzureManagedIdentity
    azureServicePrincipal StorageCredentialAzureServicePrincipal
    cloudflareApiToken StorageCredentialCloudflareApiToken
    comment String
    databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccount
    forceDestroy Boolean
    Delete storage credential regardless of its dependencies.
    forceUpdate Boolean
    Update storage credential regardless of its dependents.
    gcpServiceAccountKey StorageCredentialGcpServiceAccountKey
    isolationMode String

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    aws_iam_role optional configuration block for credential details for AWS:

    metastoreId String
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    name String
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner String
    Username/groupname/sp application_id of the storage credential owner.
    readOnly Boolean
    Indicates whether the storage credential is only usable for read operations.
    skipValidation Boolean
    Suppress validation errors if any & force save the storage credential.
    storageCredentialId String
    Unique ID of storage credential.
    awsIamRole StorageCredentialAwsIamRole
    azureManagedIdentity StorageCredentialAzureManagedIdentity
    azureServicePrincipal StorageCredentialAzureServicePrincipal
    cloudflareApiToken StorageCredentialCloudflareApiToken
    comment string
    databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccount
    forceDestroy boolean
    Delete storage credential regardless of its dependencies.
    forceUpdate boolean
    Update storage credential regardless of its dependents.
    gcpServiceAccountKey StorageCredentialGcpServiceAccountKey
    isolationMode string

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    aws_iam_role optional configuration block for credential details for AWS:

    metastoreId string
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    name string
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner string
    Username/groupname/sp application_id of the storage credential owner.
    readOnly boolean
    Indicates whether the storage credential is only usable for read operations.
    skipValidation boolean
    Suppress validation errors if any & force save the storage credential.
    storageCredentialId string
    Unique ID of storage credential.
    aws_iam_role StorageCredentialAwsIamRoleArgs
    azure_managed_identity StorageCredentialAzureManagedIdentityArgs
    azure_service_principal StorageCredentialAzureServicePrincipalArgs
    cloudflare_api_token StorageCredentialCloudflareApiTokenArgs
    comment str
    databricks_gcp_service_account StorageCredentialDatabricksGcpServiceAccountArgs
    force_destroy bool
    Delete storage credential regardless of its dependencies.
    force_update bool
    Update storage credential regardless of its dependents.
    gcp_service_account_key StorageCredentialGcpServiceAccountKeyArgs
    isolation_mode str

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    aws_iam_role optional configuration block for credential details for AWS:

    metastore_id str
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    name str
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner str
    Username/groupname/sp application_id of the storage credential owner.
    read_only bool
    Indicates whether the storage credential is only usable for read operations.
    skip_validation bool
    Suppress validation errors if any & force save the storage credential.
    storage_credential_id str
    Unique ID of storage credential.
    awsIamRole Property Map
    azureManagedIdentity Property Map
    azureServicePrincipal Property Map
    cloudflareApiToken Property Map
    comment String
    databricksGcpServiceAccount Property Map
    forceDestroy Boolean
    Delete storage credential regardless of its dependencies.
    forceUpdate Boolean
    Update storage credential regardless of its dependents.
    gcpServiceAccountKey Property Map
    isolationMode String

    Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.

    aws_iam_role optional configuration block for credential details for AWS:

    metastoreId String
    Unique identifier of the parent Metastore. If set for workspace-level, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
    name String
    Name of Storage Credentials, which must be unique within the databricks_metastore. Change forces creation of a new resource.
    owner String
    Username/groupname/sp application_id of the storage credential owner.
    readOnly Boolean
    Indicates whether the storage credential is only usable for read operations.
    skipValidation Boolean
    Suppress validation errors if any & force save the storage credential.
    storageCredentialId String
    Unique ID of storage credential.

    Supporting Types

    StorageCredentialAwsIamRole, StorageCredentialAwsIamRoleArgs

    RoleArn string

    The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

    azure_managed_identity optional configuration block for using managed identity as credential details for Azure (recommended over service principal):

    ExternalId string
    UnityCatalogIamArn string
    RoleArn string

    The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

    azure_managed_identity optional configuration block for using managed identity as credential details for Azure (recommended over service principal):

    ExternalId string
    UnityCatalogIamArn string
    roleArn String

    The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

    azure_managed_identity optional configuration block for using managed identity as credential details for Azure (recommended over service principal):

    externalId String
    unityCatalogIamArn String
    roleArn string

    The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

    azure_managed_identity optional configuration block for using managed identity as credential details for Azure (recommended over service principal):

    externalId string
    unityCatalogIamArn string
    role_arn str

    The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

    azure_managed_identity optional configuration block for using managed identity as credential details for Azure (recommended over service principal):

    external_id str
    unity_catalog_iam_arn str
    roleArn String

    The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF

    azure_managed_identity optional configuration block for using managed identity as credential details for Azure (recommended over service principal):

    externalId String
    unityCatalogIamArn String

    StorageCredentialAzureManagedIdentity, StorageCredentialAzureManagedIdentityArgs

    AccessConnectorId string
    The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
    CredentialId string
    ManagedIdentityId string

    The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name.

    databricks_gcp_service_account optional configuration block for creating a Databricks-managed GCP Service Account:

    AccessConnectorId string
    The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
    CredentialId string
    ManagedIdentityId string

    The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name.

    databricks_gcp_service_account optional configuration block for creating a Databricks-managed GCP Service Account:

    accessConnectorId String
    The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
    credentialId String
    managedIdentityId String

    The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name.

    databricks_gcp_service_account optional configuration block for creating a Databricks-managed GCP Service Account:

    accessConnectorId string
    The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
    credentialId string
    managedIdentityId string

    The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name.

    databricks_gcp_service_account optional configuration block for creating a Databricks-managed GCP Service Account:

    access_connector_id str
    The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
    credential_id str
    managed_identity_id str

    The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name.

    databricks_gcp_service_account optional configuration block for creating a Databricks-managed GCP Service Account:

    accessConnectorId String
    The Resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
    credentialId String
    managedIdentityId String

    The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name.

    databricks_gcp_service_account optional configuration block for creating a Databricks-managed GCP Service Account:

    StorageCredentialAzureServicePrincipal, StorageCredentialAzureServicePrincipalArgs

    ApplicationId string
    The application ID of the application registration within the referenced AAD tenant
    ClientSecret string
    The client secret generated for the above app ID in AAD. This field is redacted on output
    DirectoryId string
    The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
    ApplicationId string
    The application ID of the application registration within the referenced AAD tenant
    ClientSecret string
    The client secret generated for the above app ID in AAD. This field is redacted on output
    DirectoryId string
    The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
    applicationId String
    The application ID of the application registration within the referenced AAD tenant
    clientSecret String
    The client secret generated for the above app ID in AAD. This field is redacted on output
    directoryId String
    The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
    applicationId string
    The application ID of the application registration within the referenced AAD tenant
    clientSecret string
    The client secret generated for the above app ID in AAD. This field is redacted on output
    directoryId string
    The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
    application_id str
    The application ID of the application registration within the referenced AAD tenant
    client_secret str
    The client secret generated for the above app ID in AAD. This field is redacted on output
    directory_id str
    The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
    applicationId String
    The application ID of the application registration within the referenced AAD tenant
    clientSecret String
    The client secret generated for the above app ID in AAD. This field is redacted on output
    directoryId String
    The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application

    StorageCredentialCloudflareApiToken, StorageCredentialCloudflareApiTokenArgs

    AccessKeyId string
    R2 API token access key ID
    AccountId string
    R2 account ID
    SecretAccessKey string

    R2 API token secret access key

    azure_service_principal optional configuration block to use service principal as credential details for Azure (Legacy):

    AccessKeyId string
    R2 API token access key ID
    AccountId string
    R2 account ID
    SecretAccessKey string

    R2 API token secret access key

    azure_service_principal optional configuration block to use service principal as credential details for Azure (Legacy):

    accessKeyId String
    R2 API token access key ID
    accountId String
    R2 account ID
    secretAccessKey String

    R2 API token secret access key

    azure_service_principal optional configuration block to use service principal as credential details for Azure (Legacy):

    accessKeyId string
    R2 API token access key ID
    accountId string
    R2 account ID
    secretAccessKey string

    R2 API token secret access key

    azure_service_principal optional configuration block to use service principal as credential details for Azure (Legacy):

    access_key_id str
    R2 API token access key ID
    account_id str
    R2 account ID
    secret_access_key str

    R2 API token secret access key

    azure_service_principal optional configuration block to use service principal as credential details for Azure (Legacy):

    accessKeyId String
    R2 API token access key ID
    accountId String
    R2 account ID
    secretAccessKey String

    R2 API token secret access key

    azure_service_principal optional configuration block to use service principal as credential details for Azure (Legacy):

    StorageCredentialDatabricksGcpServiceAccount, StorageCredentialDatabricksGcpServiceAccountArgs

    CredentialId string
    Email string

    The email of the GCP service account created, to be granted access to relevant buckets.

    CredentialId string
    Email string

    The email of the GCP service account created, to be granted access to relevant buckets.

    credentialId String
    email String

    The email of the GCP service account created, to be granted access to relevant buckets.

    credentialId string
    email string

    The email of the GCP service account created, to be granted access to relevant buckets.

    credential_id str
    email str

    The email of the GCP service account created, to be granted access to relevant buckets.

    credentialId String
    email String

    The email of the GCP service account created, to be granted access to relevant buckets.

    StorageCredentialGcpServiceAccountKey, StorageCredentialGcpServiceAccountKeyArgs

    Email string

    The email of the GCP service account created, to be granted access to relevant buckets.

    PrivateKey string
    PrivateKeyId string
    Email string

    The email of the GCP service account created, to be granted access to relevant buckets.

    PrivateKey string
    PrivateKeyId string
    email String

    The email of the GCP service account created, to be granted access to relevant buckets.

    privateKey String
    privateKeyId String
    email string

    The email of the GCP service account created, to be granted access to relevant buckets.

    privateKey string
    privateKeyId string
    email str

    The email of the GCP service account created, to be granted access to relevant buckets.

    private_key str
    private_key_id str
    email String

    The email of the GCP service account created, to be granted access to relevant buckets.

    privateKey String
    privateKeyId String

    Import

    This resource can be imported by name:

    bash

    $ pulumi import databricks:index/storageCredential:StorageCredential this <name>
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.
    databricks logo
    Databricks v1.56.0 published on Tuesday, Nov 12, 2024 by Pulumi