databricks.StorageCredential
This resource can be used with an account or workspace-level provider.
To work with external tables, Unity Catalog introduces two new objects to access and work with external cloud storage:
- databricks.StorageCredential represents authentication methods to access cloud storage (e.g. an IAM role for Amazon S3 or a service principal/managed identity for Azure Storage). Storage credentials are access-controlled to determine which users can use the credential.
- databricks.ExternalLocation combines a cloud storage path with a storage credential that can be used to access the location.
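The two objects are typically used together; the TypeScript sketch below pairs a credential with a databricks.ExternalLocation (the role ARN, bucket path, and resource names are illustrative placeholders, not values from the examples that follow):

import * as databricks from "@pulumi/databricks";

// A storage credential wrapping an IAM role (placeholder ARN).
const cred = new databricks.StorageCredential("cred", {
    name: "external-cred",
    awsIamRole: {
        roleArn: "arn:aws:iam::1234567890:role/my-uc-role",
    },
});

// An external location combines a cloud storage path with that credential.
const location = new databricks.ExternalLocation("location", {
    name: "external-location",
    url: "s3://my-bucket/some/prefix",
    credentialName: cred.name,
});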
Example Usage
For AWS
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const external = new databricks.StorageCredential("external", {
name: externalDataAccess.name,
awsIamRole: {
roleArn: externalDataAccess.arn,
},
comment: "Managed by TF",
});
const externalCreds = new databricks.Grants("external_creds", {
storageCredential: external.id,
grants: [{
principal: "Data Engineers",
privileges: ["CREATE_EXTERNAL_TABLE"],
}],
});
import pulumi
import pulumi_databricks as databricks
external = databricks.StorageCredential("external",
name=external_data_access["name"],
aws_iam_role={
"role_arn": external_data_access["arn"],
},
comment="Managed by TF")
external_creds = databricks.Grants("external_creds",
storage_credential=external.id,
grants=[{
"principal": "Data Engineers",
"privileges": ["CREATE_EXTERNAL_TABLE"],
}])
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
Name: pulumi.Any(externalDataAccess.Name),
AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
RoleArn: pulumi.Any(externalDataAccess.Arn),
},
Comment: pulumi.String("Managed by TF"),
})
if err != nil {
return err
}
_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
StorageCredential: external.ID(),
Grants: databricks.GrantsGrantArray{
&databricks.GrantsGrantArgs{
Principal: pulumi.String("Data Engineers"),
Privileges: pulumi.StringArray{
pulumi.String("CREATE_EXTERNAL_TABLE"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var external = new Databricks.StorageCredential("external", new()
{
Name = externalDataAccess.Name,
AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
{
RoleArn = externalDataAccess.Arn,
},
Comment = "Managed by TF",
});
var externalCreds = new Databricks.Grants("external_creds", new()
{
StorageCredential = external.Id,
GrantDetails = new[]
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "Data Engineers",
Privileges = new[]
{
"CREATE_EXTERNAL_TABLE",
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialAwsIamRoleArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var external = new StorageCredential("external", StorageCredentialArgs.builder()
.name(externalDataAccess.name())
.awsIamRole(StorageCredentialAwsIamRoleArgs.builder()
.roleArn(externalDataAccess.arn())
.build())
.comment("Managed by TF")
.build());
var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
.storageCredential(external.id())
.grants(GrantsGrantArgs.builder()
.principal("Data Engineers")
.privileges("CREATE_EXTERNAL_TABLE")
.build())
.build());
}
}
resources:
external:
type: databricks:StorageCredential
properties:
name: ${externalDataAccess.name}
awsIamRole:
roleArn: ${externalDataAccess.arn}
comment: Managed by TF
externalCreds:
type: databricks:Grants
name: external_creds
properties:
storageCredential: ${external.id}
grants:
- principal: Data Engineers
privileges:
- CREATE_EXTERNAL_TABLE
For Azure
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const externalMi = new databricks.StorageCredential("external_mi", {
name: "mi_credential",
azureManagedIdentity: {
accessConnectorId: example.id,
},
comment: "Managed identity credential managed by TF",
});
const externalCreds = new databricks.Grants("external_creds", {
storageCredential: externalMi.id,
grants: [{
principal: "Data Engineers",
privileges: ["CREATE_EXTERNAL_TABLE"],
}],
});
import pulumi
import pulumi_databricks as databricks
external_mi = databricks.StorageCredential("external_mi",
name="mi_credential",
azure_managed_identity={
"access_connector_id": example["id"],
},
comment="Managed identity credential managed by TF")
external_creds = databricks.Grants("external_creds",
storage_credential=external_mi.id,
grants=[{
"principal": "Data Engineers",
"privileges": ["CREATE_EXTERNAL_TABLE"],
}])
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
externalMi, err := databricks.NewStorageCredential(ctx, "external_mi", &databricks.StorageCredentialArgs{
Name: pulumi.String("mi_credential"),
AzureManagedIdentity: &databricks.StorageCredentialAzureManagedIdentityArgs{
AccessConnectorId: pulumi.Any(example.Id),
},
Comment: pulumi.String("Managed identity credential managed by TF"),
})
if err != nil {
return err
}
_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
StorageCredential: externalMi.ID(),
Grants: databricks.GrantsGrantArray{
&databricks.GrantsGrantArgs{
Principal: pulumi.String("Data Engineers"),
Privileges: pulumi.StringArray{
pulumi.String("CREATE_EXTERNAL_TABLE"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var externalMi = new Databricks.StorageCredential("external_mi", new()
{
Name = "mi_credential",
AzureManagedIdentity = new Databricks.Inputs.StorageCredentialAzureManagedIdentityArgs
{
AccessConnectorId = example.Id,
},
Comment = "Managed identity credential managed by TF",
});
var externalCreds = new Databricks.Grants("external_creds", new()
{
StorageCredential = externalMi.Id,
GrantDetails = new[]
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "Data Engineers",
Privileges = new[]
{
"CREATE_EXTERNAL_TABLE",
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialAzureManagedIdentityArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var externalMi = new StorageCredential("externalMi", StorageCredentialArgs.builder()
.name("mi_credential")
.azureManagedIdentity(StorageCredentialAzureManagedIdentityArgs.builder()
.accessConnectorId(example.id())
.build())
.comment("Managed identity credential managed by TF")
.build());
var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
.storageCredential(externalMi.id())
.grants(GrantsGrantArgs.builder()
.principal("Data Engineers")
.privileges("CREATE_EXTERNAL_TABLE")
.build())
.build());
}
}
resources:
externalMi:
type: databricks:StorageCredential
name: external_mi
properties:
name: mi_credential
azureManagedIdentity:
accessConnectorId: ${example.id}
comment: Managed identity credential managed by TF
externalCreds:
type: databricks:Grants
name: external_creds
properties:
storageCredential: ${externalMi.id}
grants:
- principal: Data Engineers
privileges:
- CREATE_EXTERNAL_TABLE
For GCP
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const external = new databricks.StorageCredential("external", {
name: "the-creds",
databricksGcpServiceAccount: {},
});
const externalCreds = new databricks.Grants("external_creds", {
storageCredential: external.id,
grants: [{
principal: "Data Engineers",
privileges: ["CREATE_EXTERNAL_TABLE"],
}],
});
import pulumi
import pulumi_databricks as databricks
external = databricks.StorageCredential("external",
name="the-creds",
databricks_gcp_service_account={})
external_creds = databricks.Grants("external_creds",
storage_credential=external.id,
grants=[{
"principal": "Data Engineers",
"privileges": ["CREATE_EXTERNAL_TABLE"],
}])
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
external, err := databricks.NewStorageCredential(ctx, "external", &databricks.StorageCredentialArgs{
Name: pulumi.String("the-creds"),
DatabricksGcpServiceAccount: &databricks.StorageCredentialDatabricksGcpServiceAccountArgs{},
})
if err != nil {
return err
}
_, err = databricks.NewGrants(ctx, "external_creds", &databricks.GrantsArgs{
StorageCredential: external.ID(),
Grants: databricks.GrantsGrantArray{
&databricks.GrantsGrantArgs{
Principal: pulumi.String("Data Engineers"),
Privileges: pulumi.StringArray{
pulumi.String("CREATE_EXTERNAL_TABLE"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var external = new Databricks.StorageCredential("external", new()
{
Name = "the-creds",
DatabricksGcpServiceAccount = null,
});
var externalCreds = new Databricks.Grants("external_creds", new()
{
StorageCredential = external.Id,
GrantDetails = new[]
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "Data Engineers",
Privileges = new[]
{
"CREATE_EXTERNAL_TABLE",
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.StorageCredential;
import com.pulumi.databricks.StorageCredentialArgs;
import com.pulumi.databricks.inputs.StorageCredentialDatabricksGcpServiceAccountArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var external = new StorageCredential("external", StorageCredentialArgs.builder()
.name("the-creds")
.databricksGcpServiceAccount(StorageCredentialDatabricksGcpServiceAccountArgs.builder().build())
.build());
var externalCreds = new Grants("externalCreds", GrantsArgs.builder()
.storageCredential(external.id())
.grants(GrantsGrantArgs.builder()
.principal("Data Engineers")
.privileges("CREATE_EXTERNAL_TABLE")
.build())
.build());
}
}
resources:
external:
type: databricks:StorageCredential
properties:
name: the-creds
databricksGcpServiceAccount: {}
externalCreds:
type: databricks:Grants
name: external_creds
properties:
storageCredential: ${external.id}
grants:
- principal: Data Engineers
privileges:
- CREATE_EXTERNAL_TABLE
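With databricksGcpServiceAccount, Databricks creates the service account itself, and the generated email must then be granted access to the relevant GCS buckets. A TypeScript sketch of exporting that email, assuming the external credential from the example above (the IAM binding itself is not shown):

// The service account is created by Databricks; export its generated email so
// it can be granted access to the target buckets.
export const gcpServiceAccountEmail = external.databricksGcpServiceAccount.apply(sa => sa?.email);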
Create StorageCredential Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new StorageCredential(name: string, args?: StorageCredentialArgs, opts?: CustomResourceOptions);
@overload
def StorageCredential(resource_name: str,
args: Optional[StorageCredentialArgs] = None,
opts: Optional[ResourceOptions] = None)
@overload
def StorageCredential(resource_name: str,
opts: Optional[ResourceOptions] = None,
aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
cloudflare_api_token: Optional[StorageCredentialCloudflareApiTokenArgs] = None,
comment: Optional[str] = None,
databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
force_destroy: Optional[bool] = None,
force_update: Optional[bool] = None,
gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
isolation_mode: Optional[str] = None,
metastore_id: Optional[str] = None,
name: Optional[str] = None,
owner: Optional[str] = None,
read_only: Optional[bool] = None,
skip_validation: Optional[bool] = None)
func NewStorageCredential(ctx *Context, name string, args *StorageCredentialArgs, opts ...ResourceOption) (*StorageCredential, error)
public StorageCredential(string name, StorageCredentialArgs? args = null, CustomResourceOptions? opts = null)
public StorageCredential(String name, StorageCredentialArgs args)
public StorageCredential(String name, StorageCredentialArgs args, CustomResourceOptions options)
type: databricks:StorageCredential
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string - The unique name of the resource (resource_name in Python).
- args StorageCredentialArgs - The arguments to resource properties.
- opts CustomResourceOptions - Bag of options to control the resource's behavior (ResourceOptions in Python, ResourceOption in Go, options in Java).
- ctx Context - Context object for the current deployment (Go only).
Constructor example
The following reference example uses placeholder values for all input properties.
var storageCredentialResource = new Databricks.StorageCredential("storageCredentialResource", new()
{
AwsIamRole = new Databricks.Inputs.StorageCredentialAwsIamRoleArgs
{
RoleArn = "string",
ExternalId = "string",
UnityCatalogIamArn = "string",
},
AzureManagedIdentity = new Databricks.Inputs.StorageCredentialAzureManagedIdentityArgs
{
AccessConnectorId = "string",
CredentialId = "string",
ManagedIdentityId = "string",
},
AzureServicePrincipal = new Databricks.Inputs.StorageCredentialAzureServicePrincipalArgs
{
ApplicationId = "string",
ClientSecret = "string",
DirectoryId = "string",
},
CloudflareApiToken = new Databricks.Inputs.StorageCredentialCloudflareApiTokenArgs
{
AccessKeyId = "string",
AccountId = "string",
SecretAccessKey = "string",
},
Comment = "string",
DatabricksGcpServiceAccount = new Databricks.Inputs.StorageCredentialDatabricksGcpServiceAccountArgs
{
CredentialId = "string",
Email = "string",
},
ForceDestroy = false,
ForceUpdate = false,
GcpServiceAccountKey = new Databricks.Inputs.StorageCredentialGcpServiceAccountKeyArgs
{
Email = "string",
PrivateKey = "string",
PrivateKeyId = "string",
},
IsolationMode = "string",
MetastoreId = "string",
Name = "string",
Owner = "string",
ReadOnly = false,
SkipValidation = false,
});
example, err := databricks.NewStorageCredential(ctx, "storageCredentialResource", &databricks.StorageCredentialArgs{
AwsIamRole: &databricks.StorageCredentialAwsIamRoleArgs{
RoleArn: pulumi.String("string"),
ExternalId: pulumi.String("string"),
UnityCatalogIamArn: pulumi.String("string"),
},
AzureManagedIdentity: &databricks.StorageCredentialAzureManagedIdentityArgs{
AccessConnectorId: pulumi.String("string"),
CredentialId: pulumi.String("string"),
ManagedIdentityId: pulumi.String("string"),
},
AzureServicePrincipal: &databricks.StorageCredentialAzureServicePrincipalArgs{
ApplicationId: pulumi.String("string"),
ClientSecret: pulumi.String("string"),
DirectoryId: pulumi.String("string"),
},
CloudflareApiToken: &databricks.StorageCredentialCloudflareApiTokenArgs{
AccessKeyId: pulumi.String("string"),
AccountId: pulumi.String("string"),
SecretAccessKey: pulumi.String("string"),
},
Comment: pulumi.String("string"),
DatabricksGcpServiceAccount: &databricks.StorageCredentialDatabricksGcpServiceAccountArgs{
CredentialId: pulumi.String("string"),
Email: pulumi.String("string"),
},
ForceDestroy: pulumi.Bool(false),
ForceUpdate: pulumi.Bool(false),
GcpServiceAccountKey: &databricks.StorageCredentialGcpServiceAccountKeyArgs{
Email: pulumi.String("string"),
PrivateKey: pulumi.String("string"),
PrivateKeyId: pulumi.String("string"),
},
IsolationMode: pulumi.String("string"),
MetastoreId: pulumi.String("string"),
Name: pulumi.String("string"),
Owner: pulumi.String("string"),
ReadOnly: pulumi.Bool(false),
SkipValidation: pulumi.Bool(false),
})
var storageCredentialResource = new StorageCredential("storageCredentialResource", StorageCredentialArgs.builder()
.awsIamRole(StorageCredentialAwsIamRoleArgs.builder()
.roleArn("string")
.externalId("string")
.unityCatalogIamArn("string")
.build())
.azureManagedIdentity(StorageCredentialAzureManagedIdentityArgs.builder()
.accessConnectorId("string")
.credentialId("string")
.managedIdentityId("string")
.build())
.azureServicePrincipal(StorageCredentialAzureServicePrincipalArgs.builder()
.applicationId("string")
.clientSecret("string")
.directoryId("string")
.build())
.cloudflareApiToken(StorageCredentialCloudflareApiTokenArgs.builder()
.accessKeyId("string")
.accountId("string")
.secretAccessKey("string")
.build())
.comment("string")
.databricksGcpServiceAccount(StorageCredentialDatabricksGcpServiceAccountArgs.builder()
.credentialId("string")
.email("string")
.build())
.forceDestroy(false)
.forceUpdate(false)
.gcpServiceAccountKey(StorageCredentialGcpServiceAccountKeyArgs.builder()
.email("string")
.privateKey("string")
.privateKeyId("string")
.build())
.isolationMode("string")
.metastoreId("string")
.name("string")
.owner("string")
.readOnly(false)
.skipValidation(false)
.build());
storage_credential_resource = databricks.StorageCredential("storageCredentialResource",
aws_iam_role={
"role_arn": "string",
"external_id": "string",
"unity_catalog_iam_arn": "string",
},
azure_managed_identity={
"access_connector_id": "string",
"credential_id": "string",
"managed_identity_id": "string",
},
azure_service_principal={
"application_id": "string",
"client_secret": "string",
"directory_id": "string",
},
cloudflare_api_token={
"access_key_id": "string",
"account_id": "string",
"secret_access_key": "string",
},
comment="string",
databricks_gcp_service_account={
"credential_id": "string",
"email": "string",
},
force_destroy=False,
force_update=False,
gcp_service_account_key={
"email": "string",
"private_key": "string",
"private_key_id": "string",
},
isolation_mode="string",
metastore_id="string",
name="string",
owner="string",
read_only=False,
skip_validation=False)
const storageCredentialResource = new databricks.StorageCredential("storageCredentialResource", {
awsIamRole: {
roleArn: "string",
externalId: "string",
unityCatalogIamArn: "string",
},
azureManagedIdentity: {
accessConnectorId: "string",
credentialId: "string",
managedIdentityId: "string",
},
azureServicePrincipal: {
applicationId: "string",
clientSecret: "string",
directoryId: "string",
},
cloudflareApiToken: {
accessKeyId: "string",
accountId: "string",
secretAccessKey: "string",
},
comment: "string",
databricksGcpServiceAccount: {
credentialId: "string",
email: "string",
},
forceDestroy: false,
forceUpdate: false,
gcpServiceAccountKey: {
email: "string",
privateKey: "string",
privateKeyId: "string",
},
isolationMode: "string",
metastoreId: "string",
name: "string",
owner: "string",
readOnly: false,
skipValidation: false,
});
type: databricks:StorageCredential
properties:
awsIamRole:
externalId: string
roleArn: string
unityCatalogIamArn: string
azureManagedIdentity:
accessConnectorId: string
credentialId: string
managedIdentityId: string
azureServicePrincipal:
applicationId: string
clientSecret: string
directoryId: string
cloudflareApiToken:
accessKeyId: string
accountId: string
secretAccessKey: string
comment: string
databricksGcpServiceAccount:
credentialId: string
email: string
forceDestroy: false
forceUpdate: false
gcpServiceAccountKey:
email: string
privateKey: string
privateKeyId: string
isolationMode: string
metastoreId: string
name: string
owner: string
readOnly: false
skipValidation: false
StorageCredential Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The StorageCredential resource accepts the following input properties:
Property names below are shown in camelCase; each SDK applies its own casing (for example, aws_iam_role in Python).
- awsIamRole StorageCredentialAwsIamRole - Optional configuration block for credential details for AWS.
- azureManagedIdentity StorageCredentialAzureManagedIdentity - Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).
- azureServicePrincipal StorageCredentialAzureServicePrincipal - Optional configuration block to use a service principal as credential details for Azure (legacy).
- cloudflareApiToken StorageCredentialCloudflareApiToken - Optional configuration block for using a Cloudflare API token as credential details. This requires account admin access.
- comment string
- databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccount - Optional configuration block for creating a Databricks-managed GCP service account.
- forceDestroy bool - Delete the storage credential regardless of its dependencies.
- forceUpdate bool - Update the storage credential regardless of its dependents.
- gcpServiceAccountKey StorageCredentialGcpServiceAccountKey
- isolationMode string - Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.
- metastoreId string - Unique identifier of the parent metastore. If set on a workspace-level provider, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
- name string - Name of the storage credential, which must be unique within the databricks_metastore. Changing this forces creation of a new resource.
- owner string - Username, group name, or service principal application_id of the storage credential owner.
- readOnly bool - Indicates whether the storage credential is usable only for read operations.
- skipValidation bool - Suppress validation errors, if any, and force-save the storage credential.
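For example, restricting a credential to explicitly bound workspaces is a single property. A minimal TypeScript sketch (names and the role ARN are placeholders):

import * as databricks from "@pulumi/databricks";

// Credential visible only to the workspaces it is explicitly bound to.
const isolated = new databricks.StorageCredential("isolated", {
    name: "isolated-cred",
    awsIamRole: {
        roleArn: "arn:aws:iam::1234567890:role/my-uc-role",
    },
    isolationMode: "ISOLATION_MODE_ISOLATED",
});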
Outputs
All input properties are implicitly available as output properties. Additionally, the StorageCredential resource produces the following output properties:
- id string - The provider-assigned unique ID for this managed resource.
- storageCredentialId string - Unique ID of the storage credential.
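Both IDs can be exported like any other outputs; for instance, in TypeScript, assuming the external credential from the AWS example above:

// The provider-assigned resource ID and the credential's own unique ID.
export const resourceId = external.id;
export const storageCredentialId = external.storageCredentialId;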
Look up Existing StorageCredential Resource
Get an existing StorageCredential resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: StorageCredentialState, opts?: CustomResourceOptions): StorageCredential
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
aws_iam_role: Optional[StorageCredentialAwsIamRoleArgs] = None,
azure_managed_identity: Optional[StorageCredentialAzureManagedIdentityArgs] = None,
azure_service_principal: Optional[StorageCredentialAzureServicePrincipalArgs] = None,
cloudflare_api_token: Optional[StorageCredentialCloudflareApiTokenArgs] = None,
comment: Optional[str] = None,
databricks_gcp_service_account: Optional[StorageCredentialDatabricksGcpServiceAccountArgs] = None,
force_destroy: Optional[bool] = None,
force_update: Optional[bool] = None,
gcp_service_account_key: Optional[StorageCredentialGcpServiceAccountKeyArgs] = None,
isolation_mode: Optional[str] = None,
metastore_id: Optional[str] = None,
name: Optional[str] = None,
owner: Optional[str] = None,
read_only: Optional[bool] = None,
skip_validation: Optional[bool] = None,
storage_credential_id: Optional[str] = None) -> StorageCredential
func GetStorageCredential(ctx *Context, name string, id IDInput, state *StorageCredentialState, opts ...ResourceOption) (*StorageCredential, error)
public static StorageCredential Get(string name, Input<string> id, StorageCredentialState? state, CustomResourceOptions? opts = null)
public static StorageCredential get(String name, Output<String> id, StorageCredentialState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name - The unique name of the resulting resource (resource_name in Python).
- id - The unique provider ID of the resource to look up.
- state - Any extra arguments used during the lookup.
- opts - A bag of options that control this resource's behavior.
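In TypeScript, for instance, the lookup is a static get call; a short sketch (the ID value is a placeholder):

import * as databricks from "@pulumi/databricks";

// Reads the existing credential's state; the resource is referenced, not managed.
const existing = databricks.StorageCredential.get("existing", "<storage-credential-id>");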
The following state arguments are supported. Names are shown in camelCase; each SDK applies its own casing (for example, aws_iam_role in Python).
- awsIamRole StorageCredentialAwsIamRole - Optional configuration block for credential details for AWS.
- azureManagedIdentity StorageCredentialAzureManagedIdentity - Optional configuration block for using a managed identity as credential details for Azure (recommended over a service principal).
- azureServicePrincipal StorageCredentialAzureServicePrincipal - Optional configuration block to use a service principal as credential details for Azure (legacy).
- cloudflareApiToken StorageCredentialCloudflareApiToken - Optional configuration block for using a Cloudflare API token as credential details. This requires account admin access.
- comment string
- databricksGcpServiceAccount StorageCredentialDatabricksGcpServiceAccount - Optional configuration block for creating a Databricks-managed GCP service account.
- forceDestroy bool - Delete the storage credential regardless of its dependencies.
- forceUpdate bool - Update the storage credential regardless of its dependents.
- gcpServiceAccountKey StorageCredentialGcpServiceAccountKey
- isolationMode string - Whether the storage credential is accessible from all workspaces or a specific set of workspaces. Can be ISOLATION_MODE_ISOLATED or ISOLATION_MODE_OPEN. Setting the credential to ISOLATION_MODE_ISOLATED will automatically allow access from the current workspace.
- metastoreId string - Unique identifier of the parent metastore. If set on a workspace-level provider, it must match the ID of the metastore assigned to the workspace. When changing the metastore assigned to a workspace, this field becomes required.
- name string - Name of the storage credential, which must be unique within the databricks_metastore. Changing this forces creation of a new resource.
- owner string - Username, group name, or service principal application_id of the storage credential owner.
- readOnly bool - Indicates whether the storage credential is usable only for read operations.
- skipValidation bool - Suppress validation errors, if any, and force-save the storage credential.
- storageCredentialId string - Unique ID of the storage credential.
Supporting Types
StorageCredentialAwsIamRole, StorageCredentialAwsIamRoleArgs
- roleArn string - The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF.
- externalId string
- unityCatalogIamArn string
StorageCredentialAzureManagedIdentity, StorageCredentialAzureManagedIdentityArgs
- accessConnectorId string - The resource ID of the Azure Databricks Access Connector resource, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name.
- credentialId string
- managedIdentityId string - The resource ID of the Azure user-assigned managed identity associated with the Azure Databricks Access Connector, of the form /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name.
StorageCredentialAzureServicePrincipal, StorageCredentialAzureServicePrincipalArgs
- applicationId string - The application ID of the application registration within the referenced AAD tenant.
- clientSecret string - The client secret generated for the above app ID in AAD. This field is redacted on output.
- directoryId string - The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application.
StorageCredentialCloudflareApiToken, StorageCredentialCloudflareApiTokenArgs
- accessKeyId string - R2 API token access key ID.
- accountId string - R2 account ID.
- secretAccessKey string - R2 API token secret access key.
StorageCredentialDatabricksGcpServiceAccount, StorageCredentialDatabricksGcpServiceAccountArgs
- credentialId string
- email string - The email of the GCP service account created, to be granted access to relevant buckets.
StorageCredentialGcpServiceAccountKey, StorageCredentialGcpServiceAccountKeyArgs
- email string - The email of the GCP service account created, to be granted access to relevant buckets.
- privateKey string
- privateKeyId string
Import
This resource can be imported by name:
$ pulumi import databricks:index/storageCredential:StorageCredential this <name>
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository - databricks pulumi/pulumi-databricks
- License - Apache-2.0
- Notes - This Pulumi package is based on the databricks Terraform Provider.