databricks.Secret

Databricks v1.56.0 published on Tuesday, Nov 12, 2024 by Pulumi

    With this resource you can insert a secret under the provided scope with the given name. If a secret already exists with the same name, this command overwrites the existing secret’s value. The server encrypts the secret using the secret scope’s encryption settings before storing it. You must have WRITE or MANAGE permission on the secret scope. The secret key must consist of alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The maximum allowed secret value size is 128 KB, and the maximum number of secrets in a given scope is 1000.

    You can read a secret value only from within a command on a cluster (for example, through a notebook); there is no API to read a secret value outside of a cluster. Permissions are applied based on who is invoking the command, and you must have at least READ permission. Please consult the Secrets User Guide for more details.
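
    For illustration, the stored value can later be read from a notebook attached to a cluster using the Databricks secrets utility. A minimal sketch (this runs in a notebook, not in the Pulumi program; the scope and key names match the Example Usage below):

    # Runs inside a Databricks notebook attached to a cluster, not in the Pulumi program.
    # Scope and key names follow the Example Usage below.
    value = dbutils.secrets.get(scope="application-secret-scope", key="publishing_api")
    # Printing the value in notebook output shows it as [REDACTED].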

    Example Usage

    In the examples below, example refers to a value defined elsewhere in your program (for instance, an output of another resource or a configuration value) that supplies the secret string.

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const app = new databricks.SecretScope("app", {name: "application-secret-scope"});
    const publishingApi = new databricks.Secret("publishing_api", {
        key: "publishing_api",
        stringValue: example.value,
        scope: app.id,
    });
    const _this = new databricks.Cluster("this", {sparkConf: {
        "fs.azure.account.oauth2.client.secret": publishingApi.configReference,
    }});
    
    import pulumi
    import pulumi_databricks as databricks
    
    app = databricks.SecretScope("app", name="application-secret-scope")
    publishing_api = databricks.Secret("publishing_api",
        key="publishing_api",
        string_value=example["value"],
        scope=app.id)
    this = databricks.Cluster("this", spark_conf={
        "fs.azure.account.oauth2.client.secret": publishing_api.config_reference,
    })
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		app, err := databricks.NewSecretScope(ctx, "app", &databricks.SecretScopeArgs{
    			Name: pulumi.String("application-secret-scope"),
    		})
    		if err != nil {
    			return err
    		}
    		publishingApi, err := databricks.NewSecret(ctx, "publishing_api", &databricks.SecretArgs{
    			Key:         pulumi.String("publishing_api"),
    			StringValue: pulumi.Any(example.Value),
    			Scope:       app.ID(),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewCluster(ctx, "this", &databricks.ClusterArgs{
    			SparkConf: pulumi.StringMap{
    				"fs.azure.account.oauth2.client.secret": publishingApi.ConfigReference,
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var app = new Databricks.SecretScope("app", new()
        {
            Name = "application-secret-scope",
        });
    
        var publishingApi = new Databricks.Secret("publishing_api", new()
        {
            Key = "publishing_api",
            StringValue = example.Value,
            Scope = app.Id,
        });
    
        var @this = new Databricks.Cluster("this", new()
        {
            SparkConf = 
            {
                { "fs.azure.account.oauth2.client.secret", publishingApi.ConfigReference },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.SecretScope;
    import com.pulumi.databricks.SecretScopeArgs;
    import com.pulumi.databricks.Secret;
    import com.pulumi.databricks.SecretArgs;
    import com.pulumi.databricks.Cluster;
    import com.pulumi.databricks.ClusterArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var app = new SecretScope("app", SecretScopeArgs.builder()
                .name("application-secret-scope")
                .build());
    
            var publishingApi = new Secret("publishingApi", SecretArgs.builder()
                .key("publishing_api")
                .stringValue(example.value())
                .scope(app.id())
                .build());
    
            var this_ = new Cluster("this", ClusterArgs.builder()
                .sparkConf(Map.of("fs.azure.account.oauth2.client.secret", publishingApi.configReference()))
                .build());
    
        }
    }
    
    resources:
      app:
        type: databricks:SecretScope
        properties:
          name: application-secret-scope
      publishingApi:
        type: databricks:Secret
        name: publishing_api
        properties:
          key: publishing_api
          stringValue: ${example.value}
          scope: ${app.id}
      this:
        type: databricks:Cluster
        properties:
          sparkConf:
            fs.azure.account.oauth2.client.secret: ${publishingApi.configReference}
    

    The following resources are often used in the same context: databricks.SecretScope to create the scope that holds the secret, and databricks.SecretAcl to manage access to that scope.

    Create Secret Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Secret(name: string, args: SecretArgs, opts?: CustomResourceOptions);
    @overload
    def Secret(resource_name: str,
               args: SecretArgs,
               opts: Optional[ResourceOptions] = None)
    
    @overload
    def Secret(resource_name: str,
               opts: Optional[ResourceOptions] = None,
               key: Optional[str] = None,
               scope: Optional[str] = None,
               string_value: Optional[str] = None)
    func NewSecret(ctx *Context, name string, args SecretArgs, opts ...ResourceOption) (*Secret, error)
    public Secret(string name, SecretArgs args, CustomResourceOptions? opts = null)
    public Secret(String name, SecretArgs args)
    public Secret(String name, SecretArgs args, CustomResourceOptions options)
    
    type: databricks:Secret
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args SecretArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args SecretArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args SecretArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args SecretArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args SecretArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var secretResource = new Databricks.Secret("secretResource", new()
    {
        Key = "string",
        Scope = "string",
        StringValue = "string",
    });
    
    example, err := databricks.NewSecret(ctx, "secretResource", &databricks.SecretArgs{
    	Key:         pulumi.String("string"),
    	Scope:       pulumi.String("string"),
    	StringValue: pulumi.String("string"),
    })
    
    var secretResource = new Secret("secretResource", SecretArgs.builder()
        .key("string")
        .scope("string")
        .stringValue("string")
        .build());
    
    secret_resource = databricks.Secret("secretResource",
        key="string",
        scope="string",
        string_value="string")
    
    const secretResource = new databricks.Secret("secretResource", {
        key: "string",
        scope: "string",
        stringValue: "string",
    });
    
    type: databricks:Secret
    properties:
        key: string
        scope: string
        stringValue: string
    

    Secret Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The Secret resource accepts the following input properties:

    Key string
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    Scope string
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    StringValue string
    (String) super secret sensitive value.
    Key string
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    Scope string
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    StringValue string
    (String) super secret sensitive value.
    key String
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    scope String
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    stringValue String
    (String) super secret sensitive value.
    key string
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    scope string
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    stringValue string
    (String) super secret sensitive value.
    key str
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    scope str
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    string_value str
    (String) super secret sensitive value.
    key String
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    scope String
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    stringValue String
    (String) super secret sensitive value.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Secret resource produces the following output properties (a short usage sketch for configReference follows this list):

    ConfigReference string
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    Id string
    The provider-assigned unique ID for this managed resource.
    LastUpdatedTimestamp int
    (Integer) time secret was updated
    ConfigReference string
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    Id string
    The provider-assigned unique ID for this managed resource.
    LastUpdatedTimestamp int
    (Integer) time secret was updated
    configReference String
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    id String
    The provider-assigned unique ID for this managed resource.
    lastUpdatedTimestamp Integer
    (Integer) time secret was updated
    configReference string
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    id string
    The provider-assigned unique ID for this managed resource.
    lastUpdatedTimestamp number
    (Integer) time secret was updated
    config_reference str
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    id str
    The provider-assigned unique ID for this managed resource.
    last_updated_timestamp int
    (Integer) time secret was updated
    configReference String
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    id String
    The provider-assigned unique ID for this managed resource.
    lastUpdatedTimestamp Number
    (Integer) time secret was updated
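
    The configReference output expands to the literal string {{secrets/<scope>/<key>}}, which Databricks resolves to the secret value when the cluster starts, so the plaintext never appears in the cluster specification. A minimal sketch of wiring it into a cluster environment variable (the resource names, runtime version, and node type below are placeholders, not values from this page):

    import pulumi_databricks as databricks

    # Placeholder scope and secret; any databricks.Secret exposes config_reference.
    scope = databricks.SecretScope("example-scope", name="example-scope")
    token = databricks.Secret("service-token",
        key="service_token",
        string_value="placeholder-value",  # normally sourced from config or another resource
        scope=scope.id)

    # config_reference renders as {{secrets/example-scope/service_token}}; Spark resolves
    # it at cluster start instead of embedding the plaintext value.
    cluster = databricks.Cluster("worker",
        spark_version="15.4.x-scala2.12",  # placeholder runtime and node settings
        node_type_id="Standard_DS3_v2",
        num_workers=1,
        spark_env_vars={
            "SERVICE_TOKEN": token.config_reference,
        })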

    Look up Existing Secret Resource

    Get an existing Secret resource’s state with the given name, ID, and optional extra properties used to qualify the lookup. A short lookup sketch follows the list of state arguments below.

    public static get(name: string, id: Input<ID>, state?: SecretState, opts?: CustomResourceOptions): Secret
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            config_reference: Optional[str] = None,
            key: Optional[str] = None,
            last_updated_timestamp: Optional[int] = None,
            scope: Optional[str] = None,
            string_value: Optional[str] = None) -> Secret
    func GetSecret(ctx *Context, name string, id IDInput, state *SecretState, opts ...ResourceOption) (*Secret, error)
    public static Secret Get(string name, Input<string> id, SecretState? state, CustomResourceOptions? opts = null)
    public static Secret get(String name, Output<String> id, SecretState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    ConfigReference string
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    Key string
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    LastUpdatedTimestamp int
    (Integer) time secret was updated
    Scope string
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    StringValue string
    (String) super secret sensitive value.
    ConfigReference string
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    Key string
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    LastUpdatedTimestamp int
    (Integer) time secret was updated
    Scope string
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    StringValue string
    (String) super secret sensitive value.
    configReference String
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    key String
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    lastUpdatedTimestamp Integer
    (Integer) time secret was updated
    scope String
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    stringValue String
    (String) super secret sensitive value.
    configReference string
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    key string
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    lastUpdatedTimestamp number
    (Integer) time secret was updated
    scope string
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    stringValue string
    (String) super secret sensitive value.
    config_reference str
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    key str
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    last_updated_timestamp int
    (Integer) time secret was updated
    scope str
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    string_value str
    (String) super secret sensitive value.
    configReference String
    (String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.
    key String
    (String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    lastUpdatedTimestamp Number
    (Integer) time secret was updated
    scope String
    (String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.
    stringValue String
    (String) super secret sensitive value.
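
    As a sketch of the lookup above, an existing secret can be pulled into a program by name and provider ID. The ID below is a placeholder and follows the scopeName|||secretKey shape described under Import:

    import pulumi
    import pulumi_databricks as databricks

    # Look up an existing secret by logical name and provider ID (placeholder ID shown).
    existing = databricks.Secret.get("existing-secret",
        id="application-secret-scope|||publishing_api")

    # Outputs such as config_reference are then available on the looked-up resource.
    pulumi.export("secretRef", existing.config_reference)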

    Import

    The secret resource can be imported using the scopeName|||secretKey combination. This format may change in future versions.

    bash

    $ pulumi import databricks:index/secret:Secret app "scopeName|||secretKey"
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.