
databricks.Entitlements

Databricks v1.56.0 published on Tuesday, Nov 12, 2024 by Pulumi

    This resource allows you to set entitlements for an existing databricks_user, databricks.Group or databricks_service_principal.

    Define a principal's entitlements either with databricks.Entitlements or directly within one of databricks_user, databricks.Group or databricks_service_principal, but not both: having entitlements defined in both resources results in non-deterministic behaviour.

    Example Usage

    Setting entitlements for a regular user:

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const me = databricks.getUser({
        userName: "me@example.com",
    });
    const meEntitlements = new databricks.Entitlements("me", {
        userId: me.then(me => me.id),
        allowClusterCreate: true,
        allowInstancePoolCreate: true,
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    me = databricks.get_user(user_name="me@example.com")
    me_entitlements = databricks.Entitlements("me",
        user_id=me.id,
        allow_cluster_create=True,
        allow_instance_pool_create=True)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		me, err := databricks.LookupUser(ctx, &databricks.LookupUserArgs{
    			UserName: pulumi.StringRef("me@example.com"),
    		}, nil)
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewEntitlements(ctx, "me", &databricks.EntitlementsArgs{
    			UserId:                  pulumi.String(me.Id),
    			AllowClusterCreate:      pulumi.Bool(true),
    			AllowInstancePoolCreate: pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var me = Databricks.GetUser.Invoke(new()
        {
            UserName = "me@example.com",
        });
    
        var meEntitlements = new Databricks.Entitlements("me", new()
        {
            UserId = me.Apply(getUserResult => getUserResult.Id),
            AllowClusterCreate = true,
            AllowInstancePoolCreate = true,
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.DatabricksFunctions;
    import com.pulumi.databricks.inputs.GetUserArgs;
    import com.pulumi.databricks.Entitlements;
    import com.pulumi.databricks.EntitlementsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var me = DatabricksFunctions.getUser(GetUserArgs.builder()
                .userName("me@example.com")
                .build());
    
            var meEntitlements = new Entitlements("meEntitlements", EntitlementsArgs.builder()
                .userId(me.applyValue(getUserResult -> getUserResult.id()))
                .allowClusterCreate(true)
                .allowInstancePoolCreate(true)
                .build());
    
        }
    }
    
    resources:
      meEntitlements:
        type: databricks:Entitlements
        name: me
        properties:
          userId: ${me.id}
          allowClusterCreate: true
          allowInstancePoolCreate: true
    variables:
      me:
        fn::invoke:
          Function: databricks:getUser
          Arguments:
            userName: me@example.com
    

    Setting entitlements for a service principal:

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const _this = databricks.getServicePrincipal({
        applicationId: "11111111-2222-3333-4444-555666777888",
    });
    const thisEntitlements = new databricks.Entitlements("this", {
        servicePrincipalId: _this.then(_this => _this.spId),
        allowClusterCreate: true,
        allowInstancePoolCreate: true,
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    this = databricks.get_service_principal(application_id="11111111-2222-3333-4444-555666777888")
    this_entitlements = databricks.Entitlements("this",
        service_principal_id=this.sp_id,
        allow_cluster_create=True,
        allow_instance_pool_create=True)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		this, err := databricks.LookupServicePrincipal(ctx, &databricks.LookupServicePrincipalArgs{
    			ApplicationId: pulumi.StringRef("11111111-2222-3333-4444-555666777888"),
    		}, nil)
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewEntitlements(ctx, "this", &databricks.EntitlementsArgs{
    			ServicePrincipalId:      pulumi.String(this.SpId),
    			AllowClusterCreate:      pulumi.Bool(true),
    			AllowInstancePoolCreate: pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var @this = Databricks.GetServicePrincipal.Invoke(new()
        {
            ApplicationId = "11111111-2222-3333-4444-555666777888",
        });
    
        var thisEntitlements = new Databricks.Entitlements("this", new()
        {
        ServicePrincipalId = @this.Apply(getServicePrincipalResult => getServicePrincipalResult.SpId),
            AllowClusterCreate = true,
            AllowInstancePoolCreate = true,
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.DatabricksFunctions;
    import com.pulumi.databricks.inputs.GetServicePrincipalArgs;
    import com.pulumi.databricks.Entitlements;
    import com.pulumi.databricks.EntitlementsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
        final var this_ = DatabricksFunctions.getServicePrincipal(GetServicePrincipalArgs.builder()
                .applicationId("11111111-2222-3333-4444-555666777888")
                .build());
    
            var thisEntitlements = new Entitlements("thisEntitlements", EntitlementsArgs.builder()
            .servicePrincipalId(this_.applyValue(getServicePrincipalResult -> getServicePrincipalResult.spId()))
                .allowClusterCreate(true)
                .allowInstancePoolCreate(true)
                .build());
    
        }
    }
    
    resources:
      thisEntitlements:
        type: databricks:Entitlements
        name: this
        properties:
          servicePrincipalId: ${this.spId}
          allowClusterCreate: true
          allowInstancePoolCreate: true
    variables:
      this:
        fn::invoke:
          Function: databricks:getServicePrincipal
          Arguments:
            applicationId: 11111111-2222-3333-4444-555666777888
    

    Setting entitlements for all users in a workspace by referencing the special users databricks.Group:

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const users = databricks.getGroup({
        displayName: "users",
    });
    const workspace_users = new databricks.Entitlements("workspace-users", {
        groupId: users.then(users => users.id),
        allowClusterCreate: true,
        allowInstancePoolCreate: true,
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    users = databricks.get_group(display_name="users")
    workspace_users = databricks.Entitlements("workspace-users",
        group_id=users.id,
        allow_cluster_create=True,
        allow_instance_pool_create=True)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		users, err := databricks.LookupGroup(ctx, &databricks.LookupGroupArgs{
    			DisplayName: "users",
    		}, nil)
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewEntitlements(ctx, "workspace-users", &databricks.EntitlementsArgs{
    			GroupId:                 pulumi.String(users.Id),
    			AllowClusterCreate:      pulumi.Bool(true),
    			AllowInstancePoolCreate: pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var users = Databricks.GetGroup.Invoke(new()
        {
            DisplayName = "users",
        });
    
        var workspace_users = new Databricks.Entitlements("workspace-users", new()
        {
            GroupId = users.Apply(getGroupResult => getGroupResult.Id),
            AllowClusterCreate = true,
            AllowInstancePoolCreate = true,
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.DatabricksFunctions;
    import com.pulumi.databricks.inputs.GetGroupArgs;
    import com.pulumi.databricks.Entitlements;
    import com.pulumi.databricks.EntitlementsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var users = DatabricksFunctions.getGroup(GetGroupArgs.builder()
                .displayName("users")
                .build());
    
            var workspace_users = new Entitlements("workspace-users", EntitlementsArgs.builder()
                .groupId(users.applyValue(getGroupResult -> getGroupResult.id()))
                .allowClusterCreate(true)
                .allowInstancePoolCreate(true)
                .build());
    
        }
    }
    
    resources:
      workspace-users:
        type: databricks:Entitlements
        properties:
          groupId: ${users.id}
          allowClusterCreate: true
          allowInstancePoolCreate: true
    variables:
      users:
        fn::invoke:
          Function: databricks:getGroup
          Arguments:
            displayName: users
    

    The following resources are often used in the same context:

    • End to end workspace management guide.
    • databricks.Group to manage groups in Databricks Workspace or Account Console (for AWS deployments).
    • databricks.Group data to retrieve information about databricks.Group members, entitlements and instance profiles.
    • databricks.GroupInstanceProfile to attach databricks.InstanceProfile (AWS) to databricks_group.
    • databricks.GroupMember to attach users and groups as group members.
    • databricks.InstanceProfile to manage AWS EC2 instance profiles with which users can launch databricks.Cluster and access data, such as databricks_mount.
    • databricks.User data to retrieve information about databricks_user.

    Create Entitlements Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Entitlements(name: string, args?: EntitlementsArgs, opts?: CustomResourceOptions);
    @overload
    def Entitlements(resource_name: str,
                     args: Optional[EntitlementsArgs] = None,
                     opts: Optional[ResourceOptions] = None)
    
    @overload
    def Entitlements(resource_name: str,
                     opts: Optional[ResourceOptions] = None,
                     allow_cluster_create: Optional[bool] = None,
                     allow_instance_pool_create: Optional[bool] = None,
                     databricks_sql_access: Optional[bool] = None,
                     group_id: Optional[str] = None,
                     service_principal_id: Optional[str] = None,
                     user_id: Optional[str] = None,
                     workspace_access: Optional[bool] = None)
    func NewEntitlements(ctx *Context, name string, args *EntitlementsArgs, opts ...ResourceOption) (*Entitlements, error)
    public Entitlements(string name, EntitlementsArgs? args = null, CustomResourceOptions? opts = null)
    public Entitlements(String name, EntitlementsArgs args)
    public Entitlements(String name, EntitlementsArgs args, CustomResourceOptions options)
    
    type: databricks:Entitlements
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args EntitlementsArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args EntitlementsArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args EntitlementsArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args EntitlementsArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args EntitlementsArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var entitlementsResource = new Databricks.Entitlements("entitlementsResource", new()
    {
        AllowClusterCreate = false,
        AllowInstancePoolCreate = false,
        DatabricksSqlAccess = false,
        GroupId = "string",
        ServicePrincipalId = "string",
        UserId = "string",
        WorkspaceAccess = false,
    });
    
    example, err := databricks.NewEntitlements(ctx, "entitlementsResource", &databricks.EntitlementsArgs{
    	AllowClusterCreate:      pulumi.Bool(false),
    	AllowInstancePoolCreate: pulumi.Bool(false),
    	DatabricksSqlAccess:     pulumi.Bool(false),
    	GroupId:                 pulumi.String("string"),
    	ServicePrincipalId:      pulumi.String("string"),
    	UserId:                  pulumi.String("string"),
    	WorkspaceAccess:         pulumi.Bool(false),
    })
    
    var entitlementsResource = new Entitlements("entitlementsResource", EntitlementsArgs.builder()
        .allowClusterCreate(false)
        .allowInstancePoolCreate(false)
        .databricksSqlAccess(false)
        .groupId("string")
        .servicePrincipalId("string")
        .userId("string")
        .workspaceAccess(false)
        .build());
    
    entitlements_resource = databricks.Entitlements("entitlementsResource",
        allow_cluster_create=False,
        allow_instance_pool_create=False,
        databricks_sql_access=False,
        group_id="string",
        service_principal_id="string",
        user_id="string",
        workspace_access=False)
    
    const entitlementsResource = new databricks.Entitlements("entitlementsResource", {
        allowClusterCreate: false,
        allowInstancePoolCreate: false,
        databricksSqlAccess: false,
        groupId: "string",
        servicePrincipalId: "string",
        userId: "string",
        workspaceAccess: false,
    });
    
    type: databricks:Entitlements
    properties:
        allowClusterCreate: false
        allowInstancePoolCreate: false
        databricksSqlAccess: false
        groupId: string
        servicePrincipalId: string
        userId: string
        workspaceAccess: false
    

    Entitlements Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The Entitlements resource accepts the following input properties:

    AllowClusterCreate bool
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    AllowInstancePoolCreate bool
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    DatabricksSqlAccess bool
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    GroupId string
    Canonical unique identifier for the group.
    ServicePrincipalId string

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    UserId string
    Canonical unique identifier for the user.
    WorkspaceAccess bool
    This is a field to allow the principal to have access to Databricks Workspace.
    AllowClusterCreate bool
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    AllowInstancePoolCreate bool
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    DatabricksSqlAccess bool
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    GroupId string
    Canonical unique identifier for the group.
    ServicePrincipalId string

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    UserId string
    Canonical unique identifier for the user.
    WorkspaceAccess bool
    This is a field to allow the principal to have access to Databricks Workspace.
    allowClusterCreate Boolean
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate Boolean
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess Boolean
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    groupId String
    Canonical unique identifier for the group.
    servicePrincipalId String

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    userId String
    Canonical unique identifier for the user.
    workspaceAccess Boolean
    This is a field to allow the principal to have access to Databricks Workspace.
    allowClusterCreate boolean
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate boolean
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess boolean
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    groupId string
    Canonical unique identifier for the group.
    servicePrincipalId string

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    userId string
    Canonical unique identifier for the user.
    workspaceAccess boolean
    This is a field to allow the principal to have access to Databricks Workspace.
    allow_cluster_create bool
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allow_instance_pool_create bool
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricks_sql_access bool
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    group_id str
    Canonical unique identifier for the group.
    service_principal_id str

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    user_id str
    Canonical unique identifier for the user.
    workspace_access bool
    This is a field to allow the principal to have access to Databricks Workspace.
    allowClusterCreate Boolean
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate Boolean
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess Boolean
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    groupId String
    Canonical unique identifier for the group.
    servicePrincipalId String

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    userId String
    Canonical unique identifier for the user.
    workspaceAccess Boolean
    This is a field to allow the principal to have access to Databricks Workspace.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Entitlements resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing Entitlements Resource

    Get an existing Entitlements resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: EntitlementsState, opts?: CustomResourceOptions): Entitlements
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            allow_cluster_create: Optional[bool] = None,
            allow_instance_pool_create: Optional[bool] = None,
            databricks_sql_access: Optional[bool] = None,
            group_id: Optional[str] = None,
            service_principal_id: Optional[str] = None,
            user_id: Optional[str] = None,
            workspace_access: Optional[bool] = None) -> Entitlements
    func GetEntitlements(ctx *Context, name string, id IDInput, state *EntitlementsState, opts ...ResourceOption) (*Entitlements, error)
    public static Entitlements Get(string name, Input<string> id, EntitlementsState? state, CustomResourceOptions? opts = null)
    public static Entitlements get(String name, Output<String> id, EntitlementsState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
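
    For example, a minimal TypeScript sketch that looks up entitlements previously imported for a user (the resource name and ID below are placeholders):

    import * as databricks from "@pulumi/databricks";
    
    // Bring an existing entitlements assignment under management using its
    // synthetic identifier (see the Import section for the identifier formats).
    const existing = databricks.Entitlements.get("existing", "user/1234567890");
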
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AllowClusterCreate bool
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    AllowInstancePoolCreate bool
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    DatabricksSqlAccess bool
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    GroupId string
    Canonical unique identifier for the group.
    ServicePrincipalId string

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    UserId string
    Canonical unique identifier for the user.
    WorkspaceAccess bool
    This is a field to allow the principal to have access to Databricks Workspace.
    AllowClusterCreate bool
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    AllowInstancePoolCreate bool
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    DatabricksSqlAccess bool
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    GroupId string
    Canonical unique identifier for the group.
    ServicePrincipalId string

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    UserId string
    Canonical unique identifier for the user.
    WorkspaceAccess bool
    This is a field to allow the principal to have access to Databricks Workspace.
    allowClusterCreate Boolean
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate Boolean
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess Boolean
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    groupId String
    Canonical unique identifier for the group.
    servicePrincipalId String

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    userId String
    Canonical unique identifier for the user.
    workspaceAccess Boolean
    This is a field to allow the principal to have access to Databricks Workspace.
    allowClusterCreate boolean
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate boolean
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess boolean
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    groupId string
    Canonical unique identifier for the group.
    servicePrincipalId string

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    userId string
    Canonical unique identifier for the user.
    workspaceAccess boolean
    This is a field to allow the principal to have access to Databricks Workspace.
    allow_cluster_create bool
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allow_instance_pool_create bool
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricks_sql_access bool
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    group_id str
    Canonical unique identifier for the group.
    service_principal_id str

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    user_id str
    Canonical unique identifier for the user.
    workspace_access bool
    This is a field to allow the principal to have access to Databricks Workspace.
    allowClusterCreate Boolean
    Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and cluster_id argument. Everyone without allow_cluster_create argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
    allowInstancePoolCreate Boolean
    Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with databricks.Permissions and instance_pool_id argument.
    databricksSqlAccess Boolean
    This is a field to allow the principal to have access to Databricks SQL feature in User Interface and through databricks_sql_endpoint.
    groupId String
    Canonical unique identifier for the group.
    servicePrincipalId String

    Canonical unique identifier for the service principal.

    The following entitlements are available.

    userId String
    Canonical unique identifier for the user.
    workspaceAccess Boolean
    This is a field to allow the principal to have access to Databricks Workspace.

    Import

    The resource can be imported using a synthetic identifier. Examples of valid synthetic identifiers are:

    • user/user_id - user user_id.

    • group/group_id - group group_id.

    • spn/spn_id - service principal spn_id.

    bash

    $ pulumi import databricks:index/entitlements:Entitlements me user/<user-id>
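
    Entitlements attached to a group or a service principal follow the same pattern, using the group/ and spn/ prefixes (the resource names and IDs below are placeholders):

    $ pulumi import databricks:index/entitlements:Entitlements workspace-users group/<group-id>
    $ pulumi import databricks:index/entitlements:Entitlements this spn/<spn-id>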
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.