databricks.SqlGlobalConfig
This resource configures the security policy, databricks_instance_profile, and data access properties for all databricks.SqlEndpoint resources in a workspace. Note that changing the parameters of this resource restarts all running databricks_sql_endpoint. To use this resource you need to be an administrator.
Example Usage
AWS example
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const _this = new databricks.SqlGlobalConfig("this", {
securityPolicy: "DATA_ACCESS_CONTROL",
instanceProfileArn: "arn:....",
dataAccessConfig: {
"spark.sql.session.timeZone": "UTC",
},
});
import pulumi
import pulumi_databricks as databricks
this = databricks.SqlGlobalConfig("this",
security_policy="DATA_ACCESS_CONTROL",
instance_profile_arn="arn:....",
data_access_config={
"spark.sql.session.timeZone": "UTC",
})
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := databricks.NewSqlGlobalConfig(ctx, "this", &databricks.SqlGlobalConfigArgs{
SecurityPolicy: pulumi.String("DATA_ACCESS_CONTROL"),
InstanceProfileArn: pulumi.String("arn:...."),
DataAccessConfig: pulumi.StringMap{
"spark.sql.session.timeZone": pulumi.String("UTC"),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var @this = new Databricks.SqlGlobalConfig("this", new()
{
SecurityPolicy = "DATA_ACCESS_CONTROL",
InstanceProfileArn = "arn:....",
DataAccessConfig =
{
{ "spark.sql.session.timeZone", "UTC" },
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.SqlGlobalConfig;
import com.pulumi.databricks.SqlGlobalConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var this_ = new SqlGlobalConfig("this", SqlGlobalConfigArgs.builder()
.securityPolicy("DATA_ACCESS_CONTROL")
.instanceProfileArn("arn:....")
.dataAccessConfig(Map.of("spark.sql.session.timeZone", "UTC"))
.build());
}
}
resources:
this:
type: databricks:SqlGlobalConfig
properties:
securityPolicy: DATA_ACCESS_CONTROL
instanceProfileArn: arn:....
dataAccessConfig:
spark.sql.session.timeZone: UTC
Azure example
For Azure, you should use the data_access_config to provide the service principal configuration. You can use the Databricks SQL Admin Console UI to help you generate the right configuration values.
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const _this = new databricks.SqlGlobalConfig("this", {
securityPolicy: "DATA_ACCESS_CONTROL",
dataAccessConfig: {
"spark.hadoop.fs.azure.account.auth.type": "OAuth",
"spark.hadoop.fs.azure.account.oauth.provider.type": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
"spark.hadoop.fs.azure.account.oauth2.client.id": applicationId,
"spark.hadoop.fs.azure.account.oauth2.client.secret": `{{secrets/${secretScope}/${secretKey}}}`,
"spark.hadoop.fs.azure.account.oauth2.client.endpoint": `https://login.microsoftonline.com/${tenantId}/oauth2/token`,
},
sqlConfigParams: {
ANSI_MODE: "true",
},
});
import pulumi
import pulumi_databricks as databricks
this = databricks.SqlGlobalConfig("this",
security_policy="DATA_ACCESS_CONTROL",
data_access_config={
"spark.hadoop.fs.azure.account.auth.type": "OAuth",
"spark.hadoop.fs.azure.account.oauth.provider.type": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
"spark.hadoop.fs.azure.account.oauth2.client.id": application_id,
"spark.hadoop.fs.azure.account.oauth2.client.secret": f"{{{{secrets/{secret_scope}/{secret_key}}}}}",
"spark.hadoop.fs.azure.account.oauth2.client.endpoint": f"https://login.microsoftonline.com/{tenant_id}/oauth2/token",
},
sql_config_params={
"ANSI_MODE": "true",
})
package main
import (
"fmt"
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := databricks.NewSqlGlobalConfig(ctx, "this", &databricks.SqlGlobalConfigArgs{
SecurityPolicy: pulumi.String("DATA_ACCESS_CONTROL"),
DataAccessConfig: pulumi.StringMap{
"spark.hadoop.fs.azure.account.auth.type": pulumi.String("OAuth"),
"spark.hadoop.fs.azure.account.oauth.provider.type": pulumi.String("org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider"),
"spark.hadoop.fs.azure.account.oauth2.client.id": pulumi.Any(applicationId),
"spark.hadoop.fs.azure.account.oauth2.client.secret": pulumi.Sprintf("{{secrets/%v/%v}}", secretScope, secretKey),
"spark.hadoop.fs.azure.account.oauth2.client.endpoint": pulumi.Sprintf("https://login.microsoftonline.com/%v/oauth2/token", tenantId),
},
SqlConfigParams: pulumi.StringMap{
"ANSI_MODE": pulumi.String("true"),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var @this = new Databricks.SqlGlobalConfig("this", new()
{
SecurityPolicy = "DATA_ACCESS_CONTROL",
DataAccessConfig =
{
{ "spark.hadoop.fs.azure.account.auth.type", "OAuth" },
{ "spark.hadoop.fs.azure.account.oauth.provider.type", "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider" },
{ "spark.hadoop.fs.azure.account.oauth2.client.id", applicationId },
{ "spark.hadoop.fs.azure.account.oauth2.client.secret", $"{{{{secrets/{secretScope}/{secretKey}}}}}" },
{ "spark.hadoop.fs.azure.account.oauth2.client.endpoint", $"https://login.microsoftonline.com/{tenantId}/oauth2/token" },
},
SqlConfigParams =
{
{ "ANSI_MODE", "true" },
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.SqlGlobalConfig;
import com.pulumi.databricks.SqlGlobalConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var this_ = new SqlGlobalConfig("this", SqlGlobalConfigArgs.builder()
.securityPolicy("DATA_ACCESS_CONTROL")
.dataAccessConfig(Map.ofEntries(
Map.entry("spark.hadoop.fs.azure.account.auth.type", "OAuth"),
Map.entry("spark.hadoop.fs.azure.account.oauth.provider.type", "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider"),
Map.entry("spark.hadoop.fs.azure.account.oauth2.client.id", applicationId),
Map.entry("spark.hadoop.fs.azure.account.oauth2.client.secret", String.format("{{secrets/%s/%s}}", secretScope, secretKey)),
Map.entry("spark.hadoop.fs.azure.account.oauth2.client.endpoint", String.format("https://login.microsoftonline.com/%s/oauth2/token", tenantId))
))
.sqlConfigParams(Map.of("ANSI_MODE", "true"))
.build());
}
}
resources:
this:
type: databricks:SqlGlobalConfig
properties:
securityPolicy: DATA_ACCESS_CONTROL
dataAccessConfig:
spark.hadoop.fs.azure.account.auth.type: OAuth
spark.hadoop.fs.azure.account.oauth.provider.type: org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider
spark.hadoop.fs.azure.account.oauth2.client.id: ${applicationId}
spark.hadoop.fs.azure.account.oauth2.client.secret: '{{secrets/${secretScope}/${secretKey}}}'
spark.hadoop.fs.azure.account.oauth2.client.endpoint: https://login.microsoftonline.com/${tenantId}/oauth2/token
sqlConfigParams:
ANSI_MODE: 'true'
Related Resources
The following resources are often used in the same context:
- End to end workspace management guide.
- databricks.InstanceProfile to manage AWS EC2 instance profiles with which users can launch databricks.Cluster and access data, like databricks_mount.
- databricks.SqlDashboard to manage Databricks SQL Dashboards.
- databricks.SqlEndpoint to manage Databricks SQL Endpoints (see the combined sketch after this list).
- databricks.SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and more.
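Because this resource governs every databricks.SqlEndpoint in the workspace, it is typically declared alongside an endpoint. The following Python sketch is illustrative rather than taken from the provider documentation; the endpoint arguments (name, cluster_size, auto_stop_mins) are example values.
import pulumi_databricks as databricks

# Workspace-wide SQL configuration; applies to every SQL endpoint in the workspace.
global_config = databricks.SqlGlobalConfig("this",
    security_policy="DATA_ACCESS_CONTROL")

# A SQL endpoint that runs under the global configuration above.
# The sizing and auto-stop values are illustrative, not required settings.
endpoint = databricks.SqlEndpoint("example",
    name="example-endpoint",
    cluster_size="Small",
    auto_stop_mins=20)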
Create SqlGlobalConfig Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new SqlGlobalConfig(name: string, args?: SqlGlobalConfigArgs, opts?: CustomResourceOptions);
@overload
def SqlGlobalConfig(resource_name: str,
args: Optional[SqlGlobalConfigArgs] = None,
opts: Optional[ResourceOptions] = None)
@overload
def SqlGlobalConfig(resource_name: str,
opts: Optional[ResourceOptions] = None,
data_access_config: Optional[Mapping[str, str]] = None,
enable_serverless_compute: Optional[bool] = None,
google_service_account: Optional[str] = None,
instance_profile_arn: Optional[str] = None,
security_policy: Optional[str] = None,
sql_config_params: Optional[Mapping[str, str]] = None)
func NewSqlGlobalConfig(ctx *Context, name string, args *SqlGlobalConfigArgs, opts ...ResourceOption) (*SqlGlobalConfig, error)
public SqlGlobalConfig(string name, SqlGlobalConfigArgs? args = null, CustomResourceOptions? opts = null)
public SqlGlobalConfig(String name, SqlGlobalConfigArgs args)
public SqlGlobalConfig(String name, SqlGlobalConfigArgs args, CustomResourceOptions options)
type: databricks:SqlGlobalConfig
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args SqlGlobalConfigArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args SqlGlobalConfigArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args SqlGlobalConfigArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args SqlGlobalConfigArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args SqlGlobalConfigArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var sqlGlobalConfigResource = new Databricks.SqlGlobalConfig("sqlGlobalConfigResource", new()
{
DataAccessConfig =
{
{ "string", "string" },
},
GoogleServiceAccount = "string",
InstanceProfileArn = "string",
SecurityPolicy = "string",
SqlConfigParams =
{
{ "string", "string" },
},
});
example, err := databricks.NewSqlGlobalConfig(ctx, "sqlGlobalConfigResource", &databricks.SqlGlobalConfigArgs{
DataAccessConfig: pulumi.StringMap{
"string": pulumi.String("string"),
},
GoogleServiceAccount: pulumi.String("string"),
InstanceProfileArn: pulumi.String("string"),
SecurityPolicy: pulumi.String("string"),
SqlConfigParams: pulumi.StringMap{
"string": pulumi.String("string"),
},
})
var sqlGlobalConfigResource = new SqlGlobalConfig("sqlGlobalConfigResource", SqlGlobalConfigArgs.builder()
.dataAccessConfig(Map.of("string", "string"))
.googleServiceAccount("string")
.instanceProfileArn("string")
.securityPolicy("string")
.sqlConfigParams(Map.of("string", "string"))
.build());
sql_global_config_resource = databricks.SqlGlobalConfig("sqlGlobalConfigResource",
data_access_config={
"string": "string",
},
google_service_account="string",
instance_profile_arn="string",
security_policy="string",
sql_config_params={
"string": "string",
})
const sqlGlobalConfigResource = new databricks.SqlGlobalConfig("sqlGlobalConfigResource", {
dataAccessConfig: {
string: "string",
},
googleServiceAccount: "string",
instanceProfileArn: "string",
securityPolicy: "string",
sqlConfigParams: {
string: "string",
},
});
type: databricks:SqlGlobalConfig
properties:
dataAccessConfig:
string: string
googleServiceAccount: string
instanceProfileArn: string
securityPolicy: string
sqlConfigParams:
string: string
SqlGlobalConfig Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The SqlGlobalConfig resource accepts the following input properties (a short combined sketch follows the list):
- DataAccessConfig Dictionary<string, string> - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- EnableServerlessCompute bool
- GoogleServiceAccount string - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- InstanceProfileArn string - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- SecurityPolicy string - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- SqlConfigParams Dictionary<string, string> - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
- DataAccessConfig map[string]string - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- EnableServerlessCompute bool
- GoogleServiceAccount string - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- InstanceProfileArn string - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- SecurityPolicy string - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- SqlConfigParams map[string]string - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
- dataAccessConfig Map<String,String> - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- enableServerlessCompute Boolean
- googleServiceAccount String - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- instanceProfileArn String - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- securityPolicy String - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- sqlConfigParams Map<String,String> - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
- dataAccessConfig {[key: string]: string} - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- enableServerlessCompute boolean
- googleServiceAccount string - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- instanceProfileArn string - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- securityPolicy string - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- sqlConfigParams {[key: string]: string} - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
- data_access_config Mapping[str, str] - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- enable_serverless_compute bool
- google_service_account str - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- instance_profile_arn str - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- security_policy str - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- sql_config_params Mapping[str, str] - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
- dataAccessConfig Map<String> - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- enableServerlessCompute Boolean
- googleServiceAccount String - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- instanceProfileArn String - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- securityPolicy String - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- sqlConfigParams Map<String> - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
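The AWS and Azure examples above focus on data_access_config; the remaining inputs can be combined on the same resource. A minimal Python sketch follows; whether enable_serverless_compute can be turned on depends on your workspace and cloud, so treat the values below as assumptions.
import pulumi_databricks as databricks

# Minimal sketch combining the remaining inputs. Enabling serverless compute
# depends on workspace and cloud support; True here is an assumption.
config = databricks.SqlGlobalConfig("this",
    security_policy="DATA_ACCESS_CONTROL",
    enable_serverless_compute=True,
    sql_config_params={
        # Overrides apply to all sessions on all endpoints.
        "ANSI_MODE": "true",
    })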
Outputs
All input properties are implicitly available as output properties. Additionally, the SqlGlobalConfig resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing SqlGlobalConfig Resource
Get an existing SqlGlobalConfig resource's state with the given name, ID, and optional extra properties used to qualify the lookup; a short usage sketch follows the per-language reference below.
public static get(name: string, id: Input<ID>, state?: SqlGlobalConfigState, opts?: CustomResourceOptions): SqlGlobalConfig
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
data_access_config: Optional[Mapping[str, str]] = None,
enable_serverless_compute: Optional[bool] = None,
google_service_account: Optional[str] = None,
instance_profile_arn: Optional[str] = None,
security_policy: Optional[str] = None,
sql_config_params: Optional[Mapping[str, str]] = None) -> SqlGlobalConfig
func GetSqlGlobalConfig(ctx *Context, name string, id IDInput, state *SqlGlobalConfigState, opts ...ResourceOption) (*SqlGlobalConfig, error)
public static SqlGlobalConfig Get(string name, Input<string> id, SqlGlobalConfigState? state, CustomResourceOptions? opts = null)
public static SqlGlobalConfig get(String name, Output<String> id, SqlGlobalConfigState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- DataAccessConfig Dictionary<string, string> - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- EnableServerlessCompute bool
- GoogleServiceAccount string - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- InstanceProfileArn string - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- SecurityPolicy string - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- SqlConfigParams Dictionary<string, string> - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
- DataAccessConfig map[string]string - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- EnableServerlessCompute bool
- GoogleServiceAccount string - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- InstanceProfileArn string - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- SecurityPolicy string - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- SqlConfigParams map[string]string - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
- dataAccessConfig Map<String,String> - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- enableServerlessCompute Boolean
- googleServiceAccount String - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- instanceProfileArn String - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- securityPolicy String - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- sqlConfigParams Map<String,String> - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
- dataAccessConfig {[key: string]: string} - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- enableServerlessCompute boolean
- googleServiceAccount string - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- instanceProfileArn string - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- securityPolicy string - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- sqlConfigParams {[key: string]: string} - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
- data_access_config Mapping[str, str] - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- enable_serverless_compute bool
- google_service_account str - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- instance_profile_arn str - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- security_policy str - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- sql_config_params Mapping[str, str] - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
- dataAccessConfig Map<String> - Data access configuration for databricks_sql_endpoint, such as configuration for an external Hive metastore, Hadoop Filesystem configuration, etc. Please note that the list of supported configuration properties is limited, so refer to the documentation for a full list. Apply will fail if you specify a configuration property that is not permitted.
- enableServerlessCompute Boolean
- googleServiceAccount String - Used to access GCP services, such as Cloud Storage, from databricks_sql_endpoint. Please note that this parameter is only for GCP, and will generate an error if used on other clouds.
- instanceProfileArn String - databricks_instance_profile used to access storage from databricks_sql_endpoint. Please note that this parameter is only for AWS, and will generate an error if used on other clouds.
- securityPolicy String - The policy for controlling access to datasets. Default value: DATA_ACCESS_CONTROL; consult the documentation for the list of possible values.
- sqlConfigParams Map<String> - SQL Configuration Parameters let you override the default behavior for all sessions with all endpoints.
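For example, in Python the existing workspace-level configuration can be looked up by its fixed ID, global (a minimal sketch; the exported output name is arbitrary):
import pulumi
import pulumi_databricks as databricks

# Look up the existing global SQL configuration; its ID is always "global".
existing = databricks.SqlGlobalConfig.get("existing", "global")

# Any of the resulting output properties can then be used or exported.
pulumi.export("sql_security_policy", existing.security_policy)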
Import
You can import a databricks_sql_global_config resource with a command like the following (you need to use global as the ID):
$ pulumi import databricks:index/sqlGlobalConfig:SqlGlobalConfig this global
To learn more about importing existing cloud resources, see Importing resources.
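Alternatively, the same import can be expressed in code with the import_ resource option. A hedged Python sketch follows: the declared arguments must match the existing workspace configuration, otherwise the import will fail.
import pulumi
import pulumi_databricks as databricks

# Adopt the existing global SQL configuration into this stack.
# The import ID is always "global"; arguments must match the current settings.
adopted = databricks.SqlGlobalConfig("this",
    security_policy="DATA_ACCESS_CONTROL",
    opts=pulumi.ResourceOptions(import_="global"))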
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the databricks Terraform Provider.