dbtcloud.GlobalConnection
This resource can be used to create global connections, introduced in dbt Cloud in August 2024.
These connections are not linked to a specific project and can be attached to environments from different projects through the connection_id field of the dbtcloud.Environment resource.
All connection types are supported. The older resources dbtcloud.Connection, dbtcloud.BigQueryConnection, and dbtcloud.FabricConnection are now flagged as deprecated and will be removed in the next major version of the provider.
Example Usage
Official examples for TypeScript, Python, Go, and C# are coming soon; Java and YAML examples are shown below.
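In the meantime, the following is a minimal Python sketch of the pattern described in the introduction: one global connection attached to an environment through its connection_id. The project ID, dbt version, and connection values are placeholder assumptions for illustration, not values from this page.

import pulumi
import pulumi_dbtcloud as dbtcloud

# A global connection is account-level and not tied to a single project.
snowflake_conn = dbtcloud.GlobalConnection("snowflake-conn",
    name="Snowflake for analytics",
    snowflake=dbtcloud.GlobalConnectionSnowflakeArgs(
        account="my-snowflake-account",
        database="ANALYTICS",
        warehouse="TRANSFORMING",
    ))

# Any environment (even in another project) can reference it via connection_id.
# project_id and dbt_version below are placeholder assumptions; adjust to your account.
prod_env = dbtcloud.Environment("prod-env",
    project_id=12345,
    name="Production",
    dbt_version="versionless",
    type="deployment",
    connection_id=snowflake_conn.id)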
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.dbtcloud.GlobalConnection;
import com.pulumi.dbtcloud.GlobalConnectionArgs;
import com.pulumi.dbtcloud.inputs.GlobalConnectionApacheSparkArgs;
import com.pulumi.dbtcloud.inputs.GlobalConnectionAthenaArgs;
import com.pulumi.dbtcloud.inputs.GlobalConnectionBigqueryArgs;
import com.pulumi.dbtcloud.inputs.GlobalConnectionDatabricksArgs;
import com.pulumi.dbtcloud.inputs.GlobalConnectionFabricArgs;
import com.pulumi.dbtcloud.inputs.GlobalConnectionPostgresArgs;
import com.pulumi.dbtcloud.inputs.GlobalConnectionRedshiftArgs;
import com.pulumi.dbtcloud.inputs.GlobalConnectionSnowflakeArgs;
import com.pulumi.dbtcloud.inputs.GlobalConnectionStarburstArgs;
import com.pulumi.dbtcloud.inputs.GlobalConnectionSynapseArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var apacheSpark = new GlobalConnection("apacheSpark", GlobalConnectionArgs.builder()
            .name("My Apache Spark connection")
            .apacheSpark(GlobalConnectionApacheSparkArgs.builder()
                .method("http")
                .host("my-spark-host.com")
                .cluster("my-cluster")
                .connectTimeout(100)
                .build())
            .build());

        var athena = new GlobalConnection("athena", GlobalConnectionArgs.builder()
            .name("My Athena connection")
            .athena(GlobalConnectionAthenaArgs.builder()
                .regionName("us-east-1")
                .database("mydatabase")
                .s3StagingDir("my_dir")
                .workGroup("my_work_group")
                .build())
            .build());

        var bigquery = new GlobalConnection("bigquery", GlobalConnectionArgs.builder()
            .name("My BigQuery connection")
            .bigquery(GlobalConnectionBigqueryArgs.builder()
                .gcpProjectId("my-gcp-project-id")
                .timeoutSeconds(1000)
                .privateKeyId("my-private-key-id")
                .privateKey("ABCDEFGHIJKL")
                .clientEmail("my_client_email")
                .clientId("my_client_id")
                .authUri("my_auth_uri")
                .tokenUri("my_token_uri")
                .authProviderX509CertUrl("my_auth_provider_x509_cert_url")
                .clientX509CertUrl("my_client_x509_cert_url")
                .applicationId("oauth_application_id")
                .applicationSecret("oauth_secret_id")
                .build())
            .build());

        var databricks = new GlobalConnection("databricks", GlobalConnectionArgs.builder()
            .name("My Databricks connection")
            .databricks(GlobalConnectionDatabricksArgs.builder()
                .host("my-databricks-host.cloud.databricks.com")
                .httpPath("/sql/my/http/path")
                .catalog("dbt_catalog")
                .clientId("yourclientid")
                .clientSecret("yourclientsecret")
                .build())
            .build());

        var fabric = new GlobalConnection("fabric", GlobalConnectionArgs.builder()
            .name("My Fabric connection")
            .fabric(GlobalConnectionFabricArgs.builder()
                .server("my-fabric-server.com")
                .database("mydb")
                .port(1234)
                .retries(3)
                .loginTimeout(60)
                .queryTimeout(3600)
                .build())
            .build());

        var postgres = new GlobalConnection("postgres", GlobalConnectionArgs.builder()
            .name("My PostgreSQL connection")
            .postgres(GlobalConnectionPostgresArgs.builder()
                .hostname("my-postgresql-server.com")
                .port(5432)
                .dbname("my_database")
                .build())
            .build());

        var redshift = new GlobalConnection("redshift", GlobalConnectionArgs.builder()
            .name("My Redshift connection")
            .redshift(GlobalConnectionRedshiftArgs.builder()
                .hostname("my-redshift-connection.com")
                .port(5432)
                .dbname("my_database")
                .build())
            .build());

        // myPrivateLink refers to a Private Link endpoint defined or looked up elsewhere in the program.
        var snowflake = new GlobalConnection("snowflake", GlobalConnectionArgs.builder()
            .name("My Snowflake connection")
            .privateLinkEndpointId(myPrivateLink.id())
            .snowflake(GlobalConnectionSnowflakeArgs.builder()
                .account("my-snowflake-account")
                .database("MY_DATABASE")
                .warehouse("MY_WAREHOUSE")
                .clientSessionKeepAlive(false)
                .allowSso(true)
                .oauthClientId("yourclientid")
                .oauthClientSecret("yourclientsecret")
                .build())
            .build());

        var starburst = new GlobalConnection("starburst", GlobalConnectionArgs.builder()
            .name("My Starburst connection")
            .starburst(GlobalConnectionStarburstArgs.builder()
                .host("my-starburst-host.com")
                .database("mydb")
                .build())
            .build());

        var synapse = new GlobalConnection("synapse", GlobalConnectionArgs.builder()
            .name("My Synapse connection")
            .synapse(GlobalConnectionSynapseArgs.builder()
                .host("my-synapse-server.com")
                .database("mydb")
                .port(1234)
                .retries(3)
                .loginTimeout(60)
                .queryTimeout(3600)
                .build())
            .build());
    }
}
resources:
  apacheSpark:
    type: dbtcloud:GlobalConnection
    name: apache_spark
    properties:
      name: My Apache Spark connection
      apacheSpark:
        method: http
        host: my-spark-host.com
        cluster: my-cluster
        connectTimeout: 100
  athena:
    type: dbtcloud:GlobalConnection
    properties:
      name: My Athena connection
      athena:
        regionName: us-east-1
        database: mydatabase
        s3StagingDir: my_dir
        workGroup: my_work_group
  bigquery:
    type: dbtcloud:GlobalConnection
    properties:
      name: My BigQuery connection
      bigquery:
        gcpProjectId: my-gcp-project-id
        timeoutSeconds: 1000
        privateKeyId: my-private-key-id
        privateKey: ABCDEFGHIJKL
        clientEmail: my_client_email
        clientId: my_client_id
        authUri: my_auth_uri
        tokenUri: my_token_uri
        authProviderX509CertUrl: my_auth_provider_x509_cert_url
        clientX509CertUrl: my_client_x509_cert_url
        applicationId: oauth_application_id
        applicationSecret: oauth_secret_id
  databricks:
    type: dbtcloud:GlobalConnection
    properties:
      name: My Databricks connection
      databricks:
        host: my-databricks-host.cloud.databricks.com
        httpPath: /sql/my/http/path
        catalog: dbt_catalog
        clientId: yourclientid
        clientSecret: yourclientsecret
  fabric:
    type: dbtcloud:GlobalConnection
    properties:
      name: My Fabric connection
      fabric:
        server: my-fabric-server.com
        database: mydb
        port: 1234
        retries: 3
        loginTimeout: 60
        queryTimeout: 3600
  postgres:
    type: dbtcloud:GlobalConnection
    properties:
      name: My PostgreSQL connection
      postgres:
        hostname: my-postgresql-server.com
        port: 5432
        dbname: my_database
  redshift:
    type: dbtcloud:GlobalConnection
    properties:
      name: My Redshift connection
      redshift:
        hostname: my-redshift-connection.com
        port: 5432
        dbname: my_database
  snowflake:
    type: dbtcloud:GlobalConnection
    properties:
      name: My Snowflake connection
      # myPrivateLink refers to a Private Link endpoint defined elsewhere in the program
      privateLinkEndpointId: ${myPrivateLink.id}
      snowflake:
        account: my-snowflake-account
        database: MY_DATABASE
        warehouse: MY_WAREHOUSE
        clientSessionKeepAlive: false
        allowSso: true
        oauthClientId: yourclientid
        oauthClientSecret: yourclientsecret
  starburst:
    type: dbtcloud:GlobalConnection
    properties:
      name: My Starburst connection
      starburst:
        host: my-starburst-host.com
        database: mydb
  synapse:
    type: dbtcloud:GlobalConnection
    properties:
      name: My Synapse connection
      synapse:
        host: my-synapse-server.com
        database: mydb
        port: 1234
        retries: 3
        loginTimeout: 60
        queryTimeout: 3600
Create GlobalConnection Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new GlobalConnection(name: string, args?: GlobalConnectionArgs, opts?: CustomResourceOptions);
@overload
def GlobalConnection(resource_name: str,
args: Optional[GlobalConnectionArgs] = None,
opts: Optional[ResourceOptions] = None)
@overload
def GlobalConnection(resource_name: str,
opts: Optional[ResourceOptions] = None,
apache_spark: Optional[GlobalConnectionApacheSparkArgs] = None,
athena: Optional[GlobalConnectionAthenaArgs] = None,
bigquery: Optional[GlobalConnectionBigqueryArgs] = None,
databricks: Optional[GlobalConnectionDatabricksArgs] = None,
fabric: Optional[GlobalConnectionFabricArgs] = None,
name: Optional[str] = None,
oauth_configuration_id: Optional[int] = None,
postgres: Optional[GlobalConnectionPostgresArgs] = None,
private_link_endpoint_id: Optional[str] = None,
redshift: Optional[GlobalConnectionRedshiftArgs] = None,
snowflake: Optional[GlobalConnectionSnowflakeArgs] = None,
starburst: Optional[GlobalConnectionStarburstArgs] = None,
synapse: Optional[GlobalConnectionSynapseArgs] = None)
func NewGlobalConnection(ctx *Context, name string, args *GlobalConnectionArgs, opts ...ResourceOption) (*GlobalConnection, error)
public GlobalConnection(string name, GlobalConnectionArgs? args = null, CustomResourceOptions? opts = null)
public GlobalConnection(String name, GlobalConnectionArgs args)
public GlobalConnection(String name, GlobalConnectionArgs args, CustomResourceOptions options)
type: dbtcloud:GlobalConnection
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args GlobalConnectionArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args GlobalConnectionArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args GlobalConnectionArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args GlobalConnectionArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args GlobalConnectionArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var globalConnectionResource = new DbtCloud.GlobalConnection("globalConnectionResource", new()
{
ApacheSpark = new DbtCloud.Inputs.GlobalConnectionApacheSparkArgs
{
Cluster = "string",
Host = "string",
Method = "string",
Auth = "string",
ConnectRetries = 0,
ConnectTimeout = 0,
Organization = "string",
Port = 0,
User = "string",
},
Athena = new DbtCloud.Inputs.GlobalConnectionAthenaArgs
{
Database = "string",
RegionName = "string",
S3StagingDir = "string",
NumBoto3Retries = 0,
NumIcebergRetries = 0,
NumRetries = 0,
PollInterval = 0,
S3DataDir = "string",
S3DataNaming = "string",
S3TmpTableDir = "string",
SparkWorkGroup = "string",
WorkGroup = "string",
},
Bigquery = new DbtCloud.Inputs.GlobalConnectionBigqueryArgs
{
PrivateKey = "string",
TokenUri = "string",
AuthProviderX509CertUrl = "string",
AuthUri = "string",
ClientEmail = "string",
ClientId = "string",
ClientX509CertUrl = "string",
GcpProjectId = "string",
PrivateKeyId = "string",
Scopes = new[]
{
"string",
},
MaximumBytesBilled = 0,
ApplicationSecret = "string",
JobRetryDeadlineSeconds = 0,
JobCreationTimeoutSeconds = 0,
ImpersonateServiceAccount = "string",
Location = "string",
ExecutionProject = "string",
Priority = "string",
ApplicationId = "string",
DataprocRegion = "string",
Retries = 0,
GcsBucket = "string",
TimeoutSeconds = 0,
DataprocClusterName = "string",
},
Databricks = new DbtCloud.Inputs.GlobalConnectionDatabricksArgs
{
Host = "string",
HttpPath = "string",
Catalog = "string",
ClientId = "string",
ClientSecret = "string",
},
Fabric = new DbtCloud.Inputs.GlobalConnectionFabricArgs
{
Database = "string",
Server = "string",
LoginTimeout = 0,
Port = 0,
QueryTimeout = 0,
Retries = 0,
},
Name = "string",
OauthConfigurationId = 0,
Postgres = new DbtCloud.Inputs.GlobalConnectionPostgresArgs
{
Dbname = "string",
Hostname = "string",
Port = 0,
SshTunnel = new DbtCloud.Inputs.GlobalConnectionPostgresSshTunnelArgs
{
Hostname = "string",
Port = 0,
Username = "string",
Id = 0,
PublicKey = "string",
},
},
PrivateLinkEndpointId = "string",
Redshift = new DbtCloud.Inputs.GlobalConnectionRedshiftArgs
{
Dbname = "string",
Hostname = "string",
Port = 0,
SshTunnel = new DbtCloud.Inputs.GlobalConnectionRedshiftSshTunnelArgs
{
Hostname = "string",
Port = 0,
Username = "string",
Id = 0,
PublicKey = "string",
},
},
Snowflake = new DbtCloud.Inputs.GlobalConnectionSnowflakeArgs
{
Account = "string",
Database = "string",
Warehouse = "string",
AllowSso = false,
ClientSessionKeepAlive = false,
OauthClientId = "string",
OauthClientSecret = "string",
Role = "string",
},
Starburst = new DbtCloud.Inputs.GlobalConnectionStarburstArgs
{
Host = "string",
Method = "string",
Port = 0,
},
Synapse = new DbtCloud.Inputs.GlobalConnectionSynapseArgs
{
Database = "string",
Host = "string",
LoginTimeout = 0,
Port = 0,
QueryTimeout = 0,
Retries = 0,
},
});
example, err := dbtcloud.NewGlobalConnection(ctx, "globalConnectionResource", &dbtcloud.GlobalConnectionArgs{
ApacheSpark: &dbtcloud.GlobalConnectionApacheSparkArgs{
Cluster: pulumi.String("string"),
Host: pulumi.String("string"),
Method: pulumi.String("string"),
Auth: pulumi.String("string"),
ConnectRetries: pulumi.Int(0),
ConnectTimeout: pulumi.Int(0),
Organization: pulumi.String("string"),
Port: pulumi.Int(0),
User: pulumi.String("string"),
},
Athena: &dbtcloud.GlobalConnectionAthenaArgs{
Database: pulumi.String("string"),
RegionName: pulumi.String("string"),
S3StagingDir: pulumi.String("string"),
NumBoto3Retries: pulumi.Int(0),
NumIcebergRetries: pulumi.Int(0),
NumRetries: pulumi.Int(0),
PollInterval: pulumi.Int(0),
S3DataDir: pulumi.String("string"),
S3DataNaming: pulumi.String("string"),
S3TmpTableDir: pulumi.String("string"),
SparkWorkGroup: pulumi.String("string"),
WorkGroup: pulumi.String("string"),
},
Bigquery: &dbtcloud.GlobalConnectionBigqueryArgs{
PrivateKey: pulumi.String("string"),
TokenUri: pulumi.String("string"),
AuthProviderX509CertUrl: pulumi.String("string"),
AuthUri: pulumi.String("string"),
ClientEmail: pulumi.String("string"),
ClientId: pulumi.String("string"),
ClientX509CertUrl: pulumi.String("string"),
GcpProjectId: pulumi.String("string"),
PrivateKeyId: pulumi.String("string"),
Scopes: pulumi.StringArray{
pulumi.String("string"),
},
MaximumBytesBilled: pulumi.Int(0),
ApplicationSecret: pulumi.String("string"),
JobRetryDeadlineSeconds: pulumi.Int(0),
JobCreationTimeoutSeconds: pulumi.Int(0),
ImpersonateServiceAccount: pulumi.String("string"),
Location: pulumi.String("string"),
ExecutionProject: pulumi.String("string"),
Priority: pulumi.String("string"),
ApplicationId: pulumi.String("string"),
DataprocRegion: pulumi.String("string"),
Retries: pulumi.Int(0),
GcsBucket: pulumi.String("string"),
TimeoutSeconds: pulumi.Int(0),
DataprocClusterName: pulumi.String("string"),
},
Databricks: &dbtcloud.GlobalConnectionDatabricksArgs{
Host: pulumi.String("string"),
HttpPath: pulumi.String("string"),
Catalog: pulumi.String("string"),
ClientId: pulumi.String("string"),
ClientSecret: pulumi.String("string"),
},
Fabric: &dbtcloud.GlobalConnectionFabricArgs{
Database: pulumi.String("string"),
Server: pulumi.String("string"),
LoginTimeout: pulumi.Int(0),
Port: pulumi.Int(0),
QueryTimeout: pulumi.Int(0),
Retries: pulumi.Int(0),
},
Name: pulumi.String("string"),
OauthConfigurationId: pulumi.Int(0),
Postgres: &dbtcloud.GlobalConnectionPostgresArgs{
Dbname: pulumi.String("string"),
Hostname: pulumi.String("string"),
Port: pulumi.Int(0),
SshTunnel: &dbtcloud.GlobalConnectionPostgresSshTunnelArgs{
Hostname: pulumi.String("string"),
Port: pulumi.Int(0),
Username: pulumi.String("string"),
Id: pulumi.Int(0),
PublicKey: pulumi.String("string"),
},
},
PrivateLinkEndpointId: pulumi.String("string"),
Redshift: &dbtcloud.GlobalConnectionRedshiftArgs{
Dbname: pulumi.String("string"),
Hostname: pulumi.String("string"),
Port: pulumi.Int(0),
SshTunnel: &dbtcloud.GlobalConnectionRedshiftSshTunnelArgs{
Hostname: pulumi.String("string"),
Port: pulumi.Int(0),
Username: pulumi.String("string"),
Id: pulumi.Int(0),
PublicKey: pulumi.String("string"),
},
},
Snowflake: &dbtcloud.GlobalConnectionSnowflakeArgs{
Account: pulumi.String("string"),
Database: pulumi.String("string"),
Warehouse: pulumi.String("string"),
AllowSso: pulumi.Bool(false),
ClientSessionKeepAlive: pulumi.Bool(false),
OauthClientId: pulumi.String("string"),
OauthClientSecret: pulumi.String("string"),
Role: pulumi.String("string"),
},
Starburst: &dbtcloud.GlobalConnectionStarburstArgs{
Host: pulumi.String("string"),
Method: pulumi.String("string"),
Port: pulumi.Int(0),
},
Synapse: &dbtcloud.GlobalConnectionSynapseArgs{
Database: pulumi.String("string"),
Host: pulumi.String("string"),
LoginTimeout: pulumi.Int(0),
Port: pulumi.Int(0),
QueryTimeout: pulumi.Int(0),
Retries: pulumi.Int(0),
},
})
var globalConnectionResource = new GlobalConnection("globalConnectionResource", GlobalConnectionArgs.builder()
.apacheSpark(GlobalConnectionApacheSparkArgs.builder()
.cluster("string")
.host("string")
.method("string")
.auth("string")
.connectRetries(0)
.connectTimeout(0)
.organization("string")
.port(0)
.user("string")
.build())
.athena(GlobalConnectionAthenaArgs.builder()
.database("string")
.regionName("string")
.s3StagingDir("string")
.numBoto3Retries(0)
.numIcebergRetries(0)
.numRetries(0)
.pollInterval(0)
.s3DataDir("string")
.s3DataNaming("string")
.s3TmpTableDir("string")
.sparkWorkGroup("string")
.workGroup("string")
.build())
.bigquery(GlobalConnectionBigqueryArgs.builder()
.privateKey("string")
.tokenUri("string")
.authProviderX509CertUrl("string")
.authUri("string")
.clientEmail("string")
.clientId("string")
.clientX509CertUrl("string")
.gcpProjectId("string")
.privateKeyId("string")
.scopes("string")
.maximumBytesBilled(0)
.applicationSecret("string")
.jobRetryDeadlineSeconds(0)
.jobCreationTimeoutSeconds(0)
.impersonateServiceAccount("string")
.location("string")
.executionProject("string")
.priority("string")
.applicationId("string")
.dataprocRegion("string")
.retries(0)
.gcsBucket("string")
.timeoutSeconds(0)
.dataprocClusterName("string")
.build())
.databricks(GlobalConnectionDatabricksArgs.builder()
.host("string")
.httpPath("string")
.catalog("string")
.clientId("string")
.clientSecret("string")
.build())
.fabric(GlobalConnectionFabricArgs.builder()
.database("string")
.server("string")
.loginTimeout(0)
.port(0)
.queryTimeout(0)
.retries(0)
.build())
.name("string")
.oauthConfigurationId(0)
.postgres(GlobalConnectionPostgresArgs.builder()
.dbname("string")
.hostname("string")
.port(0)
.sshTunnel(GlobalConnectionPostgresSshTunnelArgs.builder()
.hostname("string")
.port(0)
.username("string")
.id(0)
.publicKey("string")
.build())
.build())
.privateLinkEndpointId("string")
.redshift(GlobalConnectionRedshiftArgs.builder()
.dbname("string")
.hostname("string")
.port(0)
.sshTunnel(GlobalConnectionRedshiftSshTunnelArgs.builder()
.hostname("string")
.port(0)
.username("string")
.id(0)
.publicKey("string")
.build())
.build())
.snowflake(GlobalConnectionSnowflakeArgs.builder()
.account("string")
.database("string")
.warehouse("string")
.allowSso(false)
.clientSessionKeepAlive(false)
.oauthClientId("string")
.oauthClientSecret("string")
.role("string")
.build())
.starburst(GlobalConnectionStarburstArgs.builder()
.host("string")
.method("string")
.port(0)
.build())
.synapse(GlobalConnectionSynapseArgs.builder()
.database("string")
.host("string")
.loginTimeout(0)
.port(0)
.queryTimeout(0)
.retries(0)
.build())
.build());
global_connection_resource = dbtcloud.GlobalConnection("globalConnectionResource",
apache_spark={
"cluster": "string",
"host": "string",
"method": "string",
"auth": "string",
"connect_retries": 0,
"connect_timeout": 0,
"organization": "string",
"port": 0,
"user": "string",
},
athena={
"database": "string",
"region_name": "string",
"s3_staging_dir": "string",
"num_boto3_retries": 0,
"num_iceberg_retries": 0,
"num_retries": 0,
"poll_interval": 0,
"s3_data_dir": "string",
"s3_data_naming": "string",
"s3_tmp_table_dir": "string",
"spark_work_group": "string",
"work_group": "string",
},
bigquery={
"private_key": "string",
"token_uri": "string",
"auth_provider_x509_cert_url": "string",
"auth_uri": "string",
"client_email": "string",
"client_id": "string",
"client_x509_cert_url": "string",
"gcp_project_id": "string",
"private_key_id": "string",
"scopes": ["string"],
"maximum_bytes_billed": 0,
"application_secret": "string",
"job_retry_deadline_seconds": 0,
"job_creation_timeout_seconds": 0,
"impersonate_service_account": "string",
"location": "string",
"execution_project": "string",
"priority": "string",
"application_id": "string",
"dataproc_region": "string",
"retries": 0,
"gcs_bucket": "string",
"timeout_seconds": 0,
"dataproc_cluster_name": "string",
},
databricks={
"host": "string",
"http_path": "string",
"catalog": "string",
"client_id": "string",
"client_secret": "string",
},
fabric={
"database": "string",
"server": "string",
"login_timeout": 0,
"port": 0,
"query_timeout": 0,
"retries": 0,
},
name="string",
oauth_configuration_id=0,
postgres={
"dbname": "string",
"hostname": "string",
"port": 0,
"ssh_tunnel": {
"hostname": "string",
"port": 0,
"username": "string",
"id": 0,
"public_key": "string",
},
},
private_link_endpoint_id="string",
redshift={
"dbname": "string",
"hostname": "string",
"port": 0,
"ssh_tunnel": {
"hostname": "string",
"port": 0,
"username": "string",
"id": 0,
"public_key": "string",
},
},
snowflake={
"account": "string",
"database": "string",
"warehouse": "string",
"allow_sso": False,
"client_session_keep_alive": False,
"oauth_client_id": "string",
"oauth_client_secret": "string",
"role": "string",
},
starburst={
"host": "string",
"method": "string",
"port": 0,
},
synapse={
"database": "string",
"host": "string",
"login_timeout": 0,
"port": 0,
"query_timeout": 0,
"retries": 0,
})
const globalConnectionResource = new dbtcloud.GlobalConnection("globalConnectionResource", {
apacheSpark: {
cluster: "string",
host: "string",
method: "string",
auth: "string",
connectRetries: 0,
connectTimeout: 0,
organization: "string",
port: 0,
user: "string",
},
athena: {
database: "string",
regionName: "string",
s3StagingDir: "string",
numBoto3Retries: 0,
numIcebergRetries: 0,
numRetries: 0,
pollInterval: 0,
s3DataDir: "string",
s3DataNaming: "string",
s3TmpTableDir: "string",
sparkWorkGroup: "string",
workGroup: "string",
},
bigquery: {
privateKey: "string",
tokenUri: "string",
authProviderX509CertUrl: "string",
authUri: "string",
clientEmail: "string",
clientId: "string",
clientX509CertUrl: "string",
gcpProjectId: "string",
privateKeyId: "string",
scopes: ["string"],
maximumBytesBilled: 0,
applicationSecret: "string",
jobRetryDeadlineSeconds: 0,
jobCreationTimeoutSeconds: 0,
impersonateServiceAccount: "string",
location: "string",
executionProject: "string",
priority: "string",
applicationId: "string",
dataprocRegion: "string",
retries: 0,
gcsBucket: "string",
timeoutSeconds: 0,
dataprocClusterName: "string",
},
databricks: {
host: "string",
httpPath: "string",
catalog: "string",
clientId: "string",
clientSecret: "string",
},
fabric: {
database: "string",
server: "string",
loginTimeout: 0,
port: 0,
queryTimeout: 0,
retries: 0,
},
name: "string",
oauthConfigurationId: 0,
postgres: {
dbname: "string",
hostname: "string",
port: 0,
sshTunnel: {
hostname: "string",
port: 0,
username: "string",
id: 0,
publicKey: "string",
},
},
privateLinkEndpointId: "string",
redshift: {
dbname: "string",
hostname: "string",
port: 0,
sshTunnel: {
hostname: "string",
port: 0,
username: "string",
id: 0,
publicKey: "string",
},
},
snowflake: {
account: "string",
database: "string",
warehouse: "string",
allowSso: false,
clientSessionKeepAlive: false,
oauthClientId: "string",
oauthClientSecret: "string",
role: "string",
},
starburst: {
host: "string",
method: "string",
port: 0,
},
synapse: {
database: "string",
host: "string",
loginTimeout: 0,
port: 0,
queryTimeout: 0,
retries: 0,
},
});
type: dbtcloud:GlobalConnection
properties:
  apacheSpark:
    auth: string
    cluster: string
    connectRetries: 0
    connectTimeout: 0
    host: string
    method: string
    organization: string
    port: 0
    user: string
  athena:
    database: string
    numBoto3Retries: 0
    numIcebergRetries: 0
    numRetries: 0
    pollInterval: 0
    regionName: string
    s3DataDir: string
    s3DataNaming: string
    s3StagingDir: string
    s3TmpTableDir: string
    sparkWorkGroup: string
    workGroup: string
  bigquery:
    applicationId: string
    applicationSecret: string
    authProviderX509CertUrl: string
    authUri: string
    clientEmail: string
    clientId: string
    clientX509CertUrl: string
    dataprocClusterName: string
    dataprocRegion: string
    executionProject: string
    gcpProjectId: string
    gcsBucket: string
    impersonateServiceAccount: string
    jobCreationTimeoutSeconds: 0
    jobRetryDeadlineSeconds: 0
    location: string
    maximumBytesBilled: 0
    priority: string
    privateKey: string
    privateKeyId: string
    retries: 0
    scopes:
      - string
    timeoutSeconds: 0
    tokenUri: string
  databricks:
    catalog: string
    clientId: string
    clientSecret: string
    host: string
    httpPath: string
  fabric:
    database: string
    loginTimeout: 0
    port: 0
    queryTimeout: 0
    retries: 0
    server: string
  name: string
  oauthConfigurationId: 0
  postgres:
    dbname: string
    hostname: string
    port: 0
    sshTunnel:
      hostname: string
      id: 0
      port: 0
      publicKey: string
      username: string
  privateLinkEndpointId: string
  redshift:
    dbname: string
    hostname: string
    port: 0
    sshTunnel:
      hostname: string
      id: 0
      port: 0
      publicKey: string
      username: string
  snowflake:
    account: string
    allowSso: false
    clientSessionKeepAlive: false
    database: string
    oauthClientId: string
    oauthClientSecret: string
    role: string
    warehouse: string
  starburst:
    host: string
    method: string
    port: 0
  synapse:
    database: string
    host: string
    loginTimeout: 0
    port: 0
    queryTimeout: 0
    retries: 0
GlobalConnection Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The GlobalConnection resource accepts the following input properties:
(Property names are listed once below in camelCase with language-neutral types. Each SDK applies its own conventions: PascalCase in C# and Go, camelCase in Java, TypeScript, and YAML, and snake_case in Python; nested configuration types take an Args suffix in Go and Python. The same conventions apply to the output, state, and supporting-type listings further down.)
- apacheSpark GlobalConnectionApacheSpark
- Apache Spark connection configuration.
- athena GlobalConnectionAthena
- Athena connection configuration.
- bigquery GlobalConnectionBigquery
- databricks GlobalConnectionDatabricks
- Databricks connection configuration
- fabric GlobalConnectionFabric
- Microsoft Fabric connection configuration.
- name string
- Connection name
- oauthConfigurationId int
- External OAuth configuration ID (only Snowflake for now)
- postgres GlobalConnectionPostgres
- PostgreSQL connection configuration.
- privateLinkEndpointId string
- Private Link Endpoint ID. This ID can be found using the privatelink_endpoint data source
- redshift GlobalConnectionRedshift
- Redshift connection configuration
- snowflake GlobalConnectionSnowflake
- Snowflake connection configuration
- starburst GlobalConnectionStarburst
- Starburst/Trino connection configuration.
- synapse GlobalConnectionSynapse
- Azure Synapse Analytics connection configuration.
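As noted above, Python accepts object inputs either as argument classes or as dictionary literals; the following sketch shows both forms with placeholder values (the resource names and Athena settings are illustrative assumptions, not values from this page).

import pulumi_dbtcloud as dbtcloud

# Using the generated args class...
athena_a = dbtcloud.GlobalConnection("athena-a",
    name="Athena via args class",
    athena=dbtcloud.GlobalConnectionAthenaArgs(
        region_name="us-east-1",
        database="mydatabase",
        s3_staging_dir="s3://my-bucket/staging/",
    ))

# ...or an equivalent dictionary literal.
athena_b = dbtcloud.GlobalConnection("athena-b",
    name="Athena via dict",
    athena={
        "region_name": "us-east-1",
        "database": "mydatabase",
        "s3_staging_dir": "s3://my-bucket/staging/",
    })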
Outputs
All input properties are implicitly available as output properties. Additionally, the GlobalConnection resource produces the following output properties:
- adapterVersion string
- Version of the adapter
- id string
- The provider-assigned unique ID for this managed resource.
- isSshTunnelEnabled bool
- Whether the connection can use an SSH tunnel
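For example, a Python program can export these computed properties like any other output; the connection below is a placeholder sketch, not a configuration from this page.

import pulumi
import pulumi_dbtcloud as dbtcloud

conn = dbtcloud.GlobalConnection("example",
    name="Example connection",
    postgres=dbtcloud.GlobalConnectionPostgresArgs(
        hostname="my-postgresql-server.com",
        port=5432,
        dbname="my_database",
    ))

# adapter_version and is_ssh_tunnel_enabled are computed by dbt Cloud after creation.
pulumi.export("adapter_version", conn.adapter_version)
pulumi.export("is_ssh_tunnel_enabled", conn.is_ssh_tunnel_enabled)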
Look up Existing GlobalConnection Resource
Get an existing GlobalConnection resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: GlobalConnectionState, opts?: CustomResourceOptions): GlobalConnection
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
adapter_version: Optional[str] = None,
apache_spark: Optional[GlobalConnectionApacheSparkArgs] = None,
athena: Optional[GlobalConnectionAthenaArgs] = None,
bigquery: Optional[GlobalConnectionBigqueryArgs] = None,
databricks: Optional[GlobalConnectionDatabricksArgs] = None,
fabric: Optional[GlobalConnectionFabricArgs] = None,
is_ssh_tunnel_enabled: Optional[bool] = None,
name: Optional[str] = None,
oauth_configuration_id: Optional[int] = None,
postgres: Optional[GlobalConnectionPostgresArgs] = None,
private_link_endpoint_id: Optional[str] = None,
redshift: Optional[GlobalConnectionRedshiftArgs] = None,
snowflake: Optional[GlobalConnectionSnowflakeArgs] = None,
starburst: Optional[GlobalConnectionStarburstArgs] = None,
synapse: Optional[GlobalConnectionSynapseArgs] = None) -> GlobalConnection
func GetGlobalConnection(ctx *Context, name string, id IDInput, state *GlobalConnectionState, opts ...ResourceOption) (*GlobalConnection, error)
public static GlobalConnection Get(string name, Input<string> id, GlobalConnectionState? state, CustomResourceOptions? opts = null)
public static GlobalConnection get(String name, Output<String> id, GlobalConnectionState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
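A minimal Python sketch of such a lookup, assuming an existing global connection whose numeric ID is 12345 (the ID is a placeholder):

import pulumi
import pulumi_dbtcloud as dbtcloud

# Adopt an existing global connection into the program's view of state
# without creating a new one.
existing = dbtcloud.GlobalConnection.get("existing-connection", "12345")

pulumi.export("existing_connection_name", existing.name)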
- adapterVersion string
- Version of the adapter
- apacheSpark GlobalConnectionApacheSpark
- Apache Spark connection configuration.
- athena GlobalConnectionAthena
- Athena connection configuration.
- bigquery GlobalConnectionBigquery
- databricks GlobalConnectionDatabricks
- Databricks connection configuration
- fabric GlobalConnectionFabric
- Microsoft Fabric connection configuration.
- isSshTunnelEnabled bool
- Whether the connection can use an SSH tunnel
- name string
- Connection name
- oauthConfigurationId int
- External OAuth configuration ID (only Snowflake for now)
- postgres GlobalConnectionPostgres
- PostgreSQL connection configuration.
- privateLinkEndpointId string
- Private Link Endpoint ID. This ID can be found using the privatelink_endpoint data source
- redshift GlobalConnectionRedshift
- Redshift connection configuration
- snowflake GlobalConnectionSnowflake
- Snowflake connection configuration
- starburst GlobalConnectionStarburst
- Starburst/Trino connection configuration.
- synapse GlobalConnectionSynapse
- Azure Synapse Analytics connection configuration.
Supporting Types
GlobalConnectionApacheSpark, GlobalConnectionApacheSparkArgs
- cluster string
- Spark cluster for the connection
- host string
- Hostname of the connection
- method string
- Authentication method for the connection (http or thrift).
- auth string
- Auth
- connectRetries int
- Connection retries. Default=0
- connectTimeout int
- Connection time out in seconds. Default=10
- organization string
- Organization ID
- port int
- Port for the connection. Default=443
- user string
- User
GlobalConnectionAthena, GlobalConnectionAthenaArgs
- database string
- Specify the database (data catalog) to build models into (lowercase only).
- regionName string
- AWS region of your Athena instance.
- s3StagingDir string
- S3 location to store Athena query results and metadata.
- numBoto3Retries int
- Number of times to retry boto3 requests (e.g. deleting S3 files for materialized tables).
- numIcebergRetries int
- Number of times to retry iceberg commit queries to fix ICEBERG_COMMIT_ERROR.
- numRetries int
- Number of times to retry a failing query.
- pollInterval int
- Interval in seconds to use for polling the status of query results in Athena.
- s3DataDir string
- Prefix for storing tables, if different from the connection's S3 staging directory.
- s3DataNaming string
- How to generate table paths in the S3 data directory.
- s3TmpTableDir string
- Prefix for storing temporary tables, if different from the connection's S3 data directory.
- sparkWorkGroup string
- Identifier of Athena Spark workgroup for running Python models.
- workGroup string
- Identifier of Athena workgroup.
GlobalConnectionBigquery, GlobalConnectionBigqueryArgs
- Auth
Provider stringX509Cert Url - Auth Provider X509 Cert URL for the Service Account
- Auth
Uri string - Auth URI for the Service Account
- Client
Email string - Service Account email
- Client
Id string - Client ID of the Service Account
- Client
X509Cert stringUrl - Client X509 Cert URL for the Service Account
- Gcp
Project stringId - The GCP project ID to use for the connection
- Private
Key string - Private Key for the Service Account
- Private
Key stringId - Private Key ID for the Service Account
- Token
Uri string - Token URI for the Service Account
- Application
Id string - OAuth Client ID
- Application
Secret string - OAuth Client Secret
- Dataproc
Cluster stringName - Dataproc cluster name for PySpark workloads
- Dataproc
Region string - Google Cloud region for PySpark workloads on Dataproc
- Execution
Project string - Project to bill for query execution
- Gcs
Bucket string - URI for a Google Cloud Storage bucket to host Python code executed via Datapro
- Impersonate
Service stringAccount - Service Account to impersonate when running queries
- Job
Creation intTimeout Seconds - Maximum timeout for the job creation step
- Job
Retry intDeadline Seconds - Total number of seconds to wait while retrying the same query
- Location string
- Location to create new Datasets in
- Maximum
Bytes intBilled - Max number of bytes that can be billed for a given BigQuery query
- Priority string
- The priority with which to execute BigQuery queries (batch or interactive)
- Retries int
- Number of retries for queries
- Scopes List<string>
- OAuth scopes for the BigQuery connection
- Timeout
Seconds int - Timeout in seconds for queries
- Auth
Provider stringX509Cert Url - Auth Provider X509 Cert URL for the Service Account
- Auth
Uri string - Auth URI for the Service Account
- Client
Email string - Service Account email
- Client
Id string - Client ID of the Service Account
- Client
X509Cert stringUrl - Client X509 Cert URL for the Service Account
- Gcp
Project stringId - The GCP project ID to use for the connection
- Private
Key string - Private Key for the Service Account
- Private
Key stringId - Private Key ID for the Service Account
- Token
Uri string - Token URI for the Service Account
- Application
Id string - OAuth Client ID
- Application
Secret string - OAuth Client Secret
- Dataproc
Cluster stringName - Dataproc cluster name for PySpark workloads
- Dataproc
Region string - Google Cloud region for PySpark workloads on Dataproc
- Execution
Project string - Project to bill for query execution
- Gcs
Bucket string - URI for a Google Cloud Storage bucket to host Python code executed via Datapro
- Impersonate
Service stringAccount - Service Account to impersonate when running queries
- Job
Creation intTimeout Seconds - Maximum timeout for the job creation step
- Job
Retry intDeadline Seconds - Total number of seconds to wait while retrying the same query
- Location string
- Location to create new Datasets in
- Maximum
Bytes intBilled - Max number of bytes that can be billed for a given BigQuery query
- Priority string
- The priority with which to execute BigQuery queries (batch or interactive)
- Retries int
- Number of retries for queries
- Scopes []string
- OAuth scopes for the BigQuery connection
- Timeout
Seconds int - Timeout in seconds for queries
- authProviderX509CertUrl String - Auth Provider X509 Cert URL for the Service Account
- authUri String - Auth URI for the Service Account
- clientEmail String - Service Account email
- clientId String - Client ID of the Service Account
- clientX509CertUrl String - Client X509 Cert URL for the Service Account
- gcpProjectId String - The GCP project ID to use for the connection
- privateKey String - Private Key for the Service Account
- privateKeyId String - Private Key ID for the Service Account
- tokenUri String - Token URI for the Service Account
- applicationId String - OAuth Client ID
- applicationSecret String - OAuth Client Secret
- dataprocClusterName String - Dataproc cluster name for PySpark workloads
- dataprocRegion String - Google Cloud region for PySpark workloads on Dataproc
- executionProject String - Project to bill for query execution
- gcsBucket String - URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- impersonateServiceAccount String - Service Account to impersonate when running queries
- jobCreationTimeoutSeconds Integer - Maximum timeout for the job creation step
- jobRetryDeadlineSeconds Integer - Total number of seconds to wait while retrying the same query
- location String - Location to create new Datasets in
- maximumBytesBilled Integer - Max number of bytes that can be billed for a given BigQuery query
- priority String - The priority with which to execute BigQuery queries (batch or interactive)
- retries Integer - Number of retries for queries
- scopes List<String> - OAuth scopes for the BigQuery connection
- timeoutSeconds Integer - Timeout in seconds for queries
- authProviderX509CertUrl string - Auth Provider X509 Cert URL for the Service Account
- authUri string - Auth URI for the Service Account
- clientEmail string - Service Account email
- clientId string - Client ID of the Service Account
- clientX509CertUrl string - Client X509 Cert URL for the Service Account
- gcpProjectId string - The GCP project ID to use for the connection
- privateKey string - Private Key for the Service Account
- privateKeyId string - Private Key ID for the Service Account
- tokenUri string - Token URI for the Service Account
- applicationId string - OAuth Client ID
- applicationSecret string - OAuth Client Secret
- dataprocClusterName string - Dataproc cluster name for PySpark workloads
- dataprocRegion string - Google Cloud region for PySpark workloads on Dataproc
- executionProject string - Project to bill for query execution
- gcsBucket string - URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- impersonateServiceAccount string - Service Account to impersonate when running queries
- jobCreationTimeoutSeconds number - Maximum timeout for the job creation step
- jobRetryDeadlineSeconds number - Total number of seconds to wait while retrying the same query
- location string - Location to create new Datasets in
- maximumBytesBilled number - Max number of bytes that can be billed for a given BigQuery query
- priority string - The priority with which to execute BigQuery queries (batch or interactive)
- retries number - Number of retries for queries
- scopes string[] - OAuth scopes for the BigQuery connection
- timeoutSeconds number - Timeout in seconds for queries
- auth_provider_x509_cert_url str - Auth Provider X509 Cert URL for the Service Account
- auth_uri str - Auth URI for the Service Account
- client_email str - Service Account email
- client_id str - Client ID of the Service Account
- client_x509_cert_url str - Client X509 Cert URL for the Service Account
- gcp_project_id str - The GCP project ID to use for the connection
- private_key str - Private Key for the Service Account
- private_key_id str - Private Key ID for the Service Account
- token_uri str - Token URI for the Service Account
- application_id str - OAuth Client ID
- application_secret str - OAuth Client Secret
- dataproc_cluster_name str - Dataproc cluster name for PySpark workloads
- dataproc_region str - Google Cloud region for PySpark workloads on Dataproc
- execution_project str - Project to bill for query execution
- gcs_bucket str - URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- impersonate_service_account str - Service Account to impersonate when running queries
- job_creation_timeout_seconds int - Maximum timeout for the job creation step
- job_retry_deadline_seconds int - Total number of seconds to wait while retrying the same query
- location str - Location to create new Datasets in
- maximum_bytes_billed int - Max number of bytes that can be billed for a given BigQuery query
- priority str - The priority with which to execute BigQuery queries (batch or interactive)
- retries int - Number of retries for queries
- scopes Sequence[str] - OAuth scopes for the BigQuery connection
- timeout_seconds int - Timeout in seconds for queries
- authProviderX509CertUrl String - Auth Provider X509 Cert URL for the Service Account
- authUri String - Auth URI for the Service Account
- clientEmail String - Service Account email
- clientId String - Client ID of the Service Account
- clientX509CertUrl String - Client X509 Cert URL for the Service Account
- gcpProjectId String - The GCP project ID to use for the connection
- privateKey String - Private Key for the Service Account
- privateKeyId String - Private Key ID for the Service Account
- tokenUri String - Token URI for the Service Account
- applicationId String - OAuth Client ID
- applicationSecret String - OAuth Client Secret
- dataprocClusterName String - Dataproc cluster name for PySpark workloads
- dataprocRegion String - Google Cloud region for PySpark workloads on Dataproc
- executionProject String - Project to bill for query execution
- gcsBucket String - URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- impersonateServiceAccount String - Service Account to impersonate when running queries
- jobCreationTimeoutSeconds Number - Maximum timeout for the job creation step
- jobRetryDeadlineSeconds Number - Total number of seconds to wait while retrying the same query
- location String - Location to create new Datasets in
- maximumBytesBilled Number - Max number of bytes that can be billed for a given BigQuery query
- priority String - The priority with which to execute BigQuery queries (batch or interactive)
- retries Number - Number of retries for queries
- scopes List<String> - OAuth scopes for the BigQuery connection
- timeoutSeconds Number - Timeout in seconds for queries
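As a rough illustration of how the BigQuery fields above fit together, the sketch below (TypeScript) creates a BigQuery global connection and sets a few of the optional execution settings; every credential value, project ID, and limit shown is a placeholder assumption, not a working configuration.
import * as dbtcloud from "@pulumi/dbtcloud";

// Sketch only: all credential and project values below are placeholders.
const bigqueryWithExecutionSettings = new dbtcloud.GlobalConnection("bigquery-with-execution-settings", {
    name: "My BigQuery connection with execution settings",
    bigquery: {
        // Required Service Account fields (placeholders)
        gcpProjectId: "my-gcp-project-id",
        privateKeyId: "my-private-key-id",
        privateKey: "-----BEGIN PRIVATE KEY-----...",
        clientEmail: "my-sa@my-gcp-project-id.iam.gserviceaccount.com",
        clientId: "my_client_id",
        authUri: "https://accounts.google.com/o/oauth2/auth",
        tokenUri: "https://oauth2.googleapis.com/token",
        authProviderX509CertUrl: "https://www.googleapis.com/oauth2/v1/certs",
        clientX509CertUrl: "my_client_x509_cert_url",
        // Optional execution settings documented above
        executionProject: "my-billing-project",   // project billed for query execution
        maximumBytesBilled: 1000000000,           // cap on bytes billed per query
        priority: "interactive",                  // "batch" or "interactive"
        retries: 3,
        timeoutSeconds: 1000,
    },
});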
GlobalConnectionDatabricks, GlobalConnectionDatabricksArgs
- Host string - The hostname of the Databricks cluster or SQL warehouse.
- HttpPath string - The HTTP path of the Databricks cluster or SQL warehouse.
- Catalog string - Catalog name if Unity Catalog is enabled in your Databricks workspace.
- ClientId string - Required to enable Databricks OAuth authentication for IDE developers.
- ClientSecret string - Required to enable Databricks OAuth authentication for IDE developers.
- Host string - The hostname of the Databricks cluster or SQL warehouse.
- HttpPath string - The HTTP path of the Databricks cluster or SQL warehouse.
- Catalog string - Catalog name if Unity Catalog is enabled in your Databricks workspace.
- ClientId string - Required to enable Databricks OAuth authentication for IDE developers.
- ClientSecret string - Required to enable Databricks OAuth authentication for IDE developers.
- host String - The hostname of the Databricks cluster or SQL warehouse.
- httpPath String - The HTTP path of the Databricks cluster or SQL warehouse.
- catalog String - Catalog name if Unity Catalog is enabled in your Databricks workspace.
- clientId String - Required to enable Databricks OAuth authentication for IDE developers.
- clientSecret String - Required to enable Databricks OAuth authentication for IDE developers.
- host string - The hostname of the Databricks cluster or SQL warehouse.
- httpPath string - The HTTP path of the Databricks cluster or SQL warehouse.
- catalog string - Catalog name if Unity Catalog is enabled in your Databricks workspace.
- clientId string - Required to enable Databricks OAuth authentication for IDE developers.
- clientSecret string - Required to enable Databricks OAuth authentication for IDE developers.
- host str - The hostname of the Databricks cluster or SQL warehouse.
- http_path str - The HTTP path of the Databricks cluster or SQL warehouse.
- catalog str - Catalog name if Unity Catalog is enabled in your Databricks workspace.
- client_id str - Required to enable Databricks OAuth authentication for IDE developers.
- client_secret str - Required to enable Databricks OAuth authentication for IDE developers.
- host String - The hostname of the Databricks cluster or SQL warehouse.
- httpPath String - The HTTP path of the Databricks cluster or SQL warehouse.
- catalog String - Catalog name if Unity Catalog is enabled in your Databricks workspace.
- clientId String - Required to enable Databricks OAuth authentication for IDE developers.
- clientSecret String - Required to enable Databricks OAuth authentication for IDE developers.
GlobalConnectionFabric, GlobalConnectionFabricArgs
- Database string - The database to connect to for this connection.
- Server string - The server hostname.
- LoginTimeout int - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- Port int - The port to connect to for this connection. Default=1433
- QueryTimeout int - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- Retries int - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
- Database string - The database to connect to for this connection.
- Server string - The server hostname.
- LoginTimeout int - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- Port int - The port to connect to for this connection. Default=1433
- QueryTimeout int - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- Retries int - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
- database String - The database to connect to for this connection.
- server String - The server hostname.
- loginTimeout Integer - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- port Integer - The port to connect to for this connection. Default=1433
- queryTimeout Integer - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- retries Integer - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
- database string - The database to connect to for this connection.
- server string - The server hostname.
- loginTimeout number - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- port number - The port to connect to for this connection. Default=1433
- queryTimeout number - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- retries number - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
- database str - The database to connect to for this connection.
- server str - The server hostname.
- login_timeout int - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- port int - The port to connect to for this connection. Default=1433
- query_timeout int - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- retries int - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
- database String - The database to connect to for this connection.
- server String - The server hostname.
- loginTimeout Number - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- port Number - The port to connect to for this connection. Default=1433
- queryTimeout Number - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- retries Number - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
GlobalConnectionPostgres, GlobalConnectionPostgresArgs
- Dbname string - The database name for this connection.
- Hostname string - The hostname of the database.
- Port int - The port to connect to for this connection. Default=5432
- SshTunnel Pulumi.DbtCloud.Inputs.GlobalConnectionPostgresSshTunnel - PostgreSQL SSH Tunnel configuration
- Dbname string - The database name for this connection.
- Hostname string - The hostname of the database.
- Port int - The port to connect to for this connection. Default=5432
- SshTunnel GlobalConnectionPostgresSshTunnel - PostgreSQL SSH Tunnel configuration
- dbname String - The database name for this connection.
- hostname String - The hostname of the database.
- port Integer - The port to connect to for this connection. Default=5432
- sshTunnel GlobalConnectionPostgresSshTunnel - PostgreSQL SSH Tunnel configuration
- dbname string - The database name for this connection.
- hostname string - The hostname of the database.
- port number - The port to connect to for this connection. Default=5432
- sshTunnel GlobalConnectionPostgresSshTunnel - PostgreSQL SSH Tunnel configuration
- dbname str - The database name for this connection.
- hostname str - The hostname of the database.
- port int - The port to connect to for this connection. Default=5432
- ssh_tunnel GlobalConnectionPostgresSshTunnel - PostgreSQL SSH Tunnel configuration
- dbname String - The database name for this connection.
- hostname String - The hostname of the database.
- port Number - The port to connect to for this connection. Default=5432
- sshTunnel Property Map - PostgreSQL SSH Tunnel configuration
GlobalConnectionPostgresSshTunnel, GlobalConnectionPostgresSshTunnelArgs
- hostname str - The hostname for the SSH tunnel.
- port int - The HTTP port for the SSH tunnel.
- username str - The username to use for the SSH tunnel.
- id int - The ID of the SSH tunnel connection.
- public_key str - The SSH public key generated to allow connecting via SSH tunnel.
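To show how the Postgres fields and the SSH tunnel block above fit together, here is a minimal TypeScript sketch; the hostnames, database name, and tunnel user are illustrative assumptions.
import * as dbtcloud from "@pulumi/dbtcloud";

// Sketch only: hostnames, database, and tunnel settings are placeholders.
const postgresWithTunnel = new dbtcloud.GlobalConnection("postgres-with-tunnel", {
    name: "My PostgreSQL connection",
    postgres: {
        hostname: "my-postgres-host.example.com",
        dbname: "analytics",
        port: 5432,                          // default is 5432
        sshTunnel: {
            hostname: "bastion.example.com", // host of the SSH tunnel
            port: 22,
            username: "dbt_tunnel_user",
        },
    },
});

// The tunnel id and public_key are generated by dbt Cloud and exposed as outputs,
// e.g. postgresWithTunnel.postgres.apply(p => p?.sshTunnel?.publicKey)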
GlobalConnectionRedshift, GlobalConnectionRedshiftArgs
- Dbname string - The database name for this connection.
- Hostname string - The hostname of the data warehouse.
- Port int - The port to connect to for this connection. Default=5432
- SshTunnel Pulumi.DbtCloud.Inputs.GlobalConnectionRedshiftSshTunnel - Redshift SSH Tunnel configuration
- Dbname string - The database name for this connection.
- Hostname string - The hostname of the data warehouse.
- Port int - The port to connect to for this connection. Default=5432
- SshTunnel GlobalConnectionRedshiftSshTunnel - Redshift SSH Tunnel configuration
- dbname String - The database name for this connection.
- hostname String - The hostname of the data warehouse.
- port Integer - The port to connect to for this connection. Default=5432
- sshTunnel GlobalConnectionRedshiftSshTunnel - Redshift SSH Tunnel configuration
- dbname string - The database name for this connection.
- hostname string - The hostname of the data warehouse.
- port number - The port to connect to for this connection. Default=5432
- sshTunnel GlobalConnectionRedshiftSshTunnel - Redshift SSH Tunnel configuration
- dbname str - The database name for this connection.
- hostname str - The hostname of the data warehouse.
- port int - The port to connect to for this connection. Default=5432
- ssh_tunnel GlobalConnectionRedshiftSshTunnel - Redshift SSH Tunnel configuration
- dbname String - The database name for this connection.
- hostname String - The hostname of the data warehouse.
- port Number - The port to connect to for this connection. Default=5432
- sshTunnel Property Map - Redshift SSH Tunnel configuration
GlobalConnectionRedshiftSshTunnel, GlobalConnectionRedshiftSshTunnelArgs
- hostname str - The hostname for the SSH tunnel.
- port int - The HTTP port for the SSH tunnel.
- username str - The username to use for the SSH tunnel.
- id int - The ID of the SSH tunnel connection.
- public_key str - The SSH public key generated to allow connecting via SSH tunnel.
GlobalConnectionSnowflake, GlobalConnectionSnowflakeArgs
- Account string - The Snowflake account name
- Database string - The default database for the connection
- Warehouse string - The default Snowflake Warehouse to use for the connection
- AllowSso bool - Whether to allow Snowflake OAuth for the connection. If true, the oauth_client_id and oauth_client_secret fields must be set
- ClientSessionKeepAlive bool - If true, the Snowflake client will keep connections for longer than the default 4 hours. This is helpful when particularly long-running queries are executing (> 4 hours)
- OauthClientId string - OAuth Client ID. Required to allow OAuth between dbt Cloud and Snowflake
- OauthClientSecret string - OAuth Client Secret. Required to allow OAuth between dbt Cloud and Snowflake
- Role string - The Snowflake role to use when running queries on the connection
- Account string - The Snowflake account name
- Database string - The default database for the connection
- Warehouse string - The default Snowflake Warehouse to use for the connection
- AllowSso bool - Whether to allow Snowflake OAuth for the connection. If true, the oauth_client_id and oauth_client_secret fields must be set
- ClientSessionKeepAlive bool - If true, the Snowflake client will keep connections for longer than the default 4 hours. This is helpful when particularly long-running queries are executing (> 4 hours)
- OauthClientId string - OAuth Client ID. Required to allow OAuth between dbt Cloud and Snowflake
- OauthClientSecret string - OAuth Client Secret. Required to allow OAuth between dbt Cloud and Snowflake
- Role string - The Snowflake role to use when running queries on the connection
- account String - The Snowflake account name
- database String - The default database for the connection
- warehouse String - The default Snowflake Warehouse to use for the connection
- allowSso Boolean - Whether to allow Snowflake OAuth for the connection. If true, the oauth_client_id and oauth_client_secret fields must be set
- clientSessionKeepAlive Boolean - If true, the Snowflake client will keep connections for longer than the default 4 hours. This is helpful when particularly long-running queries are executing (> 4 hours)
- oauthClientId String - OAuth Client ID. Required to allow OAuth between dbt Cloud and Snowflake
- oauthClientSecret String - OAuth Client Secret. Required to allow OAuth between dbt Cloud and Snowflake
- role String - The Snowflake role to use when running queries on the connection
- account string - The Snowflake account name
- database string - The default database for the connection
- warehouse string - The default Snowflake Warehouse to use for the connection
- allowSso boolean - Whether to allow Snowflake OAuth for the connection. If true, the oauth_client_id and oauth_client_secret fields must be set
- clientSessionKeepAlive boolean - If true, the Snowflake client will keep connections for longer than the default 4 hours. This is helpful when particularly long-running queries are executing (> 4 hours)
- oauthClientId string - OAuth Client ID. Required to allow OAuth between dbt Cloud and Snowflake
- oauthClientSecret string - OAuth Client Secret. Required to allow OAuth between dbt Cloud and Snowflake
- role string - The Snowflake role to use when running queries on the connection
- account str - The Snowflake account name
- database str - The default database for the connection
- warehouse str - The default Snowflake Warehouse to use for the connection
- allow_sso bool - Whether to allow Snowflake OAuth for the connection. If true, the oauth_client_id and oauth_client_secret fields must be set
- client_session_keep_alive bool - If true, the Snowflake client will keep connections for longer than the default 4 hours. This is helpful when particularly long-running queries are executing (> 4 hours)
- oauth_client_id str - OAuth Client ID. Required to allow OAuth between dbt Cloud and Snowflake
- oauth_client_secret str - OAuth Client Secret. Required to allow OAuth between dbt Cloud and Snowflake
- role str - The Snowflake role to use when running queries on the connection
- account String - The Snowflake account name
- database String - The default database for the connection
- warehouse String - The default Snowflake Warehouse to use for the connection
- allowSso Boolean - Whether to allow Snowflake OAuth for the connection. If true, the oauth_client_id and oauth_client_secret fields must be set
- clientSessionKeepAlive Boolean - If true, the Snowflake client will keep connections for longer than the default 4 hours. This is helpful when particularly long-running queries are executing (> 4 hours)
- oauthClientId String - OAuth Client ID. Required to allow OAuth between dbt Cloud and Snowflake
- oauthClientSecret String - OAuth Client Secret. Required to allow OAuth between dbt Cloud and Snowflake
- role String - The Snowflake role to use when running queries on the connection
GlobalConnectionStarburst, GlobalConnectionStarburstArgs
GlobalConnectionSynapse, GlobalConnectionSynapseArgs
- Database string - The database to connect to for this connection.
- Host string - The server hostname.
- LoginTimeout int - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- Port int - The port to connect to for this connection. Default=1433
- QueryTimeout int - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- Retries int - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
- Database string - The database to connect to for this connection.
- Host string - The server hostname.
- LoginTimeout int - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- Port int - The port to connect to for this connection. Default=1433
- QueryTimeout int - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- Retries int - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
- database String - The database to connect to for this connection.
- host String - The server hostname.
- loginTimeout Integer - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- port Integer - The port to connect to for this connection. Default=1433
- queryTimeout Integer - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- retries Integer - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
- database string - The database to connect to for this connection.
- host string - The server hostname.
- loginTimeout number - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- port number - The port to connect to for this connection. Default=1433
- queryTimeout number - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- retries number - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
- database str - The database to connect to for this connection.
- host str - The server hostname.
- login_timeout int - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- port int - The port to connect to for this connection. Default=1433
- query_timeout int - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- retries int - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
- database String - The database to connect to for this connection.
- host String - The server hostname.
- loginTimeout Number - The number of seconds used to establish a connection before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- port Number - The port to connect to for this connection. Default=1433
- queryTimeout Number - The number of seconds used to wait for a query before failing. Defaults to 0, which means that the timeout is disabled or uses the default system settings.
- retries Number - The number of automatic times to retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
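For completeness, a minimal TypeScript sketch of a Synapse connection using the fields above; the host, database, and timeout values are placeholders.
import * as dbtcloud from "@pulumi/dbtcloud";

// Sketch only: host, database, and timeouts are placeholders.
const synapseExample = new dbtcloud.GlobalConnection("synapse-example", {
    name: "My Synapse connection",
    synapse: {
        host: "my-synapse-workspace.sql.azuresynapse.net",
        database: "mydb",
        port: 1433,          // default is 1433
        loginTimeout: 30,    // seconds before a connection attempt fails; 0 disables the timeout
        queryTimeout: 3600,  // seconds before a query fails; 0 disables the timeout
        retries: 3,
    },
});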
Import
A project-scoped connection can be imported as a global connection by specifying the connection ID.
Migrating from project-scoped connections to global connections can be done by:
- Adding the config for the global connection and importing it (see below)
- Removing the project-scoped connection from the config AND from the state (see the state removal example after the import commands below)
  - CAREFUL: If the connection is removed from the config but not the state, it will be destroyed on the next apply
Using import blocks (requires Terraform >= 1.5):
import {
  to = dbtcloud_global_connection.my_connection
  id = "connection_id"
}
import {
  to = dbtcloud_global_connection.my_connection
  id = "1234"
}
Using the older import command:
$ pulumi import dbtcloud:index/globalConnection:GlobalConnection my_connection "connection_id"
$ pulumi import dbtcloud:index/globalConnection:GlobalConnection my_connection 1234
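For the state-removal step in the migration above, one possible approach (shown with an assumed stack name, project name, and resource name, so adjust the URN to match your own state) is to delete the old project-scoped dbtcloud.Connection from the Pulumi state after it has been removed from the config and the global connection has been imported:
$ pulumi stack --show-urns    # locate the URN of the old project-scoped connection
$ pulumi state delete 'urn:pulumi:prod::my-project::dbtcloud:index/connection:Connection::my_legacy_connection'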
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository: dbtcloud pulumi/pulumi-dbtcloud
- License: Apache-2.0
- Notes: This Pulumi package is based on the dbtcloud Terraform Provider.