dbtcloud.getGlobalConnection
dbt Cloud v0.1.25, published on Friday, Nov 8, 2024 by Pulumi
Example Usage
TypeScript:
import * as pulumi from "@pulumi/pulumi";
import * as dbtcloud from "@pulumi/dbtcloud";

const myConnection = dbtcloud.getGlobalConnection({
    id: 1234,
});
Python:
import pulumi
import pulumi_dbtcloud as dbtcloud

my_connection = dbtcloud.get_global_connection(id=1234)
Go:
package main

import (
	"github.com/pulumi/pulumi-dbtcloud/sdk/go/dbtcloud"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dbtcloud.LookupGlobalConnection(ctx, &dbtcloud.LookupGlobalConnectionArgs{
			Id: 1234,
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
C#:
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using DbtCloud = Pulumi.DbtCloud;

return await Deployment.RunAsync(() =>
{
    var myConnection = DbtCloud.GetGlobalConnection.Invoke(new()
    {
        Id = 1234,
    });
});
Java:
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.dbtcloud.DbtcloudFunctions;
import com.pulumi.dbtcloud.inputs.GetGlobalConnectionArgs;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var myConnection = DbtcloudFunctions.getGlobalConnection(GetGlobalConnectionArgs.builder()
            .id(1234)
            .build());
    }
}
YAML:
variables:
  myConnection:
    fn::invoke:
      Function: dbtcloud:getGlobalConnection
      Arguments:
        id: 1234
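The lookup result can be consumed like any other Pulumi value. A minimal TypeScript sketch (the connection ID 1234 is illustrative) that exports two of the output properties documented below:

import * as pulumi from "@pulumi/pulumi";
import * as dbtcloud from "@pulumi/dbtcloud";

const myConnection = dbtcloud.getGlobalConnection({ id: 1234 });

// The direct form returns a Promise, so these stack outputs are Promise-wrapped.
export const connectionName = myConnection.then(c => c.name);
export const adapterVersion = myConnection.then(c => c.adapterVersion);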
Using getGlobalConnection
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
TypeScript:
function getGlobalConnection(args: GetGlobalConnectionArgs, opts?: InvokeOptions): Promise<GetGlobalConnectionResult>
function getGlobalConnectionOutput(args: GetGlobalConnectionOutputArgs, opts?: InvokeOptions): Output<GetGlobalConnectionResult>
Python:
def get_global_connection(id: Optional[int] = None,
                          opts: Optional[InvokeOptions] = None) -> GetGlobalConnectionResult
def get_global_connection_output(id: Optional[pulumi.Input[int]] = None,
                                 opts: Optional[InvokeOptions] = None) -> Output[GetGlobalConnectionResult]
Go:
func LookupGlobalConnection(ctx *Context, args *LookupGlobalConnectionArgs, opts ...InvokeOption) (*LookupGlobalConnectionResult, error)
func LookupGlobalConnectionOutput(ctx *Context, args *LookupGlobalConnectionOutputArgs, opts ...InvokeOption) LookupGlobalConnectionResultOutput

> Note: This function is named LookupGlobalConnection in the Go SDK.
C#:
public static class GetGlobalConnection
{
    public static Task<GetGlobalConnectionResult> InvokeAsync(GetGlobalConnectionArgs args, InvokeOptions? opts = null)
    public static Output<GetGlobalConnectionResult> Invoke(GetGlobalConnectionInvokeArgs args, InvokeOptions? opts = null)
}
Java:
public static CompletableFuture<GetGlobalConnectionResult> getGlobalConnection(GetGlobalConnectionArgs args, InvokeOptions options)
// Output-based functions aren't available in Java yet
YAML:
fn::invoke:
  function: dbtcloud:index/getGlobalConnection:getGlobalConnection
  arguments:
    # arguments dictionary
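The output form is the one to reach for when the arguments are themselves Outputs, for example an ID produced by another resource. A minimal TypeScript sketch contrasting the two forms (connectionId stands in for any Input<number> you already have):

import * as pulumi from "@pulumi/pulumi";
import * as dbtcloud from "@pulumi/dbtcloud";

// Direct form: plain arguments, Promise-wrapped result.
const byValue = dbtcloud.getGlobalConnection({ id: 1234 });

// Output form: Input-wrapped arguments, Output-wrapped result,
// with property access lifted onto the Output.
const connectionId: pulumi.Input<number> = 1234;
const byOutput = dbtcloud.getGlobalConnectionOutput({ id: connectionId });

export const directName = byValue.then(c => c.name);
export const outputName = byOutput.name;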
The following arguments are supported. Property listings on this page are shown once rather than repeated per language: names are PascalCase in C# and Go, camelCase in TypeScript, Java, and YAML, and snake_case in Python, and primitive types map to each language's equivalents.
- id int - Connection Identifier
getGlobalConnection Result
The following output properties are available:
- adapterVersion string - Version of the adapter
- apacheSpark GetGlobalConnectionApacheSpark - Apache Spark connection configuration.
- athena GetGlobalConnectionAthena - Athena connection configuration.
- bigquery GetGlobalConnectionBigquery - BigQuery connection configuration.
- databricks GetGlobalConnectionDatabricks - Databricks connection configuration
- fabric GetGlobalConnectionFabric - Microsoft Fabric connection configuration.
- id int - Connection Identifier
- isSshTunnelEnabled bool - Whether the connection can use an SSH tunnel
- name string - Connection name
- oauthConfigurationId int
- postgres GetGlobalConnectionPostgres - PostgreSQL connection configuration.
- privateLinkEndpointId string - Private Link Endpoint ID. This ID can be found using the privatelink_endpoint data source.
- redshift GetGlobalConnectionRedshift - Redshift connection configuration
- snowflake GetGlobalConnectionSnowflake - Snowflake connection configuration
- starburst GetGlobalConnectionStarburst - Starburst/Trino connection configuration.
- synapse GetGlobalConnectionSynapse - Azure Synapse Analytics connection configuration.
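Typically only the nested block matching the connection's warehouse type is populated; in TypeScript the other blocks come back undefined (an assumption worth verifying against your SDK version). A minimal sketch, using an illustrative connection ID, that branches on whichever block is present:

import * as pulumi from "@pulumi/pulumi";
import * as dbtcloud from "@pulumi/dbtcloud";

const conn = dbtcloud.getGlobalConnectionOutput({ id: 1234 });

// Report which warehouse this connection targets, based on which
// nested configuration block came back populated.
export const warehouseKind = conn.apply(c => {
    if (c.snowflake) return `snowflake:${c.snowflake.account}`;
    if (c.bigquery) return `bigquery:${c.bigquery.gcpProjectId}`;
    if (c.postgres) return `postgres:${c.postgres.hostname}`;
    return "other";
});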
Supporting Types
GetGlobalConnectionApacheSpark
- auth string - Auth
- cluster string - Spark cluster for the connection
- connectRetries int - Connection retries. Default=0
- connectTimeout int - Connection time out in seconds. Default=10
- host string - Hostname of the connection
- method string - Authentication method for the connection (http or thrift).
- organization string - Organization ID
- port int - Port for the connection. Default=443
- user string - User
GetGlobalConnectionAthena
- database string - Specify the database (data catalog) to build models into (lowercase only).
- numBoto3Retries int - Number of times to retry boto3 requests (e.g. deleting S3 files for materialized tables).
- numIcebergRetries int - Number of times to retry iceberg commit queries to fix ICEBERG_COMMIT_ERROR.
- numRetries int - Number of times to retry a failing query.
- pollInterval int - Interval in seconds to use for polling the status of query results in Athena.
- regionName string - AWS region of your Athena instance.
- s3DataDir string - Prefix for storing tables, if different from the connection's S3 staging directory.
- s3DataNaming string - How to generate table paths in the S3 data directory.
- s3StagingDir string - S3 location to store Athena query results and metadata.
- s3TmpTableDir string - Prefix for storing temporary tables, if different from the connection's S3 data directory.
- sparkWorkGroup string - Identifier of Athena Spark workgroup for running Python models.
- workGroup string - Identifier of Athena workgroup.
GetGlobalConnectionBigquery
- applicationId string - OAuth Client ID
- applicationSecret string - OAuth Client Secret
- authProviderX509CertUrl string - Auth Provider X509 Cert URL for the Service Account
- authUri string - Auth URI for the Service Account
- clientEmail string - Service Account email
- clientId string - Client ID of the Service Account
- clientX509CertUrl string - Client X509 Cert URL for the Service Account
- dataprocClusterName string - Dataproc cluster name for PySpark workloads
- dataprocRegion string - Google Cloud region for PySpark workloads on Dataproc
- executionProject string - Project to bill for query execution
- gcpProjectId string - The GCP project ID to use for the connection
- gcsBucket string - URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- impersonateServiceAccount string - Service Account to impersonate when running queries
- jobCreationTimeoutSeconds int - Maximum timeout for the job creation step
- jobRetryDeadlineSeconds int - Total number of seconds to wait while retrying the same query
- location string - Location to create new Datasets in
- maximumBytesBilled int - Max number of bytes that can be billed for a given BigQuery query
- priority string - The priority with which to execute BigQuery queries (batch or interactive)
- privateKey string - Private Key for the Service Account
- privateKeyId string - Private Key ID for the Service Account
- retries int - Number of retries for queries
- scopes list<string> - OAuth scopes for the BigQuery connection
- timeoutSeconds int - Timeout in seconds for queries
- tokenUri string - Token URI for the Service Account
GetGlobalConnectionDatabricks
- catalog string - Catalog name if Unity Catalog is enabled in your Databricks workspace.
- clientId string - Required to enable Databricks OAuth authentication for IDE developers.
- clientSecret string - Required to enable Databricks OAuth authentication for IDE developers.
- host string - The hostname of the Databricks cluster or SQL warehouse.
- httpPath string - The HTTP path of the Databricks cluster or SQL warehouse.
GetGlobalConnectionFabric
- database string - The database to connect to for this connection.
- loginTimeout int - The number of seconds used to establish a connection before failing. Defaults to 0, which means the timeout is disabled or uses the default system settings.
- port int - The port to connect to for this connection. Default=1433
- queryTimeout int - The number of seconds to wait for a query before failing. Defaults to 0, which means the timeout is disabled or uses the default system settings.
- retries int - The number of times to automatically retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
- server string - The server hostname.
GetGlobalConnectionPostgres
- dbname string - The database name for this connection.
- hostname string - The hostname of the database.
- port int - The port to connect to for this connection. Default=5432
- sshTunnel GetGlobalConnectionPostgresSshTunnel - PostgreSQL SSH tunnel configuration
GetGlobalConnectionPostgresSshTunnel
- hostname string - The hostname for the SSH tunnel.
- id int - The ID of the SSH tunnel connection.
- port int - The HTTP port for the SSH tunnel.
- publicKey string - The SSH public key generated to allow connecting via SSH tunnel.
- username string - The username to use for the SSH tunnel.
GetGlobalConnectionRedshift
- dbname string - The database name for this connection.
- hostname string - The hostname of the data warehouse.
- port int - The port to connect to for this connection. Default=5432
- sshTunnel GetGlobalConnectionRedshiftSshTunnel - Redshift SSH tunnel configuration
GetGlobalConnectionRedshiftSshTunnel
- hostname string - The hostname for the SSH tunnel.
- id int - The ID of the SSH tunnel connection.
- port int - The HTTP port for the SSH tunnel.
- publicKey string - The SSH public key generated to allow connecting via SSH tunnel.
- username string - The username to use for the SSH tunnel.
GetGlobalConnectionSnowflake
- account string - The Snowflake account name
- allowSso bool - Whether to allow Snowflake OAuth for the connection. If true, the oauth_client_id and oauth_client_secret fields must be set
- clientSessionKeepAlive bool - If true, the Snowflake client will keep connections open for longer than the default 4 hours. This is helpful when particularly long-running queries are executing (> 4 hours)
- database string - The default database for the connection
- oauthClientId string - OAuth Client ID. Required to allow OAuth between dbt Cloud and Snowflake
- oauthClientSecret string - OAuth Client Secret. Required to allow OAuth between dbt Cloud and Snowflake
- role string - The Snowflake role to use when running queries on the connection
- warehouse string - The default Snowflake warehouse to use for the connection
GetGlobalConnectionStarburst
GetGlobalConnectionSynapse
- database string - The database to connect to for this connection.
- host string - The server hostname.
- loginTimeout int - The number of seconds used to establish a connection before failing. Defaults to 0, which means the timeout is disabled or uses the default system settings.
- port int - The port to connect to for this connection. Default=1433
- queryTimeout int - The number of seconds to wait for a query before failing. Defaults to 0, which means the timeout is disabled or uses the default system settings.
- retries int - The number of times to automatically retry a query before failing. Defaults to 1. Queries with syntax errors will not be retried. This setting can be used to overcome intermittent network issues.
Package Details
- Repository: dbtcloud (pulumi/pulumi-dbtcloud)
- License: Apache-2.0
- Notes: This Pulumi package is based on the dbtcloud Terraform Provider.