
dbtcloud.BigQueryConnection

dbt Cloud v0.1.25 published on Friday, Nov 8, 2024 by Pulumi

    Resource to create BigQuery connections in dbt Cloud. The connection can also be configured to use OAuth for developer credentials.

    This resource is deprecated and will be removed in the next major release. Please use the dbtcloud.GlobalConnection resource instead to create BigQuery connections (a migration sketch follows the examples below).

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as dbtcloud from "@pulumi/dbtcloud";
    
    const myConnection = new dbtcloud.BigQueryConnection("my_connection", {
        projectId: dbtProject.id,
        name: "Project Name",
        type: "bigquery",
        isActive: true,
        gcpProjectId: "my-gcp-project-id",
        timeoutSeconds: 100,
        privateKeyId: "my-private-key-id",
        privateKey: "ABCDEFGHIJKL",
        clientEmail: "my_client_email",
        clientId: "my_client_di",
        authUri: "my_auth_uri",
        tokenUri: "my_token_uri",
        authProviderX509CertUrl: "my_auth_provider_x509_cert_url",
        clientX509CertUrl: "my_client_x509_cert_url",
        retries: 3,
    });
    // It is also possible to set the connection to use OAuth by filling in `applicationId` and `applicationSecret`
    const myConnectionWithOauth = new dbtcloud.BigQueryConnection("my_connection_with_oauth", {
        projectId: dbtProject.id,
        name: "Project Name",
        type: "bigquery",
        isActive: true,
        gcpProjectId: "my-gcp-project-id",
        timeoutSeconds: 100,
        privateKeyId: "my-private-key-id",
        privateKey: "ABCDEFGHIJKL",
        clientEmail: "my_client_email",
        clientId: "my_client_di",
        authUri: "my_auth_uri",
        tokenUri: "my_token_uri",
        authProviderX509CertUrl: "my_auth_provider_x509_cert_url",
        clientX509CertUrl: "my_client_x509_cert_url",
        retries: 3,
        applicationId: "oauth_application_id",
        applicationSecret: "oauth_secret_id",
    });
    
    import pulumi
    import pulumi_dbtcloud as dbtcloud
    
    my_connection = dbtcloud.BigQueryConnection("my_connection",
        project_id=dbt_project["id"],
        name="Project Name",
        type="bigquery",
        is_active=True,
        gcp_project_id="my-gcp-project-id",
        timeout_seconds=100,
        private_key_id="my-private-key-id",
        private_key="ABCDEFGHIJKL",
        client_email="my_client_email",
        client_id="my_client_di",
        auth_uri="my_auth_uri",
        token_uri="my_token_uri",
        auth_provider_x509_cert_url="my_auth_provider_x509_cert_url",
        client_x509_cert_url="my_client_x509_cert_url",
        retries=3)
    # It is also possible to set the connection to use OAuth by filling in `application_id` and `application_secret`
    my_connection_with_oauth = dbtcloud.BigQueryConnection("my_connection_with_oauth",
        project_id=dbt_project["id"],
        name="Project Name",
        type="bigquery",
        is_active=True,
        gcp_project_id="my-gcp-project-id",
        timeout_seconds=100,
        private_key_id="my-private-key-id",
        private_key="ABCDEFGHIJKL",
        client_email="my_client_email",
        client_id="my_client_di",
        auth_uri="my_auth_uri",
        token_uri="my_token_uri",
        auth_provider_x509_cert_url="my_auth_provider_x509_cert_url",
        client_x509_cert_url="my_client_x509_cert_url",
        retries=3,
        application_id="oauth_application_id",
        application_secret="oauth_secret_id")
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-dbtcloud/sdk/go/dbtcloud"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := dbtcloud.NewBigQueryConnection(ctx, "my_connection", &dbtcloud.BigQueryConnectionArgs{
    			ProjectId:               pulumi.Any(dbtProject.Id),
    			Name:                    pulumi.String("Project Name"),
    			Type:                    pulumi.String("bigquery"),
    			IsActive:                pulumi.Bool(true),
    			GcpProjectId:            pulumi.String("my-gcp-project-id"),
    			TimeoutSeconds:          pulumi.Int(100),
    			PrivateKeyId:            pulumi.String("my-private-key-id"),
    			PrivateKey:              pulumi.String("ABCDEFGHIJKL"),
    			ClientEmail:             pulumi.String("my_client_email"),
    			ClientId:                pulumi.String("my_client_id"),
    			AuthUri:                 pulumi.String("my_auth_uri"),
    			TokenUri:                pulumi.String("my_token_uri"),
    			AuthProviderX509CertUrl: pulumi.String("my_auth_provider_x509_cert_url"),
    			ClientX509CertUrl:       pulumi.String("my_client_x509_cert_url"),
    			Retries:                 pulumi.Int(3),
    		})
    		if err != nil {
    			return err
    		}
    		// It is also possible to set the connection to use OAuth by filling in `ApplicationId` and `ApplicationSecret`
    		_, err = dbtcloud.NewBigQueryConnection(ctx, "my_connection_with_oauth", &dbtcloud.BigQueryConnectionArgs{
    			ProjectId:               pulumi.Any(dbtProject.Id),
    			Name:                    pulumi.String("Project Name"),
    			Type:                    pulumi.String("bigquery"),
    			IsActive:                pulumi.Bool(true),
    			GcpProjectId:            pulumi.String("my-gcp-project-id"),
    			TimeoutSeconds:          pulumi.Int(100),
    			PrivateKeyId:            pulumi.String("my-private-key-id"),
    			PrivateKey:              pulumi.String("ABCDEFGHIJKL"),
    			ClientEmail:             pulumi.String("my_client_email"),
    			ClientId:                pulumi.String("my_client_id"),
    			AuthUri:                 pulumi.String("my_auth_uri"),
    			TokenUri:                pulumi.String("my_token_uri"),
    			AuthProviderX509CertUrl: pulumi.String("my_auth_provider_x509_cert_url"),
    			ClientX509CertUrl:       pulumi.String("my_client_x509_cert_url"),
    			Retries:                 pulumi.Int(3),
    			ApplicationId:           pulumi.String("oauth_application_id"),
    			ApplicationSecret:       pulumi.String("oauth_secret_id"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using DbtCloud = Pulumi.DbtCloud;
    
    return await Deployment.RunAsync(() => 
    {
        var myConnection = new DbtCloud.BigQueryConnection("my_connection", new()
        {
            ProjectId = dbtProject.Id,
            Name = "Project Name",
            Type = "bigquery",
            IsActive = true,
            GcpProjectId = "my-gcp-project-id",
            TimeoutSeconds = 100,
            PrivateKeyId = "my-private-key-id",
            PrivateKey = "ABCDEFGHIJKL",
            ClientEmail = "my_client_email",
            ClientId = "my_client_di",
            AuthUri = "my_auth_uri",
            TokenUri = "my_token_uri",
            AuthProviderX509CertUrl = "my_auth_provider_x509_cert_url",
            ClientX509CertUrl = "my_client_x509_cert_url",
            Retries = 3,
        });
    
    // It is also possible to set the connection to use OAuth by filling in `ApplicationId` and `ApplicationSecret`
        var myConnectionWithOauth = new DbtCloud.BigQueryConnection("my_connection_with_oauth", new()
        {
            ProjectId = dbtProject.Id,
            Name = "Project Name",
            Type = "bigquery",
            IsActive = true,
            GcpProjectId = "my-gcp-project-id",
            TimeoutSeconds = 100,
            PrivateKeyId = "my-private-key-id",
            PrivateKey = "ABCDEFGHIJKL",
            ClientEmail = "my_client_email",
            ClientId = "my_client_di",
            AuthUri = "my_auth_uri",
            TokenUri = "my_token_uri",
            AuthProviderX509CertUrl = "my_auth_provider_x509_cert_url",
            ClientX509CertUrl = "my_client_x509_cert_url",
            Retries = 3,
            ApplicationId = "oauth_application_id",
            ApplicationSecret = "oauth_secret_id",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.dbtcloud.BigQueryConnection;
    import com.pulumi.dbtcloud.BigQueryConnectionArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var myConnection = new BigQueryConnection("myConnection", BigQueryConnectionArgs.builder()
                .projectId(dbtProject.id())
                .name("Project Name")
                .type("bigquery")
                .isActive(true)
                .gcpProjectId("my-gcp-project-id")
                .timeoutSeconds(100)
                .privateKeyId("my-private-key-id")
                .privateKey("ABCDEFGHIJKL")
                .clientEmail("my_client_email")
                .clientId("my_client_di")
                .authUri("my_auth_uri")
                .tokenUri("my_token_uri")
                .authProviderX509CertUrl("my_auth_provider_x509_cert_url")
                .clientX509CertUrl("my_client_x509_cert_url")
                .retries(3)
                .build());
    
        // It is also possible to set the connection to use OAuth by filling in `applicationId` and `applicationSecret`
            var myConnectionWithOauth = new BigQueryConnection("myConnectionWithOauth", BigQueryConnectionArgs.builder()
                .projectId(dbtProject.id())
                .name("Project Name")
                .type("bigquery")
                .isActive(true)
                .gcpProjectId("my-gcp-project-id")
                .timeoutSeconds(100)
                .privateKeyId("my-private-key-id")
                .privateKey("ABCDEFGHIJKL")
                .clientEmail("my_client_email")
                .clientId("my_client_di")
                .authUri("my_auth_uri")
                .tokenUri("my_token_uri")
                .authProviderX509CertUrl("my_auth_provider_x509_cert_url")
                .clientX509CertUrl("my_client_x509_cert_url")
                .retries(3)
                .applicationId("oauth_application_id")
                .applicationSecret("oauth_secret_id")
                .build());
    
        }
    }
    
    resources:
      myConnection:
        type: dbtcloud:BigQueryConnection
        name: my_connection
        properties:
          projectId: ${dbtProject.id}
          name: Project Name
          type: bigquery
          isActive: true
          gcpProjectId: my-gcp-project-id
          timeoutSeconds: 100
          privateKeyId: my-private-key-id
          privateKey: ABCDEFGHIJKL
          clientEmail: my_client_email
          clientId: my_client_id
          authUri: my_auth_uri
          tokenUri: my_token_uri
          authProviderX509CertUrl: my_auth_provider_x509_cert_url
          clientX509CertUrl: my_client_x509_cert_url
          retries: 3
      # It is also possible to set the connection to use OAuth by filling in `applicationId` and `applicationSecret`
      myConnectionWithOauth:
        type: dbtcloud:BigQueryConnection
        name: my_connection_with_oauth
        properties:
          projectId: ${dbtProject.id}
          name: Project Name
          type: bigquery
          isActive: true
          gcpProjectId: my-gcp-project-id
          timeoutSeconds: 100
          privateKeyId: my-private-key-id
          privateKey: ABCDEFGHIJKL
          clientEmail: my_client_email
          clientId: my_client_id
          authUri: my_auth_uri
          tokenUri: my_token_uri
          authProviderX509CertUrl: my_auth_provider_x509_cert_url
          clientX509CertUrl: my_client_x509_cert_url
          retries: 3
          applicationId: oauth_application_id
          applicationSecret: oauth_secret_id
    
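    As noted above, this resource is deprecated in favor of dbtcloud.GlobalConnection. The sketch below shows roughly how the first example could be rewritten with the replacement resource; it assumes GlobalConnection accepts a nested `bigquery` block with the same service account fields, so check the GlobalConnection documentation for the exact schema before using it.

    import * as dbtcloud from "@pulumi/dbtcloud";

    // Hypothetical migration of the first example to the non-deprecated resource.
    // Global connections are not scoped to a single project, so no projectId is set here.
    const myGlobalConnection = new dbtcloud.GlobalConnection("my_connection", {
        name: "Project Name",
        bigquery: {
            gcpProjectId: "my-gcp-project-id",
            timeoutSeconds: 100,
            privateKeyId: "my-private-key-id",
            privateKey: "ABCDEFGHIJKL",
            clientEmail: "my_client_email",
            clientId: "my_client_id",
            authUri: "my_auth_uri",
            tokenUri: "my_token_uri",
            authProviderX509CertUrl: "my_auth_provider_x509_cert_url",
            clientX509CertUrl: "my_client_x509_cert_url",
            retries: 3,
        },
    });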

    Create BigQueryConnection Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new BigQueryConnection(name: string, args: BigQueryConnectionArgs, opts?: CustomResourceOptions);
    @overload
    def BigQueryConnection(resource_name: str,
                           args: BigQueryConnectionArgs,
                           opts: Optional[ResourceOptions] = None)
    
    @overload
    def BigQueryConnection(resource_name: str,
                           opts: Optional[ResourceOptions] = None,
                           private_key_id: Optional[str] = None,
                           timeout_seconds: Optional[int] = None,
                           auth_provider_x509_cert_url: Optional[str] = None,
                           auth_uri: Optional[str] = None,
                           client_email: Optional[str] = None,
                           client_id: Optional[str] = None,
                           client_x509_cert_url: Optional[str] = None,
                           gcp_project_id: Optional[str] = None,
                           token_uri: Optional[str] = None,
                           project_id: Optional[int] = None,
                           type: Optional[str] = None,
                           private_key: Optional[str] = None,
                           maximum_bytes_billed: Optional[int] = None,
                           location: Optional[str] = None,
                           is_active: Optional[bool] = None,
                           name: Optional[str] = None,
                           priority: Optional[str] = None,
                           application_secret: Optional[str] = None,
                           application_id: Optional[str] = None,
                           execution_project: Optional[str] = None,
                           retries: Optional[int] = None,
                           gcs_bucket: Optional[str] = None,
                           dataproc_region: Optional[str] = None,
                           dataproc_cluster_name: Optional[str] = None)
    func NewBigQueryConnection(ctx *Context, name string, args BigQueryConnectionArgs, opts ...ResourceOption) (*BigQueryConnection, error)
    public BigQueryConnection(string name, BigQueryConnectionArgs args, CustomResourceOptions? opts = null)
    public BigQueryConnection(String name, BigQueryConnectionArgs args)
    public BigQueryConnection(String name, BigQueryConnectionArgs args, CustomResourceOptions options)
    
    type: dbtcloud:BigQueryConnection
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args BigQueryConnectionArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args BigQueryConnectionArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args BigQueryConnectionArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args BigQueryConnectionArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args BigQueryConnectionArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var bigQueryConnectionResource = new DbtCloud.BigQueryConnection("bigQueryConnectionResource", new()
    {
        PrivateKeyId = "string",
        TimeoutSeconds = 0,
        AuthProviderX509CertUrl = "string",
        AuthUri = "string",
        ClientEmail = "string",
        ClientId = "string",
        ClientX509CertUrl = "string",
        GcpProjectId = "string",
        TokenUri = "string",
        ProjectId = 0,
        Type = "string",
        PrivateKey = "string",
        MaximumBytesBilled = 0,
        Location = "string",
        IsActive = false,
        Name = "string",
        Priority = "string",
        ApplicationSecret = "string",
        ApplicationId = "string",
        ExecutionProject = "string",
        Retries = 0,
        GcsBucket = "string",
        DataprocRegion = "string",
        DataprocClusterName = "string",
    });
    
    example, err := dbtcloud.NewBigQueryConnection(ctx, "bigQueryConnectionResource", &dbtcloud.BigQueryConnectionArgs{
    	PrivateKeyId:            pulumi.String("string"),
    	TimeoutSeconds:          pulumi.Int(0),
    	AuthProviderX509CertUrl: pulumi.String("string"),
    	AuthUri:                 pulumi.String("string"),
    	ClientEmail:             pulumi.String("string"),
    	ClientId:                pulumi.String("string"),
    	ClientX509CertUrl:       pulumi.String("string"),
    	GcpProjectId:            pulumi.String("string"),
    	TokenUri:                pulumi.String("string"),
    	ProjectId:               pulumi.Int(0),
    	Type:                    pulumi.String("string"),
    	PrivateKey:              pulumi.String("string"),
    	MaximumBytesBilled:      pulumi.Int(0),
    	Location:                pulumi.String("string"),
    	IsActive:                pulumi.Bool(false),
    	Name:                    pulumi.String("string"),
    	Priority:                pulumi.String("string"),
    	ApplicationSecret:       pulumi.String("string"),
    	ApplicationId:           pulumi.String("string"),
    	ExecutionProject:        pulumi.String("string"),
    	Retries:                 pulumi.Int(0),
    	GcsBucket:               pulumi.String("string"),
    	DataprocRegion:          pulumi.String("string"),
    	DataprocClusterName:     pulumi.String("string"),
    })
    
    var bigQueryConnectionResource = new BigQueryConnection("bigQueryConnectionResource", BigQueryConnectionArgs.builder()
        .privateKeyId("string")
        .timeoutSeconds(0)
        .authProviderX509CertUrl("string")
        .authUri("string")
        .clientEmail("string")
        .clientId("string")
        .clientX509CertUrl("string")
        .gcpProjectId("string")
        .tokenUri("string")
        .projectId(0)
        .type("string")
        .privateKey("string")
        .maximumBytesBilled(0)
        .location("string")
        .isActive(false)
        .name("string")
        .priority("string")
        .applicationSecret("string")
        .applicationId("string")
        .executionProject("string")
        .retries(0)
        .gcsBucket("string")
        .dataprocRegion("string")
        .dataprocClusterName("string")
        .build());
    
    big_query_connection_resource = dbtcloud.BigQueryConnection("bigQueryConnectionResource",
        private_key_id="string",
        timeout_seconds=0,
        auth_provider_x509_cert_url="string",
        auth_uri="string",
        client_email="string",
        client_id="string",
        client_x509_cert_url="string",
        gcp_project_id="string",
        token_uri="string",
        project_id=0,
        type="string",
        private_key="string",
        maximum_bytes_billed=0,
        location="string",
        is_active=False,
        name="string",
        priority="string",
        application_secret="string",
        application_id="string",
        execution_project="string",
        retries=0,
        gcs_bucket="string",
        dataproc_region="string",
        dataproc_cluster_name="string")
    
    const bigQueryConnectionResource = new dbtcloud.BigQueryConnection("bigQueryConnectionResource", {
        privateKeyId: "string",
        timeoutSeconds: 0,
        authProviderX509CertUrl: "string",
        authUri: "string",
        clientEmail: "string",
        clientId: "string",
        clientX509CertUrl: "string",
        gcpProjectId: "string",
        tokenUri: "string",
        projectId: 0,
        type: "string",
        privateKey: "string",
        maximumBytesBilled: 0,
        location: "string",
        isActive: false,
        name: "string",
        priority: "string",
        applicationSecret: "string",
        applicationId: "string",
        executionProject: "string",
        retries: 0,
        gcsBucket: "string",
        dataprocRegion: "string",
        dataprocClusterName: "string",
    });
    
    type: dbtcloud:BigQueryConnection
    properties:
        applicationId: string
        applicationSecret: string
        authProviderX509CertUrl: string
        authUri: string
        clientEmail: string
        clientId: string
        clientX509CertUrl: string
        dataprocClusterName: string
        dataprocRegion: string
        executionProject: string
        gcpProjectId: string
        gcsBucket: string
        isActive: false
        location: string
        maximumBytesBilled: 0
        name: string
        priority: string
        privateKey: string
        privateKeyId: string
        projectId: 0
        retries: 0
        timeoutSeconds: 0
        tokenUri: string
        type: string
    

    BigQueryConnection Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The BigQueryConnection resource accepts the following input properties:

    AuthProviderX509CertUrl string
    Auth Provider X509 Cert URL for the Service Account
    AuthUri string
    Auth URI for the Service Account
    ClientEmail string
    Service Account email
    ClientId string
    Client ID of the Service Account
    ClientX509CertUrl string
    Client X509 Cert URL for the Service Account
    GcpProjectId string
    GCP project ID
    PrivateKey string
    Private key of the Service Account
    PrivateKeyId string
    Private key ID of the Service Account
    ProjectId int
    Project ID to create the connection in
    TimeoutSeconds int
    Timeout in seconds for queries
    TokenUri string
    Token URI for the Service Account
    Type string
    The type of connection
    ApplicationId string
    The Application ID for BQ OAuth
    ApplicationSecret string
    The Application Secret for BQ OAuth
    DataprocClusterName string
    Dataproc cluster name for PySpark workloads
    DataprocRegion string
    Google Cloud region for PySpark workloads on Dataproc
    ExecutionProject string
    Project to bill for query execution
    GcsBucket string
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    IsActive bool
    Whether the connection is active
    Location string
    Location to create new Datasets in
    MaximumBytesBilled int
    Max number of bytes that can be billed for a given BigQuery query
    Name string
    Connection name
    Priority string
    The priority with which to execute BigQuery queries (batch or interactive)
    Retries int
    Number of retries for queries
    AuthProviderX509CertUrl string
    Auth Provider X509 Cert URL for the Service Account
    AuthUri string
    Auth URI for the Service Account
    ClientEmail string
    Service Account email
    ClientId string
    Client ID of the Service Account
    ClientX509CertUrl string
    Client X509 Cert URL for the Service Account
    GcpProjectId string
    GCP project ID
    PrivateKey string
    Private key of the Service Account
    PrivateKeyId string
    Private key ID of the Service Account
    ProjectId int
    Project ID to create the connection in
    TimeoutSeconds int
    Timeout in seconds for queries
    TokenUri string
    Token URI for the Service Account
    Type string
    The type of connection
    ApplicationId string
    The Application ID for BQ OAuth
    ApplicationSecret string
    The Application Secret for BQ OAuth
    DataprocClusterName string
    Dataproc cluster name for PySpark workloads
    DataprocRegion string
    Google Cloud region for PySpark workloads on Dataproc
    ExecutionProject string
    Project to bill for query execution
    GcsBucket string
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    IsActive bool
    Whether the connection is active
    Location string
    Location to create new Datasets in
    MaximumBytesBilled int
    Max number of bytes that can be billed for a given BigQuery query
    Name string
    Connection name
    Priority string
    The priority with which to execute BigQuery queries (batch or interactive)
    Retries int
    Number of retries for queries
    authProviderX509CertUrl String
    Auth Provider X509 Cert URL for the Service Account
    authUri String
    Auth URI for the Service Account
    clientEmail String
    Service Account email
    clientId String
    Client ID of the Service Account
    clientX509CertUrl String
    Client X509 Cert URL for the Service Account
    gcpProjectId String
    GCP project ID
    privateKey String
    Private key of the Service Account
    privateKeyId String
    Private key ID of the Service Account
    projectId Integer
    Project ID to create the connection in
    timeoutSeconds Integer
    Timeout in seconds for queries
    tokenUri String
    Token URI for the Service Account
    type String
    The type of connection
    applicationId String
    The Application ID for BQ OAuth
    applicationSecret String
    The Application Secret for BQ OAuth
    dataprocClusterName String
    Dataproc cluster name for PySpark workloads
    dataprocRegion String
    Google Cloud region for PySpark workloads on Dataproc
    executionProject String
    Project to bill for query execution
    gcsBucket String
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    isActive Boolean
    Whether the connection is active
    location String
    Location to create new Datasets in
    maximumBytesBilled Integer
    Max number of bytes that can be billed for a given BigQuery query
    name String
    Connection name
    priority String
    The priority with which to execute BigQuery queries (batch or interactive)
    retries Integer
    Number of retries for queries
    authProviderX509CertUrl string
    Auth Provider X509 Cert URL for the Service Account
    authUri string
    Auth URI for the Service Account
    clientEmail string
    Service Account email
    clientId string
    Client ID of the Service Account
    clientX509CertUrl string
    Client X509 Cert URL for the Service Account
    gcpProjectId string
    GCP project ID
    privateKey string
    Private key of the Service Account
    privateKeyId string
    Private key ID of the Service Account
    projectId number
    Project ID to create the connection in
    timeoutSeconds number
    Timeout in seconds for queries
    tokenUri string
    Token URI for the Service Account
    type string
    The type of connection
    applicationId string
    The Application ID for BQ OAuth
    applicationSecret string
    The Application Secret for BQ OAuth
    dataprocClusterName string
    Dataproc cluster name for PySpark workloads
    dataprocRegion string
    Google Cloud region for PySpark workloads on Dataproc
    executionProject string
    Project to bill for query execution
    gcsBucket string
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    isActive boolean
    Whether the connection is active
    location string
    Location to create new Datasets in
    maximumBytesBilled number
    Max number of bytes that can be billed for a given BigQuery query
    name string
    Connection name
    priority string
    The priority with which to execute BigQuery queries (batch or interactive)
    retries number
    Number of retries for queries
    auth_provider_x509_cert_url str
    Auth Provider X509 Cert URL for the Service Account
    auth_uri str
    Auth URI for the Service Account
    client_email str
    Service Account email
    client_id str
    Client ID of the Service Account
    client_x509_cert_url str
    Client X509 Cert URL for the Service Account
    gcp_project_id str
    GCP project ID
    private_key str
    Private key of the Service Account
    private_key_id str
    Private key ID of the Service Account
    project_id int
    Project ID to create the connection in
    timeout_seconds int
    Timeout in seconds for queries
    token_uri str
    Token URI for the Service Account
    type str
    The type of connection
    application_id str
    The Application ID for BQ OAuth
    application_secret str
    The Application Secret for BQ OAuth
    dataproc_cluster_name str
    Dataproc cluster name for PySpark workloads
    dataproc_region str
    Google Cloud region for PySpark workloads on Dataproc
    execution_project str
    Project to bill for query execution
    gcs_bucket str
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    is_active bool
    Whether the connection is active
    location str
    Location to create new Datasets in
    maximum_bytes_billed int
    Max number of bytes that can be billed for a given BigQuery query
    name str
    Connection name
    priority str
    The priority with which to execute BigQuery queries (batch or interactive)
    retries int
    Number of retries for queries
    authProviderX509CertUrl String
    Auth Provider X509 Cert URL for the Service Account
    authUri String
    Auth URI for the Service Account
    clientEmail String
    Service Account email
    clientId String
    Client ID of the Service Account
    clientX509CertUrl String
    Client X509 Cert URL for the Service Account
    gcpProjectId String
    GCP project ID
    privateKey String
    Private key of the Service Account
    privateKeyId String
    Private key ID of the Service Account
    projectId Number
    Project ID to create the connection in
    timeoutSeconds Number
    Timeout in seconds for queries
    tokenUri String
    Token URI for the Service Account
    type String
    The type of connection
    applicationId String
    The Application ID for BQ OAuth
    applicationSecret String
    The Application Secret for BQ OAuth
    dataprocClusterName String
    Dataproc cluster name for PySpark workloads
    dataprocRegion String
    Google Cloud region for PySpark workloads on Dataproc
    executionProject String
    Project to bill for query execution
    gcsBucket String
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    isActive Boolean
    Whether the connection is active
    location String
    Location to create new Datasets in
    maximumBytesBilled Number
    Max number of bytes that can be billed for a given BigQuery query
    name String
    Connection name
    priority String
    The priority with which to execute BigQuery queries (batch or interactive)
    retries Number
    Number of retries for queries
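
    The privateKey and applicationSecret inputs are credentials and should not be hard-coded. A minimal sketch of reading them from Pulumi configuration as secrets, assuming the configuration keys `bqPrivateKey` and `bqOauthSecret` (both names are placeholders for this example):

    import * as pulumi from "@pulumi/pulumi";
    import * as dbtcloud from "@pulumi/dbtcloud";

    const config = new pulumi.Config();

    const secureConnection = new dbtcloud.BigQueryConnection("secure_connection", {
        projectId: dbtProject.id,
        name: "Project Name",
        type: "bigquery",
        gcpProjectId: "my-gcp-project-id",
        timeoutSeconds: 100,
        privateKeyId: "my-private-key-id",
        // requireSecret reads the value from stack configuration and marks it as
        // secret so it is encrypted in the state file.
        privateKey: config.requireSecret("bqPrivateKey"),
        clientEmail: "my_client_email",
        clientId: "my_client_id",
        authUri: "my_auth_uri",
        tokenUri: "my_token_uri",
        authProviderX509CertUrl: "my_auth_provider_x509_cert_url",
        clientX509CertUrl: "my_client_x509_cert_url",
        applicationId: "oauth_application_id",
        applicationSecret: config.requireSecret("bqOauthSecret"),
    });

    The configuration values can be set with, for example, pulumi config set --secret bqPrivateKey <value>.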

    Outputs

    All input properties are implicitly available as output properties. Additionally, the BigQueryConnection resource produces the following output properties:

    ConnectionId int
    Connection Identifier
    Id string
    The provider-assigned unique ID for this managed resource.
    IsConfiguredForOauth bool
    Whether the connection is configured for OAuth or not
    ConnectionId int
    Connection Identifier
    Id string
    The provider-assigned unique ID for this managed resource.
    IsConfiguredForOauth bool
    Whether the connection is configured for OAuth or not
    connectionId Integer
    Connection Identifier
    id String
    The provider-assigned unique ID for this managed resource.
    isConfiguredForOauth Boolean
    Whether the connection is configured for OAuth or not
    connectionId number
    Connection Identifier
    id string
    The provider-assigned unique ID for this managed resource.
    isConfiguredForOauth boolean
    Whether the connection is configured for OAuth or not
    connection_id int
    Connection Identifier
    id str
    The provider-assigned unique ID for this managed resource.
    is_configured_for_oauth bool
    Whether the connection is configured for OAuth or not
    connectionId Number
    Connection Identifier
    id String
    The provider-assigned unique ID for this managed resource.
    isConfiguredForOauth Boolean
    Whether the connection is configured for OAuth or not
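
    The computed properties can be referenced like any other output, for example to export the identifiers of a connection created above (a minimal sketch; `myConnection` refers to the resource from the Example Usage section):

    // Export the provider-computed identifiers as stack outputs.
    export const bigqueryConnectionId = myConnection.connectionId;
    export const bigqueryConnectionUsesOauth = myConnection.isConfiguredForOauth;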

    Look up Existing BigQueryConnection Resource

    Get an existing BigQueryConnection resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: BigQueryConnectionState, opts?: CustomResourceOptions): BigQueryConnection
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            application_id: Optional[str] = None,
            application_secret: Optional[str] = None,
            auth_provider_x509_cert_url: Optional[str] = None,
            auth_uri: Optional[str] = None,
            client_email: Optional[str] = None,
            client_id: Optional[str] = None,
            client_x509_cert_url: Optional[str] = None,
            connection_id: Optional[int] = None,
            dataproc_cluster_name: Optional[str] = None,
            dataproc_region: Optional[str] = None,
            execution_project: Optional[str] = None,
            gcp_project_id: Optional[str] = None,
            gcs_bucket: Optional[str] = None,
            is_active: Optional[bool] = None,
            is_configured_for_oauth: Optional[bool] = None,
            location: Optional[str] = None,
            maximum_bytes_billed: Optional[int] = None,
            name: Optional[str] = None,
            priority: Optional[str] = None,
            private_key: Optional[str] = None,
            private_key_id: Optional[str] = None,
            project_id: Optional[int] = None,
            retries: Optional[int] = None,
            timeout_seconds: Optional[int] = None,
            token_uri: Optional[str] = None,
            type: Optional[str] = None) -> BigQueryConnection
    func GetBigQueryConnection(ctx *Context, name string, id IDInput, state *BigQueryConnectionState, opts ...ResourceOption) (*BigQueryConnection, error)
    public static BigQueryConnection Get(string name, Input<string> id, BigQueryConnectionState? state, CustomResourceOptions? opts = null)
    public static BigQueryConnection get(String name, Output<String> id, BigQueryConnectionState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    ApplicationId string
    The Application ID for BQ OAuth
    ApplicationSecret string
    The Application Secret for BQ OAuth
    AuthProviderX509CertUrl string
    Auth Provider X509 Cert URL for the Service Account
    AuthUri string
    Auth URI for the Service Account
    ClientEmail string
    Service Account email
    ClientId string
    Client ID of the Service Account
    ClientX509CertUrl string
    Client X509 Cert URL for the Service Account
    ConnectionId int
    Connection Identifier
    DataprocClusterName string
    Dataproc cluster name for PySpark workloads
    DataprocRegion string
    Google Cloud region for PySpark workloads on Dataproc
    ExecutionProject string
    Project to bill for query execution
    GcpProjectId string
    GCP project ID
    GcsBucket string
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    IsActive bool
    Whether the connection is active
    IsConfiguredForOauth bool
    Whether the connection is configured for OAuth or not
    Location string
    Location to create new Datasets in
    MaximumBytesBilled int
    Max number of bytes that can be billed for a given BigQuery query
    Name string
    Connection name
    Priority string
    The priority with which to execute BigQuery queries (batch or interactive)
    PrivateKey string
    Private key of the Service Account
    PrivateKeyId string
    Private key ID of the Service Account
    ProjectId int
    Project ID to create the connection in
    Retries int
    Number of retries for queries
    TimeoutSeconds int
    Timeout in seconds for queries
    TokenUri string
    Token URI for the Service Account
    Type string
    The type of connection
    ApplicationId string
    The Application ID for BQ OAuth
    ApplicationSecret string
    The Application Secret for BQ OAuth
    AuthProviderX509CertUrl string
    Auth Provider X509 Cert URL for the Service Account
    AuthUri string
    Auth URI for the Service Account
    ClientEmail string
    Service Account email
    ClientId string
    Client ID of the Service Account
    ClientX509CertUrl string
    Client X509 Cert URL for the Service Account
    ConnectionId int
    Connection Identifier
    DataprocClusterName string
    Dataproc cluster name for PySpark workloads
    DataprocRegion string
    Google Cloud region for PySpark workloads on Dataproc
    ExecutionProject string
    Project to bill for query execution
    GcpProjectId string
    GCP project ID
    GcsBucket string
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    IsActive bool
    Whether the connection is active
    IsConfiguredForOauth bool
    Whether the connection is configured for OAuth or not
    Location string
    Location to create new Datasets in
    MaximumBytesBilled int
    Max number of bytes that can be billed for a given BigQuery query
    Name string
    Connection name
    Priority string
    The priority with which to execute BigQuery queries (batch or interactive)
    PrivateKey string
    Private key of the Service Account
    PrivateKeyId string
    Private key ID of the Service Account
    ProjectId int
    Project ID to create the connection in
    Retries int
    Number of retries for queries
    TimeoutSeconds int
    Timeout in seconds for queries
    TokenUri string
    Token URI for the Service Account
    Type string
    The type of connection
    applicationId String
    The Application ID for BQ OAuth
    applicationSecret String
    The Application Secret for BQ OAuth
    authProviderX509CertUrl String
    Auth Provider X509 Cert URL for the Service Account
    authUri String
    Auth URI for the Service Account
    clientEmail String
    Service Account email
    clientId String
    Client ID of the Service Account
    clientX509CertUrl String
    Client X509 Cert URL for the Service Account
    connectionId Integer
    Connection Identifier
    dataprocClusterName String
    Dataproc cluster name for PySpark workloads
    dataprocRegion String
    Google Cloud region for PySpark workloads on Dataproc
    executionProject String
    Project to bill for query execution
    gcpProjectId String
    GCP project ID
    gcsBucket String
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    isActive Boolean
    Whether the connection is active
    isConfiguredForOauth Boolean
    Whether the connection is configured for OAuth or not
    location String
    Location to create new Datasets in
    maximumBytesBilled Integer
    Max number of bytes that can be billed for a given BigQuery query
    name String
    Connection name
    priority String
    The priority with which to execute BigQuery queries (batch or interactive)
    privateKey String
    Private key of the Service Account
    privateKeyId String
    Private key ID of the Service Account
    projectId Integer
    Project ID to create the connection in
    retries Integer
    Number of retries for queries
    timeoutSeconds Integer
    Timeout in seconds for queries
    tokenUri String
    Token URI for the Service Account
    type String
    The type of connection
    applicationId string
    The Application ID for BQ OAuth
    applicationSecret string
    The Application Secret for BQ OAuth
    authProviderX509CertUrl string
    Auth Provider X509 Cert URL for the Service Account
    authUri string
    Auth URI for the Service Account
    clientEmail string
    Service Account email
    clientId string
    Client ID of the Service Account
    clientX509CertUrl string
    Client X509 Cert URL for the Service Account
    connectionId number
    Connection Identifier
    dataprocClusterName string
    Dataproc cluster name for PySpark workloads
    dataprocRegion string
    Google Cloud region for PySpark workloads on Dataproc
    executionProject string
    Project to bill for query execution
    gcpProjectId string
    GCP project ID
    gcsBucket string
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    isActive boolean
    Whether the connection is active
    isConfiguredForOauth boolean
    Whether the connection is configured for OAuth or not
    location string
    Location to create new Datasets in
    maximumBytesBilled number
    Max number of bytes that can be billed for a given BigQuery query
    name string
    Connection name
    priority string
    The priority with which to execute BigQuery queries (batch or interactive)
    privateKey string
    Private key of the Service Account
    privateKeyId string
    Private key ID of the Service Account
    projectId number
    Project ID to create the connection in
    retries number
    Number of retries for queries
    timeoutSeconds number
    Timeout in seconds for queries
    tokenUri string
    Token URI for the Service Account
    type string
    The type of connection
    application_id str
    The Application ID for BQ OAuth
    application_secret str
    The Application Secret for BQ OAuth
    auth_provider_x509_cert_url str
    Auth Provider X509 Cert URL for the Service Account
    auth_uri str
    Auth URI for the Service Account
    client_email str
    Service Account email
    client_id str
    Client ID of the Service Account
    client_x509_cert_url str
    Client X509 Cert URL for the Service Account
    connection_id int
    Connection Identifier
    dataproc_cluster_name str
    Dataproc cluster name for PySpark workloads
    dataproc_region str
    Google Cloud region for PySpark workloads on Dataproc
    execution_project str
    Project to bill for query execution
    gcp_project_id str
    GCP project ID
    gcs_bucket str
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    is_active bool
    Whether the connection is active
    is_configured_for_oauth bool
    Whether the connection is configured for OAuth or not
    location str
    Location to create new Datasets in
    maximum_bytes_billed int
    Max number of bytes that can be billed for a given BigQuery query
    name str
    Connection name
    priority str
    The priority with which to execute BigQuery queries (batch or interactive)
    private_key str
    Private key of the Service Account
    private_key_id str
    Private key ID of the Service Account
    project_id int
    Project ID to create the connection in
    retries int
    Number of retries for queries
    timeout_seconds int
    Timeout in seconds for queries
    token_uri str
    Token URI for the Service Account
    type str
    The type of connection
    applicationId String
    The Application ID for BQ OAuth
    applicationSecret String
    The Application Secret for BQ OAuth
    authProviderX509CertUrl String
    Auth Provider X509 Cert URL for the Service Account
    authUri String
    Auth URI for the Service Account
    clientEmail String
    Service Account email
    clientId String
    Client ID of the Service Account
    clientX509CertUrl String
    Client X509 Cert URL for the Service Account
    connectionId Number
    Connection Identifier
    dataprocClusterName String
    Dataproc cluster name for PySpark workloads
    dataprocRegion String
    Google Cloud region for PySpark workloads on Dataproc
    executionProject String
    Project to bill for query execution
    gcpProjectId String
    GCP project ID
    gcsBucket String
    URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
    isActive Boolean
    Whether the connection is active
    isConfiguredForOauth Boolean
    Whether the connection is configured for OAuth or not
    location String
    Location to create new Datasets in
    maximumBytesBilled Number
    Max number of bytes that can be billed for a given BigQuery query
    name String
    Connection name
    priority String
    The priority with which to execute BigQuery queries (batch or interactive)
    privateKey String
    Private key of the Service Account
    privateKeyId String
    Private key ID of the Service Account
    projectId Number
    Project ID to create the connection in
    retries Number
    Number of retries for queries
    timeoutSeconds Number
    Timeout in seconds for queries
    tokenUri String
    Token URI for the Service Account
    type String
    The type of connection
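
    For example, an existing connection can be referenced without managing it by calling the static get function with its provider-assigned ID (a minimal sketch; the ID value is a placeholder):

    import * as dbtcloud from "@pulumi/dbtcloud";

    // Look up an existing connection by ID. The returned object is read-only
    // from the point of view of this stack; Pulumi will not modify it.
    const existingConnection = dbtcloud.BigQueryConnection.get("existing_connection", "6789");

    export const existingGcpProject = existingConnection.gcpProjectId;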

    Import

    Using import blocks (requires Terraform >= 1.5):

    import {
      to = dbtcloud_bigquery_connection.my_connection
      id = "project_id:connection_id"
    }

    import {
      to = dbtcloud_bigquery_connection.my_connection
      id = "12345:6789"
    }

    Using the older import command:

    $ pulumi import dbtcloud:index/bigQueryConnection:BigQueryConnection my_connection "project_id:connection_id"
    
    $ pulumi import dbtcloud:index/bigQueryConnection:BigQueryConnection my_connection 12345:6789
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    dbtcloud pulumi/pulumi-dbtcloud
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the dbtcloud Terraform Provider.