dbtcloud.BigQueryConnection
Explore with Pulumi AI
Resource to create BigQuery connections in dbt Cloud. Can be set to use OAuth for developers.
This resource is deprecated and is going to be removed in the next major release, please use the
dbtcloud.GlobalConnection
resource instead to create BigQuery connections.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as dbtcloud from "@pulumi/dbtcloud";
const myConnection = new dbtcloud.BigQueryConnection("my_connection", {
projectId: dbtProject.id,
name: "Project Name",
type: "bigquery",
isActive: true,
gcpProjectId: "my-gcp-project-id",
timeoutSeconds: 100,
privateKeyId: "my-private-key-id",
privateKey: "ABCDEFGHIJKL",
clientEmail: "my_client_email",
clientId: "my_client_id",
authUri: "my_auth_uri",
tokenUri: "my_token_uri",
authProviderX509CertUrl: "my_auth_provider_x509_cert_url",
clientX509CertUrl: "my_client_x509_cert_url",
retries: 3,
});
// it is also possible to set the connection to use OAuth by filling in `application_id` and `application_secret`
const myConnectionWithOauth = new dbtcloud.BigQueryConnection("my_connection_with_oauth", {
projectId: dbtProject.id,
name: "Project Name",
type: "bigquery",
isActive: true,
gcpProjectId: "my-gcp-project-id",
timeoutSeconds: 100,
privateKeyId: "my-private-key-id",
privateKey: "ABCDEFGHIJKL",
clientEmail: "my_client_email",
clientId: "my_client_id",
authUri: "my_auth_uri",
tokenUri: "my_token_uri",
authProviderX509CertUrl: "my_auth_provider_x509_cert_url",
clientX509CertUrl: "my_client_x509_cert_url",
retries: 3,
applicationId: "oauth_application_id",
applicationSecret: "oauth_secret_id",
});
import pulumi
import pulumi_dbtcloud as dbtcloud
my_connection = dbtcloud.BigQueryConnection("my_connection",
project_id=dbt_project["id"],
name="Project Name",
type="bigquery",
is_active=True,
gcp_project_id="my-gcp-project-id",
timeout_seconds=100,
private_key_id="my-private-key-id",
private_key="ABCDEFGHIJKL",
client_email="my_client_email",
client_id="my_client_id",
auth_uri="my_auth_uri",
token_uri="my_token_uri",
auth_provider_x509_cert_url="my_auth_provider_x509_cert_url",
client_x509_cert_url="my_client_x509_cert_url",
retries=3)
# it is also possible to set the connection to use OAuth by filling in `application_id` and `application_secret`
my_connection_with_oauth = dbtcloud.BigQueryConnection("my_connection_with_oauth",
project_id=dbt_project["id"],
name="Project Name",
type="bigquery",
is_active=True,
gcp_project_id="my-gcp-project-id",
timeout_seconds=100,
private_key_id="my-private-key-id",
private_key="ABCDEFGHIJKL",
client_email="my_client_email",
client_id="my_client_id",
auth_uri="my_auth_uri",
token_uri="my_token_uri",
auth_provider_x509_cert_url="my_auth_provider_x509_cert_url",
client_x509_cert_url="my_client_x509_cert_url",
retries=3,
application_id="oauth_application_id",
application_secret="oauth_secret_id")
package main
import (
"github.com/pulumi/pulumi-dbtcloud/sdk/go/dbtcloud"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := dbtcloud.NewBigQueryConnection(ctx, "my_connection", &dbtcloud.BigQueryConnectionArgs{
ProjectId: pulumi.Any(dbtProject.Id),
Name: pulumi.String("Project Name"),
Type: pulumi.String("bigquery"),
IsActive: pulumi.Bool(true),
GcpProjectId: pulumi.String("my-gcp-project-id"),
TimeoutSeconds: pulumi.Int(100),
PrivateKeyId: pulumi.String("my-private-key-id"),
PrivateKey: pulumi.String("ABCDEFGHIJKL"),
ClientEmail: pulumi.String("my_client_email"),
ClientId: pulumi.String("my_client_id"),
AuthUri: pulumi.String("my_auth_uri"),
TokenUri: pulumi.String("my_token_uri"),
AuthProviderX509CertUrl: pulumi.String("my_auth_provider_x509_cert_url"),
ClientX509CertUrl: pulumi.String("my_client_x509_cert_url"),
Retries: pulumi.Int(3),
})
if err != nil {
return err
}
// it is also possible to set the connection to use OAuth by filling in `application_id` and `application_secret`
_, err = dbtcloud.NewBigQueryConnection(ctx, "my_connection_with_oauth", &dbtcloud.BigQueryConnectionArgs{
ProjectId: pulumi.Any(dbtProject.Id),
Name: pulumi.String("Project Name"),
Type: pulumi.String("bigquery"),
IsActive: pulumi.Bool(true),
GcpProjectId: pulumi.String("my-gcp-project-id"),
TimeoutSeconds: pulumi.Int(100),
PrivateKeyId: pulumi.String("my-private-key-id"),
PrivateKey: pulumi.String("ABCDEFGHIJKL"),
ClientEmail: pulumi.String("my_client_email"),
ClientId: pulumi.String("my_client_id"),
AuthUri: pulumi.String("my_auth_uri"),
TokenUri: pulumi.String("my_token_uri"),
AuthProviderX509CertUrl: pulumi.String("my_auth_provider_x509_cert_url"),
ClientX509CertUrl: pulumi.String("my_client_x509_cert_url"),
Retries: pulumi.Int(3),
ApplicationId: pulumi.String("oauth_application_id"),
ApplicationSecret: pulumi.String("oauth_secret_id"),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using DbtCloud = Pulumi.DbtCloud;
return await Deployment.RunAsync(() =>
{
var myConnection = new DbtCloud.BigQueryConnection("my_connection", new()
{
ProjectId = dbtProject.Id,
Name = "Project Name",
Type = "bigquery",
IsActive = true,
GcpProjectId = "my-gcp-project-id",
TimeoutSeconds = 100,
PrivateKeyId = "my-private-key-id",
PrivateKey = "ABCDEFGHIJKL",
ClientEmail = "my_client_email",
ClientId = "my_client_id",
AuthUri = "my_auth_uri",
TokenUri = "my_token_uri",
AuthProviderX509CertUrl = "my_auth_provider_x509_cert_url",
ClientX509CertUrl = "my_client_x509_cert_url",
Retries = 3,
});
// it is also possible to set the connection to use OAuth by filling in `application_id` and `application_secret`
var myConnectionWithOauth = new DbtCloud.BigQueryConnection("my_connection_with_oauth", new()
{
ProjectId = dbtProject.Id,
Name = "Project Name",
Type = "bigquery",
IsActive = true,
GcpProjectId = "my-gcp-project-id",
TimeoutSeconds = 100,
PrivateKeyId = "my-private-key-id",
PrivateKey = "ABCDEFGHIJKL",
ClientEmail = "my_client_email",
ClientId = "my_client_id",
AuthUri = "my_auth_uri",
TokenUri = "my_token_uri",
AuthProviderX509CertUrl = "my_auth_provider_x509_cert_url",
ClientX509CertUrl = "my_client_x509_cert_url",
Retries = 3,
ApplicationId = "oauth_application_id",
ApplicationSecret = "oauth_secret_id",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.dbtcloud.BigQueryConnection;
import com.pulumi.dbtcloud.BigQueryConnectionArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var myConnection = new BigQueryConnection("myConnection", BigQueryConnectionArgs.builder()
.projectId(dbtProject.id())
.name("Project Name")
.type("bigquery")
.isActive(true)
.gcpProjectId("my-gcp-project-id")
.timeoutSeconds(100)
.privateKeyId("my-private-key-id")
.privateKey("ABCDEFGHIJKL")
.clientEmail("my_client_email")
.clientId("my_client_id")
.authUri("my_auth_uri")
.tokenUri("my_token_uri")
.authProviderX509CertUrl("my_auth_provider_x509_cert_url")
.clientX509CertUrl("my_client_x509_cert_url")
.retries(3)
.build());
// it is also possible to set the connection to use OAuth by filling in `application_id` and `application_secret`
var myConnectionWithOauth = new BigQueryConnection("myConnectionWithOauth", BigQueryConnectionArgs.builder()
.projectId(dbtProject.id())
.name("Project Name")
.type("bigquery")
.isActive(true)
.gcpProjectId("my-gcp-project-id")
.timeoutSeconds(100)
.privateKeyId("my-private-key-id")
.privateKey("ABCDEFGHIJKL")
.clientEmail("my_client_email")
.clientId("my_client_id")
.authUri("my_auth_uri")
.tokenUri("my_token_uri")
.authProviderX509CertUrl("my_auth_provider_x509_cert_url")
.clientX509CertUrl("my_client_x509_cert_url")
.retries(3)
.applicationId("oauth_application_id")
.applicationSecret("oauth_secret_id")
.build());
}
}
resources:
myConnection:
type: dbtcloud:BigQueryConnection
name: my_connection
properties:
projectId: ${dbtProject.id}
name: Project Name
type: bigquery
isActive: true
gcpProjectId: my-gcp-project-id
timeoutSeconds: 100
privateKeyId: my-private-key-id
privateKey: ABCDEFGHIJKL
clientEmail: my_client_email
clientId: my_client_id
authUri: my_auth_uri
tokenUri: my_token_uri
authProviderX509CertUrl: my_auth_provider_x509_cert_url
clientX509CertUrl: my_client_x509_cert_url
retries: 3
# it is also possible to set the connection to use OAuth by filling in `application_id` and `application_secret`
myConnectionWithOauth:
type: dbtcloud:BigQueryConnection
name: my_connection_with_oauth
properties:
projectId: ${dbtProject.id}
name: Project Name
type: bigquery
isActive: true
gcpProjectId: my-gcp-project-id
timeoutSeconds: 100
privateKeyId: my-private-key-id
privateKey: ABCDEFGHIJKL
clientEmail: my_client_email
clientId: my_client_id
authUri: my_auth_uri
tokenUri: my_token_uri
authProviderX509CertUrl: my_auth_provider_x509_cert_url
clientX509CertUrl: my_client_x509_cert_url
retries: 3
applicationId: oauth_application_id
applicationSecret: oauth_secret_id
Create BigQueryConnection Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new BigQueryConnection(name: string, args: BigQueryConnectionArgs, opts?: CustomResourceOptions);
@overload
def BigQueryConnection(resource_name: str,
args: BigQueryConnectionArgs,
opts: Optional[ResourceOptions] = None)
@overload
def BigQueryConnection(resource_name: str,
opts: Optional[ResourceOptions] = None,
private_key_id: Optional[str] = None,
timeout_seconds: Optional[int] = None,
auth_provider_x509_cert_url: Optional[str] = None,
auth_uri: Optional[str] = None,
client_email: Optional[str] = None,
client_id: Optional[str] = None,
client_x509_cert_url: Optional[str] = None,
gcp_project_id: Optional[str] = None,
token_uri: Optional[str] = None,
project_id: Optional[int] = None,
type: Optional[str] = None,
private_key: Optional[str] = None,
maximum_bytes_billed: Optional[int] = None,
location: Optional[str] = None,
is_active: Optional[bool] = None,
name: Optional[str] = None,
priority: Optional[str] = None,
application_secret: Optional[str] = None,
application_id: Optional[str] = None,
execution_project: Optional[str] = None,
retries: Optional[int] = None,
gcs_bucket: Optional[str] = None,
dataproc_region: Optional[str] = None,
dataproc_cluster_name: Optional[str] = None)
func NewBigQueryConnection(ctx *Context, name string, args BigQueryConnectionArgs, opts ...ResourceOption) (*BigQueryConnection, error)
public BigQueryConnection(string name, BigQueryConnectionArgs args, CustomResourceOptions? opts = null)
public BigQueryConnection(String name, BigQueryConnectionArgs args)
public BigQueryConnection(String name, BigQueryConnectionArgs args, CustomResourceOptions options)
type: dbtcloud:BigQueryConnection
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args BigQueryConnectionArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args BigQueryConnectionArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args BigQueryConnectionArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args BigQueryConnectionArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args BigQueryConnectionArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var bigQueryConnectionResource = new DbtCloud.BigQueryConnection("bigQueryConnectionResource", new()
{
PrivateKeyId = "string",
TimeoutSeconds = 0,
AuthProviderX509CertUrl = "string",
AuthUri = "string",
ClientEmail = "string",
ClientId = "string",
ClientX509CertUrl = "string",
GcpProjectId = "string",
TokenUri = "string",
ProjectId = 0,
Type = "string",
PrivateKey = "string",
MaximumBytesBilled = 0,
Location = "string",
IsActive = false,
Name = "string",
Priority = "string",
ApplicationSecret = "string",
ApplicationId = "string",
ExecutionProject = "string",
Retries = 0,
GcsBucket = "string",
DataprocRegion = "string",
DataprocClusterName = "string",
});
example, err := dbtcloud.NewBigQueryConnection(ctx, "bigQueryConnectionResource", &dbtcloud.BigQueryConnectionArgs{
PrivateKeyId: pulumi.String("string"),
TimeoutSeconds: pulumi.Int(0),
AuthProviderX509CertUrl: pulumi.String("string"),
AuthUri: pulumi.String("string"),
ClientEmail: pulumi.String("string"),
ClientId: pulumi.String("string"),
ClientX509CertUrl: pulumi.String("string"),
GcpProjectId: pulumi.String("string"),
TokenUri: pulumi.String("string"),
ProjectId: pulumi.Int(0),
Type: pulumi.String("string"),
PrivateKey: pulumi.String("string"),
MaximumBytesBilled: pulumi.Int(0),
Location: pulumi.String("string"),
IsActive: pulumi.Bool(false),
Name: pulumi.String("string"),
Priority: pulumi.String("string"),
ApplicationSecret: pulumi.String("string"),
ApplicationId: pulumi.String("string"),
ExecutionProject: pulumi.String("string"),
Retries: pulumi.Int(0),
GcsBucket: pulumi.String("string"),
DataprocRegion: pulumi.String("string"),
DataprocClusterName: pulumi.String("string"),
})
var bigQueryConnectionResource = new BigQueryConnection("bigQueryConnectionResource", BigQueryConnectionArgs.builder()
.privateKeyId("string")
.timeoutSeconds(0)
.authProviderX509CertUrl("string")
.authUri("string")
.clientEmail("string")
.clientId("string")
.clientX509CertUrl("string")
.gcpProjectId("string")
.tokenUri("string")
.projectId(0)
.type("string")
.privateKey("string")
.maximumBytesBilled(0)
.location("string")
.isActive(false)
.name("string")
.priority("string")
.applicationSecret("string")
.applicationId("string")
.executionProject("string")
.retries(0)
.gcsBucket("string")
.dataprocRegion("string")
.dataprocClusterName("string")
.build());
big_query_connection_resource = dbtcloud.BigQueryConnection("bigQueryConnectionResource",
private_key_id="string",
timeout_seconds=0,
auth_provider_x509_cert_url="string",
auth_uri="string",
client_email="string",
client_id="string",
client_x509_cert_url="string",
gcp_project_id="string",
token_uri="string",
project_id=0,
type="string",
private_key="string",
maximum_bytes_billed=0,
location="string",
is_active=False,
name="string",
priority="string",
application_secret="string",
application_id="string",
execution_project="string",
retries=0,
gcs_bucket="string",
dataproc_region="string",
dataproc_cluster_name="string")
const bigQueryConnectionResource = new dbtcloud.BigQueryConnection("bigQueryConnectionResource", {
privateKeyId: "string",
timeoutSeconds: 0,
authProviderX509CertUrl: "string",
authUri: "string",
clientEmail: "string",
clientId: "string",
clientX509CertUrl: "string",
gcpProjectId: "string",
tokenUri: "string",
projectId: 0,
type: "string",
privateKey: "string",
maximumBytesBilled: 0,
location: "string",
isActive: false,
name: "string",
priority: "string",
applicationSecret: "string",
applicationId: "string",
executionProject: "string",
retries: 0,
gcsBucket: "string",
dataprocRegion: "string",
dataprocClusterName: "string",
});
type: dbtcloud:BigQueryConnection
properties:
applicationId: string
applicationSecret: string
authProviderX509CertUrl: string
authUri: string
clientEmail: string
clientId: string
clientX509CertUrl: string
dataprocClusterName: string
dataprocRegion: string
executionProject: string
gcpProjectId: string
gcsBucket: string
isActive: false
location: string
maximumBytesBilled: 0
name: string
priority: string
privateKey: string
privateKeyId: string
projectId: 0
retries: 0
timeoutSeconds: 0
tokenUri: string
type: string
BigQueryConnection Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The BigQueryConnection resource accepts the following input properties:
- Auth
Provider stringX509Cert Url - Auth Provider X509 Cert URL for the Service Account
- Auth
Uri string - Auth URI for the Service Account
- Client
Email string - Service Account email
- Client
Id string - Client ID of the Service Account
- Client
X509Cert stringUrl - Client X509 Cert URL for the Service Account
- Gcp
Project stringId - GCP project ID
- Private
Key string - Private key of the Service Account
- Private
Key stringId - Private key ID of the Service Account
- Project
Id int - Project ID to create the connection in
- Timeout
Seconds int - Timeout in seconds for queries
- Token
Uri string - Token URI for the Service Account
- Type string
- The type of connection
- Application
Id string - The Application ID for BQ OAuth
- Application
Secret string - The Application Secret for BQ OAuth
- Dataproc
Cluster stringName - Dataproc cluster name for PySpark workloads
- Dataproc
Region string - Google Cloud region for PySpark workloads on Dataproc
- Execution
Project string - Project to bill for query execution
- Gcs
Bucket string - URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- Is
Active bool - Whether the connection is active
- Location string
- Location to create new Datasets in
- Maximum
Bytes intBilled - Max number of bytes that can be billed for a given BigQuery query
- Name string
- Connection name
- Priority string
- The priority with which to execute BigQuery queries (batch or interactive)
- Retries int
- Number of retries for queries
- Auth
Provider stringX509Cert Url - Auth Provider X509 Cert URL for the Service Account
- Auth
Uri string - Auth URI for the Service Account
- Client
Email string - Service Account email
- Client
Id string - Client ID of the Service Account
- Client
X509Cert stringUrl - Client X509 Cert URL for the Service Account
- Gcp
Project stringId - GCP project ID
- Private
Key string - Private key of the Service Account
- Private
Key stringId - Private key ID of the Service Account
- Project
Id int - Project ID to create the connection in
- Timeout
Seconds int - Timeout in seconds for queries
- Token
Uri string - Token URI for the Service Account
- Type string
- The type of connection
- Application
Id string - The Application ID for BQ OAuth
- Application
Secret string - The Application Secret for BQ OAuth
- Dataproc
Cluster stringName - Dataproc cluster name for PySpark workloads
- Dataproc
Region string - Google Cloud region for PySpark workloads on Dataproc
- Execution
Project string - Project to bill for query execution
- Gcs
Bucket string - URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- Is
Active bool - Whether the connection is active
- Location string
- Location to create new Datasets in
- Maximum
Bytes intBilled - Max number of bytes that can be billed for a given BigQuery query
- Name string
- Connection name
- Priority string
- The priority with which to execute BigQuery queries (batch or interactive)
- Retries int
- Number of retries for queries
- auth
Provider StringX509Cert Url - Auth Provider X509 Cert URL for the Service Account
- auth
Uri String - Auth URI for the Service Account
- client
Email String - Service Account email
- client
Id String - Client ID of the Service Account
- client
X509Cert StringUrl - Client X509 Cert URL for the Service Account
- gcp
Project StringId - GCP project ID
- private
Key String - Private key of the Service Account
- private
Key StringId - Private key ID of the Service Account
- project
Id Integer - Project ID to create the connection in
- timeout
Seconds Integer - Timeout in seconds for queries
- token
Uri String - Token URI for the Service Account
- type String
- The type of connection
- application
Id String - The Application ID for BQ OAuth
- application
Secret String - The Application Secret for BQ OAuth
- dataproc
Cluster StringName - Dataproc cluster name for PySpark workloads
- dataproc
Region String - Google Cloud region for PySpark workloads on Dataproc
- execution
Project String - Project to bill for query execution
- gcs
Bucket String - URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- is
Active Boolean - Whether the connection is active
- location String
- Location to create new Datasets in
- maximum
Bytes IntegerBilled - Max number of bytes that can be billed for a given BigQuery query
- name String
- Connection name
- priority String
- The priority with which to execute BigQuery queries (batch or interactive)
- retries Integer
- Number of retries for queries
- auth
Provider stringX509Cert Url - Auth Provider X509 Cert URL for the Service Account
- auth
Uri string - Auth URI for the Service Account
- client
Email string - Service Account email
- client
Id string - Client ID of the Service Account
- client
X509Cert stringUrl - Client X509 Cert URL for the Service Account
- gcp
Project stringId - GCP project ID
- private
Key string - Private key of the Service Account
- private
Key stringId - Private key ID of the Service Account
- project
Id number - Project ID to create the connection in
- timeout
Seconds number - Timeout in seconds for queries
- token
Uri string - Token URI for the Service Account
- type string
- The type of connection
- application
Id string - The Application ID for BQ OAuth
- application
Secret string - The Application Secret for BQ OAuth
- dataproc
Cluster stringName - Dataproc cluster name for PySpark workloads
- dataproc
Region string - Google Cloud region for PySpark workloads on Dataproc
- execution
Project string - Project to bill for query execution
- gcs
Bucket string - URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- is
Active boolean - Whether the connection is active
- location string
- Location to create new Datasets in
- maximum
Bytes numberBilled - Max number of bytes that can be billed for a given BigQuery query
- name string
- Connection name
- priority string
- The priority with which to execute BigQuery queries (batch or interactive)
- retries number
- Number of retries for queries
- auth_
provider_ strx509_ cert_ url - Auth Provider X509 Cert URL for the Service Account
- auth_
uri str - Auth URI for the Service Account
- client_
email str - Service Account email
- client_
id str - Client ID of the Service Account
- client_
x509_ strcert_ url - Client X509 Cert URL for the Service Account
- gcp_
project_ strid - GCP project ID
- private_
key str - Private key of the Service Account
- private_
key_ strid - Private key ID of the Service Account
- project_
id int - Project ID to create the connection in
- timeout_
seconds int - Timeout in seconds for queries
- token_
uri str - Token URI for the Service Account
- type str
- The type of connection
- application_
id str - The Application ID for BQ OAuth
- application_
secret str - The Application Secret for BQ OAuth
- dataproc_
cluster_ strname - Dataproc cluster name for PySpark workloads
- dataproc_
region str - Google Cloud region for PySpark workloads on Dataproc
- execution_
project str - Project to bill for query execution
- gcs_
bucket str - URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- is_
active bool - Whether the connection is active
- location str
- Location to create new Datasets in
- maximum_
bytes_ intbilled - Max number of bytes that can be billed for a given BigQuery query
- name str
- Connection name
- priority str
- The priority with which to execute BigQuery queries (batch or interactive)
- retries int
- Number of retries for queries
- auth
Provider StringX509Cert Url - Auth Provider X509 Cert URL for the Service Account
- auth
Uri String - Auth URI for the Service Account
- client
Email String - Service Account email
- client
Id String - Client ID of the Service Account
- client
X509Cert StringUrl - Client X509 Cert URL for the Service Account
- gcp
Project StringId - GCP project ID
- private
Key String - Private key of the Service Account
- private
Key StringId - Private key ID of the Service Account
- project
Id Number - Project ID to create the connection in
- timeout
Seconds Number - Timeout in seconds for queries
- token
Uri String - Token URI for the Service Account
- type String
- The type of connection
- application
Id String - The Application ID for BQ OAuth
- application
Secret String - The Application Secret for BQ OAuth
- dataproc
Cluster StringName - Dataproc cluster name for PySpark workloads
- dataproc
Region String - Google Cloud region for PySpark workloads on Dataproc
- execution
Project String - Project to bill for query execution
- gcs
Bucket String - URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- is
Active Boolean - Whether the connection is active
- location String
- Location to create new Datasets in
- maximum
Bytes NumberBilled - Max number of bytes that can be billed for a given BigQuery query
- name String
- Connection name
- priority String
- The priority with which to execute BigQuery queries (batch or interactive)
- retries Number
- Number of retries for queries
Outputs
All input properties are implicitly available as output properties. Additionally, the BigQueryConnection resource produces the following output properties:
- Connection
Id int - Connection Identifier
- Id string
- The provider-assigned unique ID for this managed resource.
- Is
Configured boolFor Oauth - Whether the connection is configured for OAuth or not
- Connection
Id int - Connection Identifier
- Id string
- The provider-assigned unique ID for this managed resource.
- Is
Configured boolFor Oauth - Whether the connection is configured for OAuth or not
- connection
Id Integer - Connection Identifier
- id String
- The provider-assigned unique ID for this managed resource.
- is
Configured BooleanFor Oauth - Whether the connection is configured for OAuth or not
- connection
Id number - Connection Identifier
- id string
- The provider-assigned unique ID for this managed resource.
- is
Configured booleanFor Oauth - Whether the connection is configured for OAuth or not
- connection_
id int - Connection Identifier
- id str
- The provider-assigned unique ID for this managed resource.
- is_
configured_ boolfor_ oauth - Whether the connection is configured for OAuth or not
- connection
Id Number - Connection Identifier
- id String
- The provider-assigned unique ID for this managed resource.
- is
Configured BooleanFor Oauth - Whether the connection is configured for OAuth or not
Look up Existing BigQueryConnection Resource
Get an existing BigQueryConnection resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: BigQueryConnectionState, opts?: CustomResourceOptions): BigQueryConnection
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
application_id: Optional[str] = None,
application_secret: Optional[str] = None,
auth_provider_x509_cert_url: Optional[str] = None,
auth_uri: Optional[str] = None,
client_email: Optional[str] = None,
client_id: Optional[str] = None,
client_x509_cert_url: Optional[str] = None,
connection_id: Optional[int] = None,
dataproc_cluster_name: Optional[str] = None,
dataproc_region: Optional[str] = None,
execution_project: Optional[str] = None,
gcp_project_id: Optional[str] = None,
gcs_bucket: Optional[str] = None,
is_active: Optional[bool] = None,
is_configured_for_oauth: Optional[bool] = None,
location: Optional[str] = None,
maximum_bytes_billed: Optional[int] = None,
name: Optional[str] = None,
priority: Optional[str] = None,
private_key: Optional[str] = None,
private_key_id: Optional[str] = None,
project_id: Optional[int] = None,
retries: Optional[int] = None,
timeout_seconds: Optional[int] = None,
token_uri: Optional[str] = None,
type: Optional[str] = None) -> BigQueryConnection
func GetBigQueryConnection(ctx *Context, name string, id IDInput, state *BigQueryConnectionState, opts ...ResourceOption) (*BigQueryConnection, error)
public static BigQueryConnection Get(string name, Input<string> id, BigQueryConnectionState? state, CustomResourceOptions? opts = null)
public static BigQueryConnection get(String name, Output<String> id, BigQueryConnectionState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- ApplicationId string
- The Application ID for BQ OAuth
- ApplicationSecret string
- The Application Secret for BQ OAuth
- AuthProviderX509CertUrl string
- Auth Provider X509 Cert URL for the Service Account
- AuthUri string
- Auth URI for the Service Account
- ClientEmail string
- Service Account email
- ClientId string
- Client ID of the Service Account
- ClientX509CertUrl string
- Client X509 Cert URL for the Service Account
- ConnectionId int
- Connection Identifier
- DataprocClusterName string
- Dataproc cluster name for PySpark workloads
- DataprocRegion string
- Google Cloud region for PySpark workloads on Dataproc
- ExecutionProject string
- Project to bill for query execution
- GcpProjectId string
- GCP project ID
- GcsBucket string
- URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- IsActive bool
- Whether the connection is active
- IsConfiguredForOauth bool
- Whether the connection is configured for OAuth or not
- Location string
- Location to create new Datasets in
- MaximumBytesBilled int
- Max number of bytes that can be billed for a given BigQuery query
- Name string
- Connection name
- Priority string
- The priority with which to execute BigQuery queries (batch or interactive)
- PrivateKey string
- Private key of the Service Account
- PrivateKeyId string
- Private key ID of the Service Account
- ProjectId int
- Project ID to create the connection in
- Retries int
- Number of retries for queries
- TimeoutSeconds int
- Timeout in seconds for queries
- TokenUri string
- Token URI for the Service Account
- Type string
- The type of connection
- ApplicationId string
- The Application ID for BQ OAuth
- ApplicationSecret string
- The Application Secret for BQ OAuth
- AuthProviderX509CertUrl string
- Auth Provider X509 Cert URL for the Service Account
- AuthUri string
- Auth URI for the Service Account
- ClientEmail string
- Service Account email
- ClientId string
- Client ID of the Service Account
- ClientX509CertUrl string
- Client X509 Cert URL for the Service Account
- ConnectionId int
- Connection Identifier
- DataprocClusterName string
- Dataproc cluster name for PySpark workloads
- DataprocRegion string
- Google Cloud region for PySpark workloads on Dataproc
- ExecutionProject string
- Project to bill for query execution
- GcpProjectId string
- GCP project ID
- GcsBucket string
- URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- IsActive bool
- Whether the connection is active
- IsConfiguredForOauth bool
- Whether the connection is configured for OAuth or not
- Location string
- Location to create new Datasets in
- MaximumBytesBilled int
- Max number of bytes that can be billed for a given BigQuery query
- Name string
- Connection name
- Priority string
- The priority with which to execute BigQuery queries (batch or interactive)
- PrivateKey string
- Private key of the Service Account
- PrivateKeyId string
- Private key ID of the Service Account
- ProjectId int
- Project ID to create the connection in
- Retries int
- Number of retries for queries
- TimeoutSeconds int
- Timeout in seconds for queries
- TokenUri string
- Token URI for the Service Account
- Type string
- The type of connection
- applicationId String
- The Application ID for BQ OAuth
- applicationSecret String
- The Application Secret for BQ OAuth
- authProviderX509CertUrl String
- Auth Provider X509 Cert URL for the Service Account
- authUri String
- Auth URI for the Service Account
- clientEmail String
- Service Account email
- clientId String
- Client ID of the Service Account
- clientX509CertUrl String
- Client X509 Cert URL for the Service Account
- connectionId Integer
- Connection Identifier
- dataprocClusterName String
- Dataproc cluster name for PySpark workloads
- dataprocRegion String
- Google Cloud region for PySpark workloads on Dataproc
- executionProject String
- Project to bill for query execution
- gcpProjectId String
- GCP project ID
- gcsBucket String
- URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- isActive Boolean
- Whether the connection is active
- isConfiguredForOauth Boolean
- Whether the connection is configured for OAuth or not
- location String
- Location to create new Datasets in
- maximumBytesBilled Integer
- Max number of bytes that can be billed for a given BigQuery query
- name String
- Connection name
- priority String
- The priority with which to execute BigQuery queries (batch or interactive)
- privateKey String
- Private key of the Service Account
- privateKeyId String
- Private key ID of the Service Account
- projectId Integer
- Project ID to create the connection in
- retries Integer
- Number of retries for queries
- timeoutSeconds Integer
- Timeout in seconds for queries
- tokenUri String
- Token URI for the Service Account
- type String
- The type of connection
- applicationId string
- The Application ID for BQ OAuth
- applicationSecret string
- The Application Secret for BQ OAuth
- authProviderX509CertUrl string
- Auth Provider X509 Cert URL for the Service Account
- authUri string
- Auth URI for the Service Account
- clientEmail string
- Service Account email
- clientId string
- Client ID of the Service Account
- clientX509CertUrl string
- Client X509 Cert URL for the Service Account
- connectionId number
- Connection Identifier
- dataprocClusterName string
- Dataproc cluster name for PySpark workloads
- dataprocRegion string
- Google Cloud region for PySpark workloads on Dataproc
- executionProject string
- Project to bill for query execution
- gcpProjectId string
- GCP project ID
- gcsBucket string
- URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- isActive boolean
- Whether the connection is active
- isConfiguredForOauth boolean
- Whether the connection is configured for OAuth or not
- location string
- Location to create new Datasets in
- maximumBytesBilled number
- Max number of bytes that can be billed for a given BigQuery query
- name string
- Connection name
- priority string
- The priority with which to execute BigQuery queries (batch or interactive)
- privateKey string
- Private key of the Service Account
- privateKeyId string
- Private key ID of the Service Account
- projectId number
- Project ID to create the connection in
- retries number
- Number of retries for queries
- timeoutSeconds number
- Timeout in seconds for queries
- tokenUri string
- Token URI for the Service Account
- type string
- The type of connection
- application_id str
- The Application ID for BQ OAuth
- application_secret str
- The Application Secret for BQ OAuth
- auth_provider_x509_cert_url str
- Auth Provider X509 Cert URL for the Service Account
- auth_uri str
- Auth URI for the Service Account
- client_email str
- Service Account email
- client_id str
- Client ID of the Service Account
- client_x509_cert_url str
- Client X509 Cert URL for the Service Account
- connection_id int
- Connection Identifier
- dataproc_cluster_name str
- Dataproc cluster name for PySpark workloads
- dataproc_region str
- Google Cloud region for PySpark workloads on Dataproc
- execution_project str
- Project to bill for query execution
- gcp_project_id str
- GCP project ID
- gcs_bucket str
- URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- is_active bool
- Whether the connection is active
- is_configured_for_oauth bool
- Whether the connection is configured for OAuth or not
- location str
- Location to create new Datasets in
- maximum_bytes_billed int
- Max number of bytes that can be billed for a given BigQuery query
- name str
- Connection name
- priority str
- The priority with which to execute BigQuery queries (batch or interactive)
- private_key str
- Private key of the Service Account
- private_key_id str
- Private key ID of the Service Account
- project_id int
- Project ID to create the connection in
- retries int
- Number of retries for queries
- timeout_seconds int
- Timeout in seconds for queries
- token_uri str
- Token URI for the Service Account
- type str
- The type of connection
- applicationId String
- The Application ID for BQ OAuth
- applicationSecret String
- The Application Secret for BQ OAuth
- authProviderX509CertUrl String
- Auth Provider X509 Cert URL for the Service Account
- authUri String
- Auth URI for the Service Account
- clientEmail String
- Service Account email
- clientId String
- Client ID of the Service Account
- clientX509CertUrl String
- Client X509 Cert URL for the Service Account
- connectionId Number
- Connection Identifier
- dataprocClusterName String
- Dataproc cluster name for PySpark workloads
- dataprocRegion String
- Google Cloud region for PySpark workloads on Dataproc
- executionProject String
- Project to bill for query execution
- gcpProjectId String
- GCP project ID
- gcsBucket String
- URI for a Google Cloud Storage bucket to host Python code executed via Dataproc
- isActive Boolean
- Whether the connection is active
- isConfiguredForOauth Boolean
- Whether the connection is configured for OAuth or not
- location String
- Location to create new Datasets in
- maximumBytesBilled Number
- Max number of bytes that can be billed for a given BigQuery query
- name String
- Connection name
- priority String
- The priority with which to execute BigQuery queries (batch or interactive)
- privateKey String
- Private key of the Service Account
- privateKeyId String
- Private key ID of the Service Account
- projectId Number
- Project ID to create the connection in
- retries Number
- Number of retries for queries
- timeoutSeconds Number
- Timeout in seconds for queries
- tokenUri String
- Token URI for the Service Account
- type String
- The type of connection
Import
using import blocks (requires Terraform >= 1.5)
import {
to = dbtcloud_bigquery_connection.my_connection
id = "project_id:connection_id"
}
import {
to = dbtcloud_bigquery_connection.my_connection
id = "12345:6789"
}
using the older import command
$ pulumi import dbtcloud:index/bigQueryConnection:BigQueryConnection my_connection "project_id:connection_id"
$ pulumi import dbtcloud:index/bigQueryConnection:BigQueryConnection my_connection 12345:6789
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- dbtcloud pulumi/pulumi-dbtcloud
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
dbtcloud
Terraform Provider.